Update with commit 7b20915f4e36a801873a26351b8b030c4d8d7b3b
See: https://github.com/huggingface/transformers/commit/7b20915f4e36a801873a26351b8b030c4d8d7b3b
- frameworks.json +2 -0
- pipeline_tags.json +3 -0
frameworks.json
CHANGED

@@ -122,6 +122,8 @@
 {"model_type":"glm4","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"glm4_moe","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"glm4v","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
+{"model_type":"glm4v_moe","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
+{"model_type":"glm4v_moe_text","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"glm4v_text","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"glpn","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
 {"model_type":"got_ocr2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
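Each line of frameworks.json is a standalone JSON object mapping a model_type to its supported frameworks and its preprocessing class. A minimal sketch of how the new entries could be read back, assuming a local copy of the file in the JSON-lines layout shown in the diff (the path is a placeholder, not the canonical location):

```python
import json

def load_frameworks(path="frameworks.json"):
    # One JSON object per line, keyed by model_type.
    entries = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            record = json.loads(line)
            entries[record["model_type"]] = record
    return entries

entries = load_frameworks()
# The two rows added by this commit:
print(entries["glm4v_moe"]["processor"])       # AutoProcessor
print(entries["glm4v_moe_text"]["processor"])  # AutoTokenizer
```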
pipeline_tags.json
CHANGED

@@ -498,6 +498,9 @@
 {"model_class":"Glm4MoeModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"Glm4vForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
 {"model_class":"Glm4vModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"Glm4vMoeForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
+{"model_class":"Glm4vMoeModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"Glm4vMoeTextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"Glm4vTextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"GlmForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
 {"model_class":"GlmForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
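pipeline_tags.json maps each model class to its default pipeline tag and the auto class used to load it. Per the new entries, Glm4vMoeForConditionalGeneration is tagged image-text-to-text and loaded through AutoModelForImageTextToText, while frameworks.json assigns glm4v_moe to AutoProcessor. A minimal sketch of loading a checkpoint through those auto classes, assuming a hypothetical repository id (substitute the real one):

```python
from transformers import AutoModelForImageTextToText, AutoProcessor

# Placeholder repo id for illustration only; not a real checkpoint name.
checkpoint = "org-name/glm-4v-moe-checkpoint"

# glm4v_moe -> AutoProcessor (frameworks.json)
processor = AutoProcessor.from_pretrained(checkpoint)

# Glm4vMoeForConditionalGeneration -> AutoModelForImageTextToText (pipeline_tags.json)
model = AutoModelForImageTextToText.from_pretrained(checkpoint)
```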