Update with commit ebbcf00ad1b06fa87effe179d128e73390255844
Browse files — see: https://github.com/huggingface/transformers/commit/ebbcf00ad1b06fa87effe179d128e73390255844
- frameworks.json +1 -0
- pipeline_tags.json +1 -0
frameworks.json
CHANGED
|
@@ -282,6 +282,7 @@
|
|
| 282 |
{"model_type":"qwen3","pytorch":true,"processor":"AutoTokenizer"}
|
| 283 |
{"model_type":"qwen3_moe","pytorch":true,"processor":"AutoTokenizer"}
|
| 284 |
{"model_type":"qwen3_next","pytorch":true,"processor":"AutoTokenizer"}
|
|
|
|
| 285 |
{"model_type":"qwen3_vl","pytorch":true,"processor":"AutoProcessor"}
|
| 286 |
{"model_type":"qwen3_vl_moe","pytorch":true,"processor":"AutoProcessor"}
|
| 287 |
{"model_type":"qwen3_vl_moe_text","pytorch":true,"processor":"AutoTokenizer"}
|
|
|
|
| 282 |
{"model_type":"qwen3","pytorch":true,"processor":"AutoTokenizer"}
|
| 283 |
{"model_type":"qwen3_moe","pytorch":true,"processor":"AutoTokenizer"}
|
| 284 |
{"model_type":"qwen3_next","pytorch":true,"processor":"AutoTokenizer"}
|
| 285 |
+
{"model_type":"qwen3_omni_moe","pytorch":true,"processor":"AutoProcessor"}
|
| 286 |
{"model_type":"qwen3_vl","pytorch":true,"processor":"AutoProcessor"}
|
| 287 |
{"model_type":"qwen3_vl_moe","pytorch":true,"processor":"AutoProcessor"}
|
| 288 |
{"model_type":"qwen3_vl_moe_text","pytorch":true,"processor":"AutoTokenizer"}
|
pipeline_tags.json
CHANGED
|
@@ -944,6 +944,7 @@
|
|
| 944 |
{"model_class":"Qwen3NextForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
|
| 945 |
{"model_class":"Qwen3NextForTokenClassification","pipeline_tag":"token-classification","auto_class":"AutoModelForTokenClassification"}
|
| 946 |
{"model_class":"Qwen3NextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
|
|
|
| 947 |
{"model_class":"Qwen3VLForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForVision2Seq"}
|
| 948 |
{"model_class":"Qwen3VLModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 949 |
{"model_class":"Qwen3VLMoeForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForVision2Seq"}
|
|
|
|
| 944 |
{"model_class":"Qwen3NextForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
|
| 945 |
{"model_class":"Qwen3NextForTokenClassification","pipeline_tag":"token-classification","auto_class":"AutoModelForTokenClassification"}
|
| 946 |
{"model_class":"Qwen3NextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 947 |
+
{"model_class":"Qwen3OmniMoeForConditionalGeneration","pipeline_tag":"text-to-audio","auto_class":"AutoModelForTextToWaveform"}
|
| 948 |
{"model_class":"Qwen3VLForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForVision2Seq"}
|
| 949 |
{"model_class":"Qwen3VLModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
| 950 |
{"model_class":"Qwen3VLMoeForConditionalGeneration","pipeline_tag":"image-to-text","auto_class":"AutoModelForVision2Seq"}
|