lysandre (HF Staff) committed · verified

Commit aaa21c4 · Parent: d47d5ec

Update with commit 4b8c6d4cf8c779bf0895deb980669f5b2cb5d182


See: https://github.com/huggingface/transformers/commit/4b8c6d4cf8c779bf0895deb980669f5b2cb5d182

Files changed (2)
  1. frameworks.json +1 -0
  2. pipeline_tags.json +1 -0
frameworks.json CHANGED
@@ -215,6 +215,7 @@
  {"model_type":"pvt_v2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
  {"model_type":"qdqbert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"qwen2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+ {"model_type":"qwen2_5_omni","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"qwen2_5_vl","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"qwen2_audio","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"qwen2_moe","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
pipeline_tags.json CHANGED
@@ -776,6 +776,7 @@
  {"model_class":"Qwen2MoeModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"Qwen2VLForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
  {"model_class":"Qwen2VLModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"Qwen2_5OmniForConditionalGeneration","pipeline_tag":"text-to-audio","auto_class":"AutoModelForTextToWaveform"}
  {"model_class":"Qwen2_5_VLForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
  {"model_class":"Qwen2_5_VLModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"Qwen3ForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}