Update with commit 2515a5a29045fcec3935be25fd4e6f0475759b53
See: https://github.com/huggingface/transformers/commit/2515a5a29045fcec3935be25fd4e6f0475759b53
- frameworks.json +1 -0
- pipeline_tags.json +1 -0
frameworks.json
CHANGED
@@ -20,6 +20,7 @@
 {"model_type":"blenderbot-small","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"blip","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"blip-2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
+{"model_type":"blip_2_qformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"bloom","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"bridgetower","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"bros","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
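The new frameworks.json row declares that the blip_2_qformer model type is PyTorch-only (no TensorFlow or Flax support) and pairs with AutoTokenizer as its preprocessing class. As a minimal sketch of how such a JSON-lines entry can be consumed, the snippet below scans the file and returns the row for a given model type; the local file path and the helper name are assumptions for illustration, not part of the commit.

import json

def load_framework_entry(path: str, model_type: str) -> dict | None:
    """Scan a JSON-lines frameworks file and return the row for `model_type`.

    Assumes one JSON object per line, as in frameworks.json above.
    """
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            row = json.loads(line)
            if row.get("model_type") == model_type:
                return row
    return None

# Hypothetical local path; adjust to wherever frameworks.json is stored.
entry = load_framework_entry("frameworks.json", "blip_2_qformer")
if entry is not None:
    # Per the new row: pytorch=True, tensorflow=False, flax=False, processor="AutoTokenizer"
    print(entry["processor"], entry["pytorch"])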
pipeline_tags.json
CHANGED
@@ -63,6 +63,7 @@
 {"model_class":"Blip2ForConditionalGeneration","pipeline_tag":"visual-question-answering","auto_class":"AutoModelForVisualQuestionAnswering"}
 {"model_class":"Blip2ForImageTextRetrieval","pipeline_tag":"zero-shot-image-classification","auto_class":"AutoModelForZeroShotImageClassification"}
 {"model_class":"Blip2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"Blip2QFormerModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"BlipForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
 {"model_class":"BlipForQuestionAnswering","pipeline_tag":"visual-question-answering","auto_class":"AutoModelForVisualQuestionAnswering"}
 {"model_class":"BlipModel","pipeline_tag":"zero-shot-image-classification","auto_class":"AutoModelForZeroShotImageClassification"}
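The pipeline_tags.json addition maps the Blip2QFormerModel class to the feature-extraction pipeline tag and the AutoModel auto class. The sketch below builds the class-to-tag lookup from the JSON-lines file and notes the kind of loading call the auto_class field points at; the file path is assumed and the checkpoint name is a placeholder, neither is specified by this commit.

import json

def load_pipeline_tags(path: str) -> dict[str, tuple[str, str]]:
    """Build a lookup from model class name to (pipeline_tag, auto_class),
    reading the JSON-lines pipeline_tags.json shown above."""
    mapping: dict[str, tuple[str, str]] = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                row = json.loads(line)
                mapping[row["model_class"]] = (row["pipeline_tag"], row["auto_class"])
    return mapping

tags = load_pipeline_tags("pipeline_tags.json")
print(tags["Blip2QFormerModel"])  # expected: ("feature-extraction", "AutoModel")

# An auto_class of "AutoModel" means the Q-Former is loaded generically, e.g.:
# from transformers import AutoModel
# model = AutoModel.from_pretrained("<blip-2-qformer-checkpoint>")  # placeholder checkpoint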