lysandre (HF staff) committed
Commit 1a02c45
1 Parent(s): a27d2e8

Update with commit c7f076a00ee54f777b3d3322c91bc11489a47950


See: https://github.com/huggingface/transformers/commit/c7f076a00ee54f777b3d3322c91bc11489a47950

Files changed (2)
  1. frameworks.json +1 -0
  2. pipeline_tags.json +1 -0
frameworks.json CHANGED
@@ -191,6 +191,7 @@
 {"model_type":"van","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
 {"model_type":"videomae","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
 {"model_type":"vilt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
+{"model_type":"vipllava","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"vision-encoder-decoder","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"vision-text-dual-encoder","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
 {"model_type":"visual_bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
pipeline_tags.json CHANGED
@@ -971,6 +971,7 @@
 {"model_class":"VideoMAEModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"ViltForQuestionAnswering","pipeline_tag":"visual-question-answering","auto_class":"AutoModelForVisualQuestionAnswering"}
 {"model_class":"ViltModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"VipLlavaForConditionalGeneration","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
 {"model_class":"VisionTextDualEncoderModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"VisualBertForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
 {"model_class":"VisualBertModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}