Commit 3c5639e
Committed by lysandre (HF staff)
1 parent: 2333559

Update with commit bd9f4d79517a3ad2f9da999d090dc3bbfc506dc4


See: https://github.com/huggingface/transformers/commit/bd9f4d79517a3ad2f9da999d090dc3bbfc506dc4

Files changed (2):
  1. frameworks.json +1 -0
  2. pipeline_tags.json +1 -0
frameworks.json CHANGED
@@ -216,6 +216,7 @@
  {"model_type":"univnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
  {"model_type":"upernet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
  {"model_type":"van","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
+ {"model_type":"video_llava","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"videomae","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
  {"model_type":"vilt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"vipllava","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -1037,6 +1037,7 @@
  {"model_class":"ViTMSNForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
  {"model_class":"ViTMSNModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
  {"model_class":"ViTModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"VideoLlavaForConditionalGeneration","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
  {"model_class":"VideoMAEForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
  {"model_class":"VideoMAEForVideoClassification","pipeline_tag":"video-classification","auto_class":"AutoModelForVideoClassification"}
  {"model_class":"VideoMAEModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}