sgugger committed on
Commit
904a58a
1 Parent(s): 64eaf4b

Update with commit 2d9853b22622e9c5017241a14cda415d6bca13a2


See: https://github.com/huggingface/transformers/commit/2d9853b22622e9c5017241a14cda415d6bca13a2

Files changed (2)
  1. frameworks.json +1 -0
  2. pipeline_tags.json +2 -0
frameworks.json CHANGED
@@ -113,6 +113,7 @@
  {"model_type":"visual_bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"vit","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoFeatureExtractor"}
  {"model_type":"vit_mae","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoFeatureExtractor"}
+ {"model_type":"vit_msn","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
  {"model_type":"wav2vec2-conformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"wavlm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -712,6 +712,8 @@
  {"model_class":"ViTForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
  {"model_class":"ViTMAEForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
  {"model_class":"ViTMAEModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"ViTMSNForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
+ {"model_class":"ViTMSNModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"ViTModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"VideoMAEForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
  {"model_class":"VideoMAEModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}