sgugger committed on
Commit
f342524
1 Parent(s): 436c5a2

Update with commit d151a8c55032d5a21800ea0813c4304af8b8e9f7

Browse files

See: https://github.com/huggingface/transformers/commit/d151a8c55032d5a21800ea0813c4304af8b8e9f7

Files changed (2) hide show
  1. frameworks.json +2 -0
  2. pipeline_tags.json +4 -0
frameworks.json CHANGED
@@ -7,6 +7,7 @@
7
  {"model_type":"big_bird","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
8
  {"model_type":"bigbird_pegasus","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
9
  {"model_type":"biogpt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 
10
  {"model_type":"blenderbot","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
11
  {"model_type":"blenderbot-small","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
12
  {"model_type":"bloom","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
@@ -129,6 +130,7 @@
129
  {"model_type":"vision-text-dual-encoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoProcessor"}
130
  {"model_type":"visual_bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
131
  {"model_type":"vit","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoFeatureExtractor"}
 
132
  {"model_type":"vit_mae","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoFeatureExtractor"}
133
  {"model_type":"vit_msn","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
134
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
 
7
  {"model_type":"big_bird","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
8
  {"model_type":"bigbird_pegasus","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
9
  {"model_type":"biogpt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
10
+ {"model_type":"bit","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
11
  {"model_type":"blenderbot","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
12
  {"model_type":"blenderbot-small","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
13
  {"model_type":"bloom","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 
130
  {"model_type":"vision-text-dual-encoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoProcessor"}
131
  {"model_type":"visual_bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
132
  {"model_type":"vit","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoFeatureExtractor"}
133
+ {"model_type":"vit_hybrid","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
134
  {"model_type":"vit_mae","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoFeatureExtractor"}
135
  {"model_type":"vit_msn","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
136
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -40,6 +40,8 @@
40
  {"model_class":"BigBirdPegasusModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
41
  {"model_class":"BioGptForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
42
  {"model_class":"BioGptModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 
 
43
  {"model_class":"BlenderbotForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
44
  {"model_class":"BlenderbotForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
45
  {"model_class":"BlenderbotModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
@@ -766,6 +768,8 @@
766
  {"model_class":"VanForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
767
  {"model_class":"VanModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
768
  {"model_class":"ViTForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
 
 
769
  {"model_class":"ViTMAEForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
770
  {"model_class":"ViTMAEModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
771
  {"model_class":"ViTMSNForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
 
40
  {"model_class":"BigBirdPegasusModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
41
  {"model_class":"BioGptForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
42
  {"model_class":"BioGptModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
43
+ {"model_class":"BitForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
44
+ {"model_class":"BitModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
45
  {"model_class":"BlenderbotForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
46
  {"model_class":"BlenderbotForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
47
  {"model_class":"BlenderbotModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 
768
  {"model_class":"VanForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
769
  {"model_class":"VanModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
770
  {"model_class":"ViTForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
771
+ {"model_class":"ViTHybridForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
772
+ {"model_class":"ViTHybridModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
773
  {"model_class":"ViTMAEForPreTraining","pipeline_tag":"pretraining","auto_class":"AutoModelForPreTraining"}
774
  {"model_class":"ViTMAEModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
775
  {"model_class":"ViTMSNForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}