sgugger committed
Commit 723a0a7
1 Parent(s): b08a3b9

Update with commit d25e25ee2b63ebfcd099deb689a5a7272574a10f


See: https://github.com/huggingface/transformers/commit/d25e25ee2b63ebfcd099deb689a5a7272574a10f

Files changed (2)
  1. frameworks.json +1 -0
  2. pipeline_tags.json +4 -0
frameworks.json CHANGED
@@ -78,6 +78,7 @@
  {"model_type":"vit_mae","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
  {"model_type":"wavlm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+ {"model_type":"xglm","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
  {"model_type":"xlm","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"xlm-prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"xlm-roberta","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
pipeline_tags.json CHANGED
@@ -194,6 +194,8 @@
  {"model_class":"FlaxVisionTextDualEncoderModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
  {"model_class":"FlaxWav2Vec2ForPreTraining","pipeline_tag":"pretraining","auto_class":"Flax_AutoModelForPreTraining"}
  {"model_class":"FlaxWav2Vec2Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
+ {"model_class":"FlaxXGLMForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
+ {"model_class":"FlaxXGLMModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
  {"model_class":"FunnelBaseModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"FunnelForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
  {"model_class":"FunnelForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"AutoModelForMultipleChoice"}
@@ -571,6 +573,8 @@
  {"model_class":"WavLMForSequenceClassification","pipeline_tag":"audio-classification","auto_class":"AutoModelForAudioClassification"}
  {"model_class":"WavLMForXVector","pipeline_tag":"audio-xvector","auto_class":"AutoModelForAudioXVector"}
  {"model_class":"WavLMModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"XGLMForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
+ {"model_class":"XGLMModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"XLMForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"AutoModelForMultipleChoice"}
  {"model_class":"XLMForQuestionAnsweringSimple","pipeline_tag":"question-answering","auto_class":"AutoModelForQuestionAnswering"}
  {"model_class":"XLMForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}