sgugger committed
Commit 632c651
1 Parent(s): cf8bebf

Update with commit c72d7d91bf4899760725793421eff9da640c8527


See: https://github.com/huggingface/transformers/commit/c72d7d91bf4899760725793421eff9da640c8527

Files changed (2)
  1. frameworks.json +1 -1
  2. pipeline_tags.json +2 -0
frameworks.json CHANGED
@@ -111,7 +111,7 @@
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
  {"model_type":"wav2vec2-conformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"wavlm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
- {"model_type":"xglm","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
+ {"model_type":"xglm","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
  {"model_type":"xlm","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"xlm-prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"xlm-roberta","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
pipeline_tags.json CHANGED
@@ -643,6 +643,8 @@
  {"model_class":"TFViTMAEModel","pipeline_tag":"feature-extraction","auto_class":"TF_AutoModel"}
  {"model_class":"TFViTModel","pipeline_tag":"feature-extraction","auto_class":"TF_AutoModel"}
  {"model_class":"TFWav2Vec2Model","pipeline_tag":"feature-extraction","auto_class":"TF_AutoModel"}
+ {"model_class":"TFXGLMForCausalLM","pipeline_tag":"text-generation","auto_class":"TF_AutoModelForCausalLM"}
+ {"model_class":"TFXGLMModel","pipeline_tag":"feature-extraction","auto_class":"TF_AutoModel"}
  {"model_class":"TFXLMForMultipleChoice","pipeline_tag":"multiple-choice","auto_class":"TF_AutoModelForMultipleChoice"}
  {"model_class":"TFXLMForQuestionAnsweringSimple","pipeline_tag":"question-answering","auto_class":"TF_AutoModelForQuestionAnswering"}
  {"model_class":"TFXLMForSequenceClassification","pipeline_tag":"text-classification","auto_class":"TF_AutoModelForSequenceClassification"}