Update with commit f3d2f7a6e08efe18debf59512325f02128394b43
See: https://github.com/huggingface/transformers/commit/f3d2f7a6e08efe18debf59512325f02128394b43
- frameworks.json +1 -0
- pipeline_tags.json +4 -0
frameworks.json CHANGED
@@ -59,6 +59,7 @@
 {"model_type":"lxmert","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"m2m_100","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"marian","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
+{"model_type":"markuplm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"maskformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
 {"model_type":"mbart","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"mctct","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
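For context (not part of the commit): the added frameworks.json row records that markuplm has PyTorch weights only (no TensorFlow or Flax) and is preprocessed through AutoProcessor. Below is a minimal Python sketch of what that metadata implies when loading the model; the checkpoint name and HTML snippet are assumptions for illustration, and MarkupLM's HTML parsing additionally needs beautifulsoup4 installed.

from transformers import AutoModel, AutoProcessor

checkpoint = "microsoft/markuplm-base"  # assumed checkpoint, for illustration only

# Per the frameworks.json entry, preprocessing goes through AutoProcessor
# (which should resolve to MarkupLMProcessor for model_type "markuplm").
processor = AutoProcessor.from_pretrained(checkpoint)

# PyTorch-only per the entry (pytorch: true, tensorflow/flax: false).
model = AutoModel.from_pretrained(checkpoint)

html = "<html><body><h1>Hello</h1><p>MarkupLM encodes web pages.</p></body></html>"
inputs = processor(html, return_tensors="pt")  # produces input_ids plus xpath features
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)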
pipeline_tags.json CHANGED
@@ -350,6 +350,10 @@
 {"model_class":"MarianForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
 {"model_class":"MarianMTModel","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
 {"model_class":"MarianModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"MarkupLMForQuestionAnswering","pipeline_tag":"question-answering","auto_class":"AutoModelForQuestionAnswering"}
+{"model_class":"MarkupLMForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
+{"model_class":"MarkupLMForTokenClassification","pipeline_tag":"token-classification","auto_class":"AutoModelForTokenClassification"}
+{"model_class":"MarkupLMModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"MaskFormerModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"MegatronBertForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
 {"model_class":"MegatronBertForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
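For context (not part of the commit): each added pipeline_tags.json row pairs a MarkupLM head class with the pipeline tag it is listed under and the Auto class used to instantiate it. A hedged Python sketch of that mapping follows; the checkpoint name is an assumption, and task heads not present in the base checkpoint will be randomly initialized (transformers will warn accordingly).

from transformers import (
    AutoModel,
    AutoModelForQuestionAnswering,
    AutoModelForSequenceClassification,
    AutoModelForTokenClassification,
)

checkpoint = "microsoft/markuplm-base"  # assumed checkpoint, for illustration only

# One Auto class per added row, matching its pipeline_tag:
base = AutoModel.from_pretrained(checkpoint)                              # feature-extraction
qa = AutoModelForQuestionAnswering.from_pretrained(checkpoint)            # question-answering
seq_cls = AutoModelForSequenceClassification.from_pretrained(checkpoint)  # text-classification
tok_cls = AutoModelForTokenClassification.from_pretrained(checkpoint)     # token-classification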