Update with commit 12d66b47012c9258f9557e6d3a0c13bcd1c72871
See: https://github.com/huggingface/transformers/commit/12d66b47012c9258f9557e6d3a0c13bcd1c72871
- frameworks.json +1 -0
- pipeline_tags.json +1 -0
frameworks.json CHANGED
@@ -67,6 +67,7 @@
 {"model_type":"nystromformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"openai-gpt","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"opt","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
+{"model_type":"owlvit","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"pegasus","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"perceiver","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"plbart","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
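The added row records that owlvit ships PyTorch weights only and is preprocessed with AutoProcessor (a combined image/text processor) rather than AutoTokenizer. Below is a minimal sketch of what consuming that entry looks like; the checkpoint name google/owlvit-base-patch32 and the sample image URL are illustrative assumptions, not part of this diff.

```python
import requests
from PIL import Image
from transformers import AutoProcessor

# "processor": "AutoProcessor" -> one class prepares both the query text and the image.
# Checkpoint name and image URL are assumptions chosen for illustration.
processor = AutoProcessor.from_pretrained("google/owlvit-base-patch32")

url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)
inputs = processor(text=["a photo of a cat"], images=image, return_tensors="pt")
print(sorted(inputs.keys()))  # expect attention_mask, input_ids, pixel_values
```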
pipeline_tags.json CHANGED
@@ -374,6 +374,7 @@
 {"model_class":"OpenAIGPTForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
 {"model_class":"OpenAIGPTLMHeadModel","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
 {"model_class":"OpenAIGPTModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+{"model_class":"OwlViTModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"PLBartForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
 {"model_class":"PLBartForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
 {"model_class":"PLBartForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
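The pipeline_tags.json addition maps OwlViTModel to AutoModel under the feature-extraction tag. A minimal sketch of that AutoClass resolution, again assuming the google/owlvit-base-patch32 checkpoint purely for illustration:

```python
from transformers import AutoModel

# The new metadata row ties OwlViTModel to AutoModel ("feature-extraction").
# The checkpoint name is an assumption, not part of the diff.
model = AutoModel.from_pretrained("google/owlvit-base-patch32")
print(type(model).__name__)  # expected: OwlViTModel
```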