Update with commit f7076cd346f48aee850b8c54e6e129c33a404308
Browse files. See: https://github.com/huggingface/transformers/commit/f7076cd346f48aee850b8c54e6e129c33a404308
- frameworks.json +1 -1
- pipeline_tags.json +2 -0
frameworks.json
CHANGED
@@ -110,7 +110,7 @@
|
|
110 |
{"model_type":"mega","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
111 |
{"model_type":"megatron-bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
112 |
{"model_type":"mgp-str","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
|
113 |
-
{"model_type":"mistral","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
114 |
{"model_type":"mixtral","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
115 |
{"model_type":"mobilebert","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
116 |
{"model_type":"mobilenet_v1","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
|
|
|
110 |
{"model_type":"mega","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
111 |
{"model_type":"megatron-bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
112 |
{"model_type":"mgp-str","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
|
113 |
+
{"model_type":"mistral","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
|
114 |
{"model_type":"mixtral","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
115 |
{"model_type":"mobilebert","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
116 |
{"model_type":"mobilenet_v1","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
|
pipeline_tags.json
CHANGED
@@ -285,6 +285,8 @@
|
|
285 |
{"model_class":"FlaxMT5Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
286 |
{"model_class":"FlaxMarianMTModel","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
287 |
{"model_class":"FlaxMarianModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
|
|
|
|
288 |
{"model_class":"FlaxOPTForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
289 |
{"model_class":"FlaxOPTModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
290 |
{"model_class":"FlaxPegasusForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
|
|
285 |
{"model_class":"FlaxMT5Model","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
286 |
{"model_class":"FlaxMarianMTModel","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|
287 |
{"model_class":"FlaxMarianModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
288 |
+
{"model_class":"FlaxMistralForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
289 |
+
{"model_class":"FlaxMistralModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
290 |
{"model_class":"FlaxOPTForCausalLM","pipeline_tag":"text-generation","auto_class":"Flax_AutoModelForCausalLM"}
|
291 |
{"model_class":"FlaxOPTModel","pipeline_tag":"feature-extraction","auto_class":"Flax_AutoModel"}
|
292 |
{"model_class":"FlaxPegasusForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"Flax_AutoModelForSeq2SeqLM"}
|