lysandre HF staff committed on
Commit
e67e951
1 Parent(s): e6ed94e

Update with commit 3b742ea84cfc32432d60c0b65c886576ef736833

Browse files

See: https://github.com/huggingface/transformers/commit/3b742ea84cfc32432d60c0b65c886576ef736833

Files changed (2) hide show
  1. frameworks.json +2 -0
  2. pipeline_tags.json +2 -0
frameworks.json CHANGED
@@ -164,6 +164,8 @@
164
  {"model_type":"segformer","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoImageProcessor"}
165
  {"model_type":"sew","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
166
  {"model_type":"sew-d","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 
 
167
  {"model_type":"speech-encoder-decoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
168
  {"model_type":"speech_to_text","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoProcessor"}
169
  {"model_type":"speecht5","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 
164
  {"model_type":"segformer","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoImageProcessor"}
165
  {"model_type":"sew","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
166
  {"model_type":"sew-d","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
167
+ {"model_type":"siglip","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
168
+ {"model_type":"siglip_vision_model","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
169
  {"model_type":"speech-encoder-decoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
170
  {"model_type":"speech_to_text","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoProcessor"}
171
  {"model_type":"speecht5","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -661,6 +661,8 @@
661
  {"model_class":"SeamlessM4Tv2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
662
  {"model_class":"SegformerForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
663
  {"model_class":"SegformerModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 
 
664
  {"model_class":"Speech2Text2ForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
665
  {"model_class":"Speech2TextForConditionalGeneration","pipeline_tag":"automatic-speech-recognition","auto_class":"AutoModelForSpeechSeq2Seq"}
666
  {"model_class":"Speech2TextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 
661
  {"model_class":"SeamlessM4Tv2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
662
  {"model_class":"SegformerForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
663
  {"model_class":"SegformerModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
664
+ {"model_class":"SiglipModel","pipeline_tag":"zero-shot-image-classification","auto_class":"AutoModelForZeroShotImageClassification"}
665
+ {"model_class":"SiglipVisionModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
666
  {"model_class":"Speech2Text2ForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
667
  {"model_class":"Speech2TextForConditionalGeneration","pipeline_tag":"automatic-speech-recognition","auto_class":"AutoModelForSpeechSeq2Seq"}
668
  {"model_class":"Speech2TextModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}