{ "_name_or_path": "fixie-ai/ultravox-v0.2", "architectures": [ "UltravoxModel" ], "audio_config": { "_name_or_path": "openai/whisper-small", "activation_dropout": 0.0, "activation_function": "gelu", "apply_spec_augment": false, "architectures": [ "WhisperForConditionalGeneration" ], "attention_dropout": 0.0, "begin_suppress_tokens": [ 220, 50257 ], "bos_token_id": 50257, "d_model": 768, "decoder_attention_heads": 12, "decoder_ffn_dim": 3072, "decoder_layerdrop": 0.0, "decoder_layers": 12, "decoder_start_token_id": 50258, "dropout": 0.0, "encoder_attention_heads": 12, "encoder_ffn_dim": 3072, "encoder_layerdrop": 0.0, "encoder_layers": 12, "eos_token_id": 50257, "forced_decoder_ids": [ [ 1, 50259 ], [ 2, 50359 ], [ 3, 50363 ] ], "init_std": 0.02, "is_encoder_decoder": true, "max_length": 448, "max_source_positions": 1500, "max_target_positions": 448, "median_filter_width": 7, "model_type": "whisper", "num_hidden_layers": 12, "num_mel_bins": 80, "pad_token_id": 50257, "scale_embedding": false, "suppress_tokens": [ 1, 2, 7, 8, 9, 10, 14, 25, 26, 27, 28, 29, 31, 58, 59, 60, 61, 62, 63, 90, 91, 92, 93, 359, 503, 522, 542, 873, 893, 902, 918, 922, 931, 1350, 1853, 1982, 2460, 2627, 3246, 3253, 3268, 3536, 3846, 3961, 4183, 4667, 6585, 6647, 7273, 9061, 9383, 10428, 10929, 11938, 12033, 12331, 12562, 13793, 14157, 14635, 15265, 15618, 16553, 16604, 18362, 18956, 20075, 21675, 22520, 26130, 26161, 26435, 28279, 29464, 31650, 32302, 32470, 36865, 42863, 47425, 49870, 50254, 50258, 50360, 50361, 50362 ], "torch_dtype": "float32", "use_cache": true, "vocab_size": 51865 }, "audio_model_id": "openai/whisper-small", "audio_token_index": 32000, "auto_map": { "AutoConfig": "ultravox_config.UltravoxConfig", "AutoModel": "ultravox_model.UltravoxModel" }, "custom_pipelines": { "ultravox-pipeline": { "default": { "model": { "pt": [ "fixie-ai/ultravox-v0.2", "main" ] } }, "impl": "ultravox_pipeline.UltravoxPipeline", "pt": ["AutoModel"], "tf": [], "type": "multimodal" } }, "hidden_size": 4096, "ignore_index": -100, "initializer_range": 0.02, "model_type": "ultravox", "norm_init": 0.4, "projector_act": "swiglu", "stack_factor": 8, "text_config": { "_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct", "architectures": [ "LlamaForCausalLM" ], "bos_token_id": 128000, "eos_token_id": 128009, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 500000.0, "torch_dtype": "bfloat16", "vocab_size": 128256 }, "text_model_id": null, "torch_dtype": "bfloat16", "transformers_version": "4.41.2", "vocab_size": 128256 }