{ "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct", "architectures": [ "MLPSpeculatorPreTrainedModel" ], "emb_dim": 4096, "inner_dim": 3072, "model_type": "mlp_speculator", "n_candidates": 5, "n_predict": 4, "top_k_tokens_per_head": [ 4, 3, 2, 2 ], "torch_dtype": "float16", "transformers_version": "4.38.2", "vocab_size": 128256 }