MambaInLlama_0_75:
  prompt_template: "zephyr-7b-alpha/prompt.txt"
  fn_completions: "huggingface_local_completions"
  completions_kwargs:
    model_name: "JunxiongWang/MambaInLlama_0_75"
    model_kwargs:
      torch_dtype: 'bfloat16'
    max_new_tokens: 2048
    temperature: 0.7
    top_p: 1.0
    do_sample: True
  pretty_name: "Mamba 0.75 from meta-llama/Meta-Llama-3-8B-Instruct"
  link: "https://huggingface.co/JunxiongWang/MambaInLlama_0_75/"
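
# Usage sketch (an assumption, not part of the config itself): this entry follows the
# AlpacaEval models_configs layout, so if it is placed in that directory the model can be
# run with the standard AlpacaEval CLI, e.g.
#
#   alpaca_eval evaluate_from_model --model_configs 'MambaInLlama_0_75'
#
# which generates completions locally via huggingface_local_completions using the
# sampling parameters above, then annotates them with the default evaluator.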