p2l-135m-bt-01132025 / model_list.json
[
"amazon-nova-lite-v1.0",
"amazon-nova-micro-v1.0",
"amazon-nova-pro-v1.0",
"athene-70b-0725",
"athene-v2-chat",
"c4ai-aya-expanse-32b",
"c4ai-aya-expanse-8b",
"chatgpt-4o-latest-20240808",
"chatgpt-4o-latest-20240903",
"chatgpt-4o-latest-20241120",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
"claude-3-haiku-20240307",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"codestral-2405",
"command-r",
"command-r-08-2024",
"command-r-plus",
"command-r-plus-08-2024",
"dbrx-instruct-preview",
"deepseek-coder-v2",
"deepseek-coder-v2-0724",
"deepseek-v2-api-0628",
"deepseek-v2.5",
"deepseek-v2.5-1210",
"deepseek-v3",
"gemini-1.5-flash-001",
"gemini-1.5-flash-002",
"gemini-1.5-flash-8b-001",
"gemini-1.5-flash-8b-exp-0827",
"gemini-1.5-flash-exp-0827",
"gemini-1.5-pro-001",
"gemini-1.5-pro-002",
"gemini-1.5-pro-api-0409-preview",
"gemini-1.5-pro-exp-0801",
"gemini-1.5-pro-exp-0827",
"gemini-2.0-flash-exp",
"gemini-2.0-flash-thinking-exp-1219",
"gemini-advanced-0514",
"gemini-exp-1114",
"gemini-exp-1121",
"gemini-exp-1206",
"gemma-1.1-2b-it",
"gemma-1.1-7b-it",
"gemma-2-27b-it",
"gemma-2-2b-it",
"gemma-2-9b-it",
"gemma-2-9b-it-simpo",
"glm-4-0116",
"glm-4-0520",
"glm-4-plus",
"gpt-3.5-turbo-0125",
"gpt-4-0125-preview",
"gpt-4-0314",
"gpt-4-0613",
"gpt-4-1106-preview",
"gpt-4-turbo-2024-04-09",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
"gpt-4o-mini-2024-07-18",
"granite-3.0-2b-instruct",
"granite-3.0-8b-instruct",
"grok-2-2024-08-13",
"grok-2-mini-2024-08-13",
"hunyuan-standard-256k",
"internlm2_5-20b-chat",
"jamba-1.5-large",
"jamba-1.5-mini",
"llama-2-13b-chat",
"llama-2-70b-chat",
"llama-3-70b-instruct",
"llama-3-8b-instruct",
"llama-3.1-405b-instruct-bf16",
"llama-3.1-405b-instruct-fp8",
"llama-3.1-70b-instruct",
"llama-3.1-8b-instruct",
"llama-3.1-nemotron-51b-instruct",
"llama-3.1-nemotron-70b-instruct",
"llama-3.1-tulu-3-70b",
"llama-3.1-tulu-3-8b",
"llama-3.2-1b-instruct",
"llama-3.2-3b-instruct",
"llama-3.3-70b-instruct",
"ministral-8b-2410",
"mistral-7b-instruct-v0.2",
"mistral-large-2402",
"mistral-large-2407",
"mistral-large-2411",
"mistral-medium",
"mixtral-8x22b-instruct-v0.1",
"mixtral-8x7b-instruct-v0.1",
"nemotron-4-340b-instruct",
"o1-2024-12-17",
"o1-mini",
"o1-preview",
"phi-3-medium-4k-instruct",
"phi-3-mini-128k-instruct",
"phi-3-mini-4k-instruct",
"phi-3-mini-4k-instruct-june-2024",
"phi-3-small-8k-instruct",
"qwen-max-0428",
"qwen-max-0919",
"qwen-plus-0828",
"qwen1.5-110b-chat",
"qwen1.5-14b-chat",
"qwen1.5-32b-chat",
"qwen1.5-72b-chat",
"qwen2-72b-instruct",
"qwen2.5-72b-instruct",
"qwen2.5-coder-32b-instruct",
"qwen2.5-plus-1127",
"qwq-32b-preview",
"reka-core-20240501",
"reka-core-20240722",
"reka-core-20240904",
"reka-flash-20240722",
"reka-flash-20240904",
"reka-flash-21b-20240226",
"reka-flash-21b-20240226-online",
"reka-flash-preview-20240611",
"smollm2-1.7b-instruct",
"snowflake-arctic-instruct",
"yi-1.5-34b-chat",
"yi-34b-chat",
"yi-large",
"yi-large-preview",
"yi-lightning",
"yi-lightning-lite"
]
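
A minimal sketch of how this file could be fetched and parsed with `huggingface_hub`; the repo id below is an assumption inferred from the page path and is not confirmed here, so adjust it to the actual repository.

```python
# Sketch: download model_list.json from the Hub and load it as a Python list.
# The repo_id is an assumption (not confirmed by this page); replace as needed.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="lmarena-ai/p2l-135m-bt-01132025",  # assumed repo id
    filename="model_list.json",
)

with open(path, encoding="utf-8") as f:
    model_list = json.load(f)

# The file is a flat JSON array of model name strings.
print(len(model_list), "models, e.g.:", model_list[:3])
```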