| from dataclasses import dataclass |
| from datetime import datetime |
| from enum import Enum |
|
|
|
|
class LMStyle(Enum):
    """Closed set of model styles/families used to tag each ``LanguageModel``.

    Each member's value mirrors its name exactly; the value is what gets
    serialized (see ``LanguageModel.to_dict``). Members appear to select how a
    model is prompted/queried downstream — verify against the consumers of
    ``model_style``.
    """

    # OpenAI chat and reasoning families.
    OpenAIChat = "OpenAIChat"
    OpenAIReasonPreview = "OpenAIReasonPreview"
    OpenAIReason = "OpenAIReason"

    # Anthropic Claude generations (with and without extended thinking).
    Claude = "Claude"
    Claude3 = "Claude3"
    Claude3Thinking = "Claude3Thinking"

    # Google Gemini and xAI Grok.
    Gemini = "Gemini"
    GeminiThinking = "GeminiThinking"
    Grok = "Grok"

    # Other hosted chat APIs.
    MistralWeb = "MistralWeb"
    CohereCommand = "CohereCommand"

    DataBricks = "DataBricks"
    DeepSeekAPI = "DeepSeekAPI"

    # Base (non-instruct) checkpoints share one generic style.
    GenericBase = "GenericBase"

    # Open-weight instruct/chat families.
    DeepSeekCodeInstruct = "DeepSeekCodeInstruct"
    CodeLLaMaInstruct = "CodeLLaMaInstruct"
    StarCoderInstruct = "StarCoderInstruct"
    CodeQwenInstruct = "CodeQwenInstruct"
    QwQ = "QwQ"
    LLaMa3 = "LLaMa3"
    DeepSeekR1 = "DeepSeekR1"

    # Models served through the Together AI API.
    TogetherAI = "TogetherAI"
|
|
|
|
@dataclass
class LanguageModel:
    """Metadata record for a single language model.

    Attributes:
        model_name: Unique identifier; used as the hash key and as the key
            in ``LanguageModelStore``.
        model_repr: Human-readable display name.
        model_style: Style/family tag from :class:`LMStyle`.
        release_date: Release date, or ``None`` when unknown.
        link: Optional URL with more information about the model.
    """

    model_name: str
    model_repr: str
    model_style: LMStyle
    release_date: datetime | None
    link: str | None = None

    def __hash__(self) -> int:
        # Hash only on the unique model name so instances can be used as
        # dict/set keys despite the dataclass being mutable.
        return hash(self.model_name)

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict.

        ``release_date`` is emitted as epoch milliseconds, or ``None`` when
        unknown. (Previously a ``None`` release date raised
        ``AttributeError`` despite the field being declared optional.)
        """
        return {
            "model_name": self.model_name,
            "model_repr": self.model_repr,
            "model_style": self.model_style.value,
            "release_date": (
                int(self.release_date.timestamp() * 1000)
                if self.release_date is not None
                else None
            ),
            "link": self.link,
        }
|
|
|
|
# Registry of every known model, grouped by provider/family. Order is
# significant only in that ``LanguageModelStore`` keeps the last entry on a
# duplicate ``model_name``.
# NOTE(review): many release dates look like placeholders (e.g. 2023-01-01)
# rather than true release dates — verify before relying on them.
LanguageModelList: list[LanguageModel] = [
    # --- Meta Llama 3 / 3.1 / 3.3 ---
    LanguageModel(
        "meta-llama/Meta-Llama-3-8B",
        "LLama3-8b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Meta-Llama-3-8B",
    ),
    LanguageModel(
        "meta-llama/Meta-Llama-3-70B",
        "LLama3-70b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Meta-Llama-3-70B",
    ),
    LanguageModel(
        "meta-llama/Meta-Llama-3-8B-Instruct",
        "LLama3-8b-Ins",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct",
    ),
    LanguageModel(
        "meta-llama/Meta-Llama-3-70B-Instruct",
        "LLama3-70b-Ins",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-8B",
        "LLama3.1-8b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-8B",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-70B",
        "LLama3.1-70b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-70B",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-405B-FP8",
        "LLama3.1-405b-Base-FP8",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-405B-FP8",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-8B-Instruct",
        "LLama3.1-8b-Ins",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-70B-Instruct",
        "LLama3.1-70b-Ins",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-70B-Instruct",
    ),
    LanguageModel(
        "meta-llama/Llama-3.1-405B-Instruct-FP8",
        "LLama3.1-405b-Ins-FP8",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.1-405B-Instruct-FP8",
    ),
    LanguageModel(
        "meta-llama/Llama-3.3-70B-Instruct",
        "LLama3.3-70b-Ins",
        LMStyle.LLaMa3,
        datetime(2023, 1, 1),
        link="https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct",
    ),
    # --- DeepSeek Coder (open weights) ---
    LanguageModel(
        "deepseek-ai/deepseek-coder-33b-base",
        "DSCoder-33b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-33b-base",
    ),
    LanguageModel(
        "deepseek-ai/deepseek-coder-6.7b-base",
        "DSCoder-6.7b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-6.7b-base",
    ),
    LanguageModel(
        "deepseek-ai/deepseek-coder-1.3b-base",
        "DSCoder-1.3b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-1.3b-base",
    ),
    LanguageModel(
        "deepseek-ai/deepseek-coder-33b-instruct",
        "DSCoder-33b-Ins",
        LMStyle.DeepSeekCodeInstruct,
        datetime(2023, 9, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-33b-instruct",
    ),
    LanguageModel(
        "deepseek-ai/deepseek-coder-6.7b-instruct",
        "DSCoder-6.7b-Ins",
        LMStyle.DeepSeekCodeInstruct,
        datetime(2023, 9, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-6.7b-instruct",
    ),
    LanguageModel(
        "deepseek-ai/deepseek-coder-1.3b-instruct",
        "DSCoder-1.3b-Ins",
        LMStyle.DeepSeekCodeInstruct,
        datetime(2023, 8, 1),
        link="https://huggingface.co/deepseek-ai/deepseek-coder-1.3b-instruct",
    ),
    LanguageModel(
        "01-ai/Yi-Coder-9B-Chat",
        "Yi-Coder-9B-Chat",
        LMStyle.DeepSeekAPI,
        datetime(2023, 8, 1),
        link="https://huggingface.co/01-ai/Yi-Coder-9B-Chat",
    ),
    # --- DeepSeek hosted API ---
    LanguageModel(
        "deepseek-r1-preview",
        "DeepSeek-R1-Preview",
        LMStyle.DeepSeekAPI,
        datetime(2024, 6, 30),
        link="https://api-docs.deepseek.com/news/news1120",
    ),
    LanguageModel(
        "deepseek-r1-lite-preview",
        "DeepSeek-R1-Lite-Preview",
        LMStyle.DeepSeekAPI,
        datetime(2024, 6, 30),
        link="https://api-docs.deepseek.com/news/news1120",
    ),
    LanguageModel(
        "deepseek-chat",
        "DeepSeek-V3",
        LMStyle.DeepSeekAPI,
        datetime(2024, 6, 30),
        link="https://huggingface.co/deepseek-ai/DeepSeek-V3",
    ),
    LanguageModel(
        "deepseek-coder",
        "DeepSeekCoder-V2.5",
        LMStyle.DeepSeekAPI,
        datetime(2023, 8, 1),
        link="https://huggingface.co/deepseek-ai/DeepSeek-V2",
    ),
    # --- OpenAI chat models ---
    LanguageModel(
        "gpt-3.5-turbo-0301",
        "GPT-3.5-Turbo-0301",
        LMStyle.OpenAIChat,
        datetime(2021, 10, 1),
        link="https://openai.com/blog/new-models-and-developer-products-announced-at-devday",
    ),
    LanguageModel(
        "gpt-3.5-turbo-0125",
        "GPT-3.5-Turbo-0125",
        LMStyle.OpenAIChat,
        datetime(2021, 10, 1),
        link="https://openai.com/blog/new-embedding-models-and-api-updates#:~:text=Other%20new%20models%20and%20lower%20pricing",
    ),
    LanguageModel(
        "gpt-4-0613",
        "GPT-4-0613",
        LMStyle.OpenAIChat,
        datetime(2021, 10, 1),
        link="https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4",
    ),
    LanguageModel(
        "gpt-4-1106-preview",
        "GPT-4-Turbo-1106",
        LMStyle.OpenAIChat,
        datetime(2023, 4, 30),
        link="https://openai.com/blog/new-models-and-developer-products-announced-at-devday",
    ),
    LanguageModel(
        "gpt-4-turbo-2024-04-09",
        "GPT-4-Turbo-2024-04-09",
        LMStyle.OpenAIChat,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4",
    ),
    LanguageModel(
        "gpt-4o-2024-05-13",
        "GPT-4O-2024-05-13",
        LMStyle.OpenAIChat,
        datetime(2023, 4, 30),
        link="https://openai.com/index/spring-update",
    ),
    LanguageModel(
        "gpt-4o-2024-08-06",
        "GPT-4O-2024-08-06",
        LMStyle.OpenAIChat,
        datetime(2023, 4, 30),
        link="https://openai.com/index/spring-update",
    ),
    LanguageModel(
        "gpt-4o-mini-2024-07-18",
        "GPT-4O-mini-2024-07-18",
        LMStyle.OpenAIChat,
        datetime(2023, 4, 30),
        link="https://openai.com/index/spring-update",
    ),
    # --- OpenAI reasoning models; "__low/__medium/__high" suffixes select
    # the reasoning-effort variant of the same underlying model. ---
    LanguageModel(
        "o1-preview-2024-09-12",
        "O1-Preview-2024-09-12",
        LMStyle.OpenAIReasonPreview,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/guides/reasoning",
    ),
    LanguageModel(
        "o1-mini-2024-09-12",
        "O1-Mini-2024-09-12",
        LMStyle.OpenAIReasonPreview,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/guides/reasoning",
    ),
    LanguageModel(
        "o1-2024-12-17__low",
        "O1-2024-12-17 (Low)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o1-2024-12-17__medium",
        "O1-2024-12-17 (Med)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        # Fixed a garbled "htthttps://" URL scheme here.
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o1-2024-12-17__high",
        "O1-2024-12-17 (High)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o3-mini-2025-01-31__low",
        "O3-Mini-2025-01-31 (Low)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o3-mini-2025-01-31__medium",
        "O3-Mini-2025-01-31 (Med)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o3-mini-2025-01-31__high",
        "O3-Mini-2025-01-31 (High)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o4-mini__high",
        "O4-Mini (High)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o4-mini__medium",
        "O4-Mini (Medium)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o4-mini__low",
        "O4-Mini (Low)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    LanguageModel(
        "o3__high",
        "O3 (High)",
        LMStyle.OpenAIReason,
        datetime(2023, 4, 30),
        link="https://platform.openai.com/docs/api-reference/chat/create#chat-create-reasoning_effort",
    ),
    # --- Anthropic Claude ---
    LanguageModel(
        "claude-instant-1",
        "Claude-Instant-1",
        LMStyle.Claude,
        datetime(2022, 12, 31),
        link="https://www.anthropic.com/index/introducing-claude",
    ),
    LanguageModel(
        "claude-2",
        "Claude-2",
        LMStyle.Claude,
        datetime(2022, 12, 31),
        link="https://www.anthropic.com/index/claude-2",
    ),
    LanguageModel(
        "claude-3-opus-20240229",
        "Claude-3-Opus",
        LMStyle.Claude3,
        datetime(2023, 9, 1),
        link="https://www.anthropic.com/index/claude-3",
    ),
    LanguageModel(
        "claude-3-sonnet-20240229",
        "Claude-3-Sonnet",
        LMStyle.Claude3,
        datetime(2023, 9, 1),
        link="https://www.anthropic.com/index/claude-3",
    ),
    LanguageModel(
        "claude-3-5-sonnet-20240620",
        "Claude-3.5-Sonnet-20240620",
        LMStyle.Claude3,
        datetime(2024, 3, 31),
        link="https://www.anthropic.com/news/claude-3-5-sonnet",
    ),
    LanguageModel(
        "claude-3-5-sonnet-20241022",
        "Claude-3.5-Sonnet-20241022",
        LMStyle.Claude3,
        datetime(2024, 3, 31),
        link="https://www.anthropic.com/news/claude-3-5-sonnet",
    ),
    LanguageModel(
        "claude-3-7-sonnet-20250219",
        "Claude-3.7-Sonnet (Thinking)",
        LMStyle.Claude3Thinking,
        datetime(2024, 3, 31),
        link="https://www.anthropic.com/news/claude-3-7-sonnet",
    ),
    LanguageModel(
        "claude-3-haiku-20240307",
        "Claude-3-Haiku",
        LMStyle.Claude3,
        datetime(2023, 4, 30),
        link="https://www.anthropic.com/index/claude-3",
    ),
    LanguageModel(
        "claude-sonnet-4-20250514",
        "Claude-Sonnet-4 (Thinking)",
        LMStyle.Claude3Thinking,
        datetime(2023, 4, 30),
        link="https://www.anthropic.com/claude/sonnet",
    ),
    LanguageModel(
        "claude-opus-4-20250514",
        "Claude-Opus-4 (Thinking)",
        LMStyle.Claude3Thinking,
        datetime(2023, 4, 30),
        link="https://www.anthropic.com/claude/sonnet",
    ),
    # NOTE(review): this "_nothink" entry keeps LMStyle.Claude3Thinking while
    # the sonnet "_nothink" below uses LMStyle.Claude3 — confirm which is
    # intended before changing either.
    LanguageModel(
        "claude-opus-4-20250514_nothink",
        "Claude-Opus-4",
        LMStyle.Claude3Thinking,
        datetime(2023, 4, 30),
        link="https://www.anthropic.com/claude/sonnet",
    ),
    LanguageModel(
        "claude-sonnet-4-20250514_nothink",
        "Claude-Sonnet-4",
        LMStyle.Claude3,
        datetime(2023, 4, 30),
        link="https://www.anthropic.com/claude/sonnet",
    ),
    # --- Google Gemini ---
    LanguageModel(
        "gemini-1.5-pro-002",
        "Gemini-Pro-1.5-002",
        LMStyle.Gemini,
        datetime(2023, 4, 30),
        link="https://blog.google/technology/ai/gemini-api-developers-cloud",
    ),
    LanguageModel(
        "gemini-1.5-flash-002",
        "Gemini-Flash-1.5-002",
        LMStyle.Gemini,
        datetime(2023, 4, 30),
        link="https://blog.google/technology/ai/gemini-api-developers-cloud",
    ),
    LanguageModel(
        "gemini-2.5-pro",
        "Gemini-2.5-Pro-06-17",
        LMStyle.GeminiThinking,
        datetime(2023, 4, 30),
        link="https://blog.google/technology/google-deepmind/gemini-model-thinking-updates-march-2025/#advanced-coding",
    ),
    LanguageModel(
        "gemini-2.5-pro-preview-06-05",
        "Gemini-2.5-Pro-06-05",
        LMStyle.GeminiThinking,
        datetime(2023, 4, 30),
        link="https://blog.google/technology/google-deepmind/gemini-model-thinking-updates-march-2025/#advanced-coding",
    ),
    LanguageModel(
        "gemini-2.5-pro-preview-05-06",
        "Gemini-2.5-Pro-05-06",
        LMStyle.GeminiThinking,
        datetime(2023, 4, 30),
        link="https://blog.google/technology/google-deepmind/gemini-model-thinking-updates-march-2025/#advanced-coding",
    ),
    LanguageModel(
        "gemini-2.5-flash-preview-04-17",
        "Gemini-2.5-Flash-04-17",
        LMStyle.GeminiThinking,
        datetime(2023, 4, 30),
        link="https://developers.googleblog.com/en/start-building-with-gemini-25-flash/",
    ),
    LanguageModel(
        "gemini-2.5-flash-preview-05-20",
        "Gemini-2.5-Flash-05-20",
        LMStyle.GeminiThinking,
        datetime(2023, 4, 30),
        link="https://developers.googleblog.com/en/start-building-with-gemini-25-flash/",
    ),
    # --- BigCode / Google open-weight base models ---
    # NOTE(review): all three starcoder2 links point at the same
    # "starcoder2-7b-magicoder-instruct" repo rather than the base model
    # cards — verify these URLs.
    LanguageModel(
        "bigcode/starcoder2-3b",
        "StarCoder2-3b",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/bigcode/starcoder2-7b-magicoder-instruct/tree/main",
    ),
    LanguageModel(
        "bigcode/starcoder2-7b",
        "StarCoder2-7b",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/bigcode/starcoder2-7b-magicoder-instruct/tree/main",
    ),
    LanguageModel(
        "bigcode/starcoder2-15b",
        "StarCoder2-15b",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/bigcode/starcoder2-7b-magicoder-instruct/tree/main",
    ),
    LanguageModel(
        "google/codegemma-7b",
        "CodeGemma-7b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/google/codegemma-7b",
    ),
    LanguageModel(
        "google/codegemma-2b",
        "CodeGemma-2b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/google/codegemma-2b",
    ),
    LanguageModel(
        "google/gemma-7b",
        "Gemma-7b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/google/gemma-7b",
    ),
    LanguageModel(
        "google/gemma-2b",
        "Gemma-2b-Base",
        LMStyle.GenericBase,
        datetime(2023, 1, 1),
        link="https://huggingface.co/google/gemma-2b",
    ),
    # --- Mistral hosted API ---
    LanguageModel(
        "mistral-large-latest",
        "Mistral-Large",
        LMStyle.MistralWeb,
        datetime(2023, 1, 1),
        link="https://mistral.ai/news/mistral-large/",
    ),
    LanguageModel(
        "open-mixtral-8x22b",
        "Mixtral-8x22B-Ins",
        LMStyle.MistralWeb,
        datetime(2023, 1, 1),
        link="https://mistral.ai/news/mixtral-8x22b/",
    ),
    LanguageModel(
        "open-mixtral-8x7b",
        "Mixtral-8x7B-Ins",
        LMStyle.MistralWeb,
        datetime(2023, 1, 1),
        link="https://mistral.ai/news/mixtral-8x7b/",
    ),
    LanguageModel(
        "codestral-latest",
        "Codestral-Latest",
        LMStyle.MistralWeb,
        datetime(2023, 1, 1),
        link="https://mistral.ai/news/codestral/",
    ),
    # --- Qwen family (plus local fine-tune checkpoints) ---
    LanguageModel(
        "Qwen/QwQ-32B-Preview",
        "QwQ-32B-Preview",
        LMStyle.QwQ,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/QwQ-32B-Preview",
    ),
    LanguageModel(
        "Qwen/QwQ-32B",
        "QwQ-32B",
        LMStyle.QwQ,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/QwQ-32B",
    ),
    LanguageModel(
        "Qwen/Qwen2-72B-Instruct",
        "Qwen2-Ins-72B",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2-72B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen2.5-7B-Instruct",
        "Qwen2.5-Ins-7B",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-7B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen3-4B-Base",
        "Qwen3-4B-Base",
        LMStyle.GenericBase,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen3-4B-Base",
    ),
    LanguageModel(
        "Qwen/Qwen3-1.7B-Base",
        "Qwen3-1.7B-Base",
        LMStyle.GenericBase,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen3-1.7B-Base",
    ),
    LanguageModel(
        "ftajwar/qwen3_4B_Base_MaxRL_Polaris_1000_steps",
        "ftajwar/qwen3_4B_Base_MaxRL_Polaris_1000_steps",
        LMStyle.GenericBase,
        datetime(2023, 8, 30),
        link="https://huggingface.co/ftajwar/qwen3_4B_Base_MaxRL_Polaris_1000_steps",
    ),
    LanguageModel(
        "ftajwar/qwen3_4B_Base_GRPO_Polaris_1000_steps",
        "ftajwar/qwen3_4B_Base_GRPO_Polaris_1000_steps",
        LMStyle.GenericBase,
        datetime(2023, 8, 30),
        link="https://huggingface.co/ftajwar/qwen3_4B_Base_GRPO_Polaris_1000_steps",
    ),
    # Local filesystem checkpoints — only resolvable on the training host.
    LanguageModel(
        "/data/models/Qwen2.5-1.5B-Instruct/",
        "/data/models/Qwen2.5-1.5B-Instruct/",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct",
    ),
    LanguageModel(
        "/data/repos/agentica-rllm/scripts/train/deepcoder/checkpoints/deepcoder-1.5b/grpo-lcb-prompt/global_step_900/actor/checkpoint",
        "grpo_900",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct",
    ),
    LanguageModel(
        "/data/repos/agentica-rllm/scripts/train/deepcoder/checkpoints/deepcoder-1.5b/tailrl-lcb-prompt/global_step_900/actor/checkpoint",
        "tailrl_900",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen2.5-32B-Instruct",
        "Qwen2.5-Ins-32B",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen2.5-72B-Instruct",
        "Qwen2.5-Ins-72B",
        LMStyle.CodeQwenInstruct,
        datetime(2023, 8, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-72B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen2.5-Coder-7B-Instruct",
        "Qwen2.5-Coder-Ins-7B",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-Coder-7B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen2.5-Coder-32B-Instruct",
        "Qwen2.5-Coder-Ins-32B",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct",
    ),
    LanguageModel(
        "Qwen/Qwen3-235B-A22B",
        "Qwen3-235B-A22B",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/Qwen3-235B-A22B",
    ),
    # --- Misc hosted models ---
    LanguageModel(
        "azerogpt",
        "AzeroGPT-64b",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 3, 1),
        link="https://azerogpt.soundai.com",
    ),
    LanguageModel(
        "Kimi-k1.6-IOI-high",
        "Kimi-k1.6-IOI-high",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 7, 30),
        link="https://kimi.moonshot.cn/",
    ),
    LanguageModel(
        "Kimi-k1.6-IOI",
        "Kimi-k1.6-IOI",
        LMStyle.CodeQwenInstruct,
        datetime(2024, 7, 30),
        link="https://kimi.moonshot.cn/",
    ),
    LanguageModel(
        "Qwen/QwQ-Max-Preview",
        "QwQ-Max-Preview",
        LMStyle.QwQ,
        datetime(2024, 6, 30),
        link="https://huggingface.co/Qwen/QwQ-Max-Preview",
    ),
    # --- DeepSeek R1 and distills ---
    LanguageModel(
        "accounts/fireworks/models/deepseek-r1",
        "DeepSeek-R1",
        LMStyle.DeepSeekR1,
        datetime(2024, 6, 30),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1",
    ),
    LanguageModel(
        "deepseek-reasoner",
        "DeepSeek-R1-0528",
        LMStyle.DeepSeekR1,
        datetime(2024, 6, 30),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-0528",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
        "DeepSeek-R1-Distill-Qwen-1.5B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
        "DeepSeek-R1-Distill-Qwen-7B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
        "DeepSeek-R1-Distill-Qwen-14B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
        "DeepSeek-R1-Distill-Qwen-32B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
        "DeepSeek-R1-Distill-Llama-8B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
    ),
    LanguageModel(
        "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
        "DeepSeek-R1-Distill-Llama-70B",
        LMStyle.DeepSeekR1,
        datetime(2025, 1, 20),
        link="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
    ),
    # --- Cohere Command ---
    LanguageModel(
        "command-r-plus-08-2024",
        "command-r-plus-08-2024",
        LMStyle.CohereCommand,
        datetime(2024, 8, 1),
        link="https://docs.cohere.com/docs/command-r-plus",
    ),
    LanguageModel(
        "command-r-08-2024",
        "command-r-08-2024",
        LMStyle.CohereCommand,
        datetime(2024, 8, 1),
        link="https://docs.cohere.com/docs/command-r",
    ),
    LanguageModel(
        "command-r7b-12-2024",
        "command-r7b-12-2024",
        LMStyle.CohereCommand,
        datetime(2024, 12, 13),
        link="https://docs.cohere.com/docs/command-r7b",
    ),
    LanguageModel(
        "command-a-03-2025",
        "command-a-03-2025",
        LMStyle.CohereCommand,
        datetime(2025, 3, 13),
        link="https://docs.cohere.com/docs/command-a",
    ),
    # --- Together AI serverless ---
    LanguageModel(
        "Qwen/Qwen2.5-72B-Instruct-Turbo",
        "Qwen2.5-72B-Instruct-Turbo",
        LMStyle.TogetherAI,
        datetime(2024, 9, 19),
        link="https://docs.together.ai/docs/serverless-models",
    ),
    # --- Others ---
    LanguageModel(
        "MetaStone-L1-7B",
        "MetaStone-L1-7B",
        LMStyle.DeepSeekR1,
        datetime(2025, 3, 12),
        link="https://www.wenxiaobai.com/",
    ),
    LanguageModel(
        "grok-3-mini-beta",
        "Grok-3-Mini",
        LMStyle.Grok,
        datetime(2024, 3, 1),
        link="https://x.com/i/grok",
    ),
    LanguageModel(
        "grok-3-mini-beta_high",
        "Grok-3-Mini (High)",
        LMStyle.Grok,
        datetime(2024, 3, 1),
        link="https://x.com/i/grok",
    ),
    LanguageModel(
        "nvidia/Llama-3_1-Nemotron-Ultra-253B-v1",
        "Llama-3_1-Nemotron-Ultra-253B-v1",
        LMStyle.DeepSeekR1,
        datetime(2024, 4, 1),
        link="https://huggingface.co/nvidia/Llama-3_1-Nemotron-Ultra-253B-v1",
    ),
    LanguageModel(
        "nvidia/Llama-3_1-Nemotron-Nano-8B-v1",
        "Llama-3_1-Nemotron-Nano-8B-v1",
        LMStyle.DeepSeekR1,
        datetime(2024, 4, 1),
        link="https://huggingface.co/nvidia/Llama-3_1-Nemotron-Nano-8B-v1/",
    ),
    LanguageModel(
        "agentica-org/DeepCoder-14B-Preview",
        "DeepCoder-14B-Preview",
        LMStyle.DeepSeekR1,
        datetime(2024, 4, 1),
        link="https://huggingface.co/agentica-org/DeepCoder-14B-Preview",
    ),
]
|
|
# Index the registry by unique model name for O(1) lookup; on duplicate
# names the last list entry wins.
LanguageModelStore: dict[str, LanguageModel] = {
    model.model_name: model for model in LanguageModelList
}
|
|
if __name__ == "__main__":
    # Smoke check: print every registered model name as a single list.
    print([model_name for model_name in LanguageModelStore])
|
|