Software-Engineering-Arena / context_window.json
{
"gpt-3.5-turbo": 16000,
"gpt-4-turbo": 128000,
"gpt-4o": 128000,
"gpt-4o-mini": 128000,
"claude-3-5-haiku-20241022" : 200000,
"claude-3-5-sonnet-20241022" : 200000,
"claude-3-opus-20240229" : 200000,
"deepseek-chat": 64000,
"deepseek-r1": 64000,
"gemini-1.5-flash": 1048576,
"gemini-1.5-pro": 2097152,
"gemini-2.0-flash-lite-preview": 1048576,
"gemini-2.0-pro-exp": 2097152,
"llama-3.1-8b": 128000,
"llama-3.1-405b": 128000,
"llama-3.1-70b": 128000,
"llama-3.3-70b": 128000,
"mistral-embed": 8000,
"mistral-large-latest": 131000,
"mistral-small-latest": 32000,
"o1": 128000,
"o1-mini": 128000,
"o3-mini": 200000,
"Qwen2.5-32B-Instruct": 131072,
"qwen2.5-72b": 32768,
"Qwen2.5-72B-Instruct": 131072,
"Qwen2.5-72B-Instruct-128k": 131072,
"Qwen2.5-Coder-32B-Instruct": 131072,
"yi-large": 32000
}
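
For reference, a minimal sketch of how an application might consume this mapping, assuming the JSON file is read once at startup and queried per model. The path, function name, and fallback value are illustrative assumptions, not part of the repository:

import json

# Hypothetical path; the actual Space may load this file differently.
CONTEXT_WINDOW_PATH = "context_window.json"

# Load the model -> context window (in tokens) mapping.
with open(CONTEXT_WINDOW_PATH, "r", encoding="utf-8") as f:
    CONTEXT_WINDOWS = json.load(f)

def get_context_window(model: str, default: int = 8000) -> int:
    """Return the context window for `model`, or a conservative fallback if unknown."""
    return CONTEXT_WINDOWS.get(model, default)

# Example lookups:
print(get_context_window("gpt-4o"))         # 128000
print(get_context_window("unknown-model"))  # 8000 (fallback, assumed default)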