# api/models/fetch.py
import random


class FetchModel:
    @staticmethod
    def all_models():
        """Return the catalog of models exposed by the API."""
        models = [
            {
                "id": "llama-4-maverick-17b",
                "name": "LLaMA 4 Maverick 17B",
                "description": "Meta AI's 17B-parameter general-purpose model from the LLaMA 4 series, designed for high-quality text generation.",
                "type": "text"
            },
            {
                "id": "llama-4-scout-17b",
                "name": "LLaMA 4 Scout 17B",
                "description": "Instruction-tuned version of LLaMA 4 by Meta, tailored for alignment and structured task performance.",
                "type": "text"
            },
            {
                "id": "llama-3.1-8b",
                "name": "LLaMA 3.1 8B",
                "description": "A fast and lightweight 8B parameter model from Meta's LLaMA 3.1 line, optimized for low-latency inference.",
                "type": "text"
            },
            {
                "id": "llama-3.3-70b",
                "name": "LLaMA 3.3 70B",
                "description": "Meta's 70B parameter flagship model from LLaMA 3.3, designed for state-of-the-art language understanding and generation.",
                "type": "text"
            },
            {
                "id": "deepseek-r1",
                "name": "DeepSeek R1",
                "description": "DeepSeek AI's foundational model focused on reasoning, language understanding, and long-context comprehension.",
                "type": "text"
            },
            {
                "id": "deepseek-v3",
                "name": "DeepSeek V3",
                "description": "DeepSeek AI's third-generation model with enhanced reasoning and coding abilities.",
                "type": "text"
            },
            {
                "id": "qwen-2.5-72b",
                "name": "Qwen 2.5 72B",
                "description": "Large instruction-tuned language model from the Qwen 2.5 family, optimized for complex NLP tasks.",
                "type": "text"
            },
            {
                "id": "gemma-2-27b",
                "name": "Gemma 2 27B",
                "description": "Google's instruction-tuned model with 27B parameters, capable of high-performance natural language understanding.",
                "type": "text"
            },
            {
                "id": "grok-3",
                "name": "Grok 3",
                "description": "xAI's general-purpose large language model designed for reasoning, conversation, and alignment.",
                "type": "text"
            },
            {
                "id": "grok-3-fast",
                "name": "Grok 3 (Fast)",
                "description": "A low-latency version of Grok 3 optimized for responsiveness and quick task execution.",
                "type": "text"
            },
            {
                "id": "grok-3-mini",
                "name": "Grok 3 Mini",
                "description": "A smaller variant of Grok 3 designed for lighter inference while maintaining core capabilities.",
                "type": "text"
            },
            {
                "id": "grok-3-mini-fast",
                "name": "Grok 3 Mini (Fast)",
                "description": "Fast and lightweight variant of Grok 3 Mini for extremely low-latency use cases.",
                "type": "text"
            },
            {
                "id": "grok-2-1212",
                "name": "Grok 2 1212",
                "description": "An earlier generation Grok model from xAI, optimized for general language tasks with improved efficiency.",
                "type": "text"
            },
            {
                "id": "fal-ai/fast-sdxl",
                "name": "Fast SDXL",
                "description": "A fast and efficient image generation model from the SDXL family, optimized for high-quality outputs.",
                "type": "image"
            }
        ]
        return models

    @staticmethod
    def select_model(id):
        """Resolve a public model id to a concrete provider model name.

        When several provider checkpoints back the same id, one is picked
        at random; unknown ids return None.
        """
        if id == "llama-4-maverick-17b":
            options = ['meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8']
            model = random.choice(options)
            return model
        elif id == "llama-4-scout-17b":
            options = ['meta-llama/Llama-4-Scout-17B-16E-Instruct', 'meta-llama/llama-4-scout-17b-16e-instruct']
            model = random.choice(options)
            return model
        elif id == "llama-3.1-8b":
            options = ['llama-3.1-8b-instant']
            model = random.choice(options)
            return model
elif id == "llama-3.3-70b":
options = ['meta-llama/Llama-3.3-70B-Instruct-Turbo', 'llama-3.3-70b-versatile', 'meta-llama/Llama-3.3-70B-Instruct-Turbo']
model = random.choice(options)
return model
elif id == "deepseek-r1":
options = ['deepseek-ai/DeepSeek-R1', 'deepseek-r1-distill-llama-70b', 'deepseek-ai/DeepSeek-R1-Turbo', 'deepseek-ai/DeepSeek-R1-Distill-Llama-70B', 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B']
model = random.choice(options)
return model
elif id == "deepseek-v3":
options = ['deepseek-ai/DeepSeek-V3']
model = random.choice(options)
return model
elif id == "qwen-2.5-72b":
options = ['Qwen/Qwen2.5-VL-72B-Instruct', 'Qwen/Qwen2.5-72B-Instruct']
model = random.choice(options)
return model
elif id == "gemma-2-27b":
options = ['google/gemma-2-27b-it']
model = random.choice(options)
return model
elif id == "grok-3":
options = ['grok-3']
model = random.choice(options)
return model
elif id == "grok-3-fast":
options = ['grok-3-fast']
model = random.choice(options)
return model
elif id == "grok-3-mini":
options = ['grok-3-mini']
model = random.choice(options)
return model
elif id == "grok-3-mini-fast":
options = ['grok-3-mini-fast']
model = random.choice(options)
return model
elif id == "grok-2-1212":
options = ['grok-2-1212']
model = random.choice(options)
return model
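

# Illustrative usage sketch (an assumption about how this module is consumed,
# not part of the original file): list the public catalog and resolve each id
# to a provider model name. Ids backed by several checkpoints resolve randomly
# on each call; ids without a mapping (e.g. the image model) print None.
if __name__ == "__main__":
    for entry in FetchModel.all_models():
        provider = FetchModel.select_model(entry["id"])
        print(f"{entry['id']} ({entry['type']}) -> {provider}")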