import gradio as gr
import requests


def predict_with_model(message, history, model_name):
    """Send the current user message to the selected Ollama model and return its reply."""
    url = 'http://localhost:11434/api/chat'
    payload = {
        'model': model_name,
        # Only the current message is sent; the Gradio chat history is not forwarded to the model.
        'messages': [{'role': 'user', 'content': message}],
        'stream': False
    }

    try:
        response = requests.post(url, json=payload, timeout=120)
        response.raise_for_status()
        return response.json()['message']['content']
    except Exception as e:
        return f"Error with model {model_name}: {e}"


# Each model must already be pulled locally (e.g. `ollama pull deepseek-r1:7b`).
available_models = ["deepseek-coder-v2", "dolphin-mistral", "worm-r1", "deepseek-r1:7b"]
with gr.Blocks(theme="soft") as multi_model_app:
    gr.Markdown("# Ollama Multi-Model Chat")
    model_dropdown = gr.Dropdown(
        choices=available_models,
        value=available_models[0],
        label="Choose a model"
    )

    # The dropdown is passed as an additional input, so its current value
    # arrives as the third argument (model_name) of predict_with_model.
    gr.ChatInterface(
        fn=predict_with_model,
        additional_inputs=[model_dropdown],
        title="DeepSeek & Friends",
        description="Choose your preferred model from the dropdown."
    )

multi_model_app.launch()
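
If you prefer not to hard-code the model list, Ollama also exposes the locally installed models via GET /api/tags. The snippet below is a minimal sketch of how available_models could be populated dynamically; the endpoint, response fields, and the list_local_models helper assume a default local Ollama installation and are not part of the listing above.

def list_local_models(base_url='http://localhost:11434'):
    """Query Ollama for the models that are installed locally."""
    response = requests.get(f"{base_url}/api/tags", timeout=10)
    response.raise_for_status()
    # Response shape: {"models": [{"name": "deepseek-r1:7b", ...}, ...]}
    return [model['name'] for model in response.json()['models']]


# Fall back to the hard-coded list if Ollama is not reachable.
try:
    available_models = list_local_models()
except requests.RequestException:
    pass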