# Streamlit chat UI for the DeepSeek-R1 model served by an Ollama backend.
import streamlit as st
import requests
import json

# Ollama chat endpoint.
# NOTE(review): port 7860 is unusual for Ollama (its default is 11434) —
# confirm this matches where the Ollama server actually listens.
OLLAMA_API_URL = "http://localhost:7860/api/chat"  # Or your Ollama server address

# --- Streamlit UI ---
st.title("DeepSeek-R1 Chat")

# User prompt input
prompt = st.text_input("Enter your prompt:")

# Button to send the prompt
if st.button("Send"):
    if not prompt.strip():
        # Avoid a pointless round trip when the user submits nothing.
        st.warning("Please enter a prompt first.")
    else:
        # Non-streaming request: Ollama returns the full reply in one JSON body.
        payload = {
            "model": "deepseek-r1:7b",
            "messages": [{"role": "user", "content": prompt}],
            "stream": False,
        }
        try:
            # json= serializes the payload and sets the Content-Type header
            # for us; timeout prevents the UI from hanging forever on a
            # stalled or unreachable server.
            response = requests.post(OLLAMA_API_URL, json=payload, timeout=120)
            response.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
            response_data = response.json()
            # Guard the expected {"message": {"content": ...}} shape so a
            # surprise payload shows an error instead of crashing the app.
            model_response = response_data.get("message", {}).get("content")
            if model_response is None:
                st.error(f"Unexpected response format: {response_data}")
            else:
                st.write(f"**Model:** {model_response}")
        except (requests.exceptions.RequestException, ValueError) as e:
            # ValueError also covers a non-JSON body from response.json().
            st.error(f"Error: {e}")