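# Minimal Streamlit front-end for chatting with a DeepSeek-R1 model served by Ollama.
# Assumes Ollama is running locally with the model already pulled (e.g. `ollama pull deepseek-r1:7b`).
# Assuming this file is saved as app.py, launch it with: streamlit run app.py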
import streamlit as st
import requests

# Ollama API endpoint (Ollama listens on port 11434 by default)
OLLAMA_API_URL = "http://localhost:11434/api/chat"  # Or your Ollama server address

# Streamlit UI
st.title("DeepSeek-R1 Chat")

# User prompt input
prompt = st.text_input("Enter your prompt:")

# Button to send the prompt (ignore clicks while the prompt is empty)
if st.button("Send") and prompt.strip():
    # Prepare the request data
    data = {
        "model": "deepseek-r1:7b",
        "messages": [{"role": "user", "content": prompt}],
        "stream": False
    }
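    # "stream": False asks Ollama for a single JSON reply instead of newline-delimited chunks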

    # Send the request to Ollama
    try:
        # requests serializes `data` as JSON and sets the Content-Type header for us
        response = requests.post(OLLAMA_API_URL, json=data)
        response.raise_for_status()  # Raise an exception for bad status codes
        response_data = response.json()
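        # A non-streaming /api/chat reply looks roughly like:
        # {"model": "deepseek-r1:7b", "message": {"role": "assistant", "content": "..."}, "done": true, ...}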
        # Extract the model's response
        model_response = response_data['message']['content']
        st.write(f"**Model:** {model_response}")

    except requests.exceptions.RequestException as e:
        st.error(f"Error: {e}")