import streamlit as st
import requests

# Function to query the Hugging Face API
def query(payload, api_url, timeout=60):
    """POST *payload* to the Hugging Face Inference API at *api_url*.

    Authenticates with the ``HF_TOKEN`` stored in Streamlit secrets and
    returns the decoded JSON response (a list of generations on success,
    or an error dict from the API on failure).

    Args:
        payload: JSON-serializable request body (``inputs`` + ``parameters``).
        api_url: Full model endpoint URL.
        timeout: Seconds before the request is aborted. Without a timeout,
            ``requests.post`` can block forever on a stalled connection,
            freezing the Streamlit app.

    Raises:
        requests.exceptions.RequestException: on network failure or timeout.
        ValueError: if the response body is not valid JSON.
    """
    headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
    response = requests.post(api_url, headers=headers, json=payload, timeout=timeout)
    return response.json()

# Page configuration
# Page configuration
st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered",
)

# Seed session state on first run: chat history plus the remembered model choice.
for _key, _default in (
    ("messages", []),
    ("selected_model", "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"),
):
    if _key not in st.session_state:
        st.session_state[_key] = _default

# Sidebar configuration
# Sidebar: model selection and generation parameters.
with st.sidebar:
    st.header("Model Configuration")
    st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")

    # Model dropdown, preselecting whatever is stored in session state so the
    # choice survives Streamlit reruns.
    model_options = [
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
        "deepseek-ai/DeepSeek-R1",
        "deepseek-ai/DeepSeek-R1-Zero",
    ]
    current_index = model_options.index(st.session_state.selected_model)
    selected_model = st.selectbox("Select Model", model_options, index=current_index)
    st.session_state.selected_model = selected_model

    # NOTE(review): system_message is collected here but never included in the
    # API payload below — confirm whether it should be sent to the model.
    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100,
    )

    # Generation parameters forwarded to the Inference API.
    max_tokens = st.slider("Max Tokens", min_value=1, max_value=4000, value=512)
    temperature = st.slider("Temperature", min_value=0.1, max_value=4.0, value=0.7)
    top_p = st.slider("Top-p", min_value=0.1, max_value=1.0, value=0.9)

# Chat interface
# Chat interface header.
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")

# Replay the stored conversation so the transcript survives Streamlit reruns.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])

# Handle input
# Handle new user input.
if prompt := st.chat_input("Type your message..."):
    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            # Request body for the HF Inference API text-generation endpoint;
            # return_full_text=False keeps the prompt out of the completion.
            payload = {
                "inputs": prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    "top_p": top_p,
                    "return_full_text": False
                }
            }

            # Query the Hugging Face API using the model chosen in the sidebar.
            api_url = f"https://api-inference.huggingface.co/models/{st.session_state.selected_model}"
            output = query(payload, api_url)

            # Success responses are a list of {"generated_text": ...} dicts.
            if isinstance(output, list) and output and 'generated_text' in output[0]:
                assistant_response = output[0]['generated_text']

                with st.chat_message("assistant"):
                    st.markdown(assistant_response)

                st.session_state.messages.append({"role": "assistant", "content": assistant_response})
            # Failures come back as a dict such as {"error": "Model ... is
            # currently loading"} — surface that text instead of a generic
            # message so the user knows what went wrong.
            elif isinstance(output, dict) and "error" in output:
                st.error(f"API Error: {output['error']}")
            else:
                st.error("Error: Unable to generate a response. Please try again.")

    # Top-level UI boundary: show the failure in the app instead of crashing.
    except Exception as e:
        st.error(f"Application Error: {str(e)}")

# NOTE(review): removed a near-verbatim duplicate of this script that was kept
# as an unused module-level triple-quoted string literal (dead commented-out
# code). The string was evaluated and immediately discarded, so deleting it
# has no runtime effect; the active implementation above is the single source
# of truth. Recover the old variant from version control if it is ever needed.