import streamlit as st
from groq import Groq
import requests
# =======================
# API Clients
# =======================
client = Groq(api_key=st.secrets["GROQ_API_KEY"])
HF_API_KEY = st.secrets["HF_API_KEY"]
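# Both keys are read from Streamlit secrets. Locally they would typically be
# provided via .streamlit/secrets.toml; the values below are placeholders, not
# real keys, e.g.:
#
#   GROQ_API_KEY = "gsk_..."
#   HF_API_KEY = "hf_..."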
# =======================
# Sidebar Settings
# =======================
st.sidebar.title("⚙️ Settings")

provider_choice = st.sidebar.radio(
    "Choose Provider", ["Groq", "Hugging Face"]
)

if provider_choice == "Groq":
    model_choice = st.sidebar.selectbox(
        "Choose Groq Model", ["llama-3.1-8b-instant", "llama-3.1-70b-versatile", "mixtral-8x7b-32768"]
    )
else:
    model_choice = st.sidebar.selectbox(
        "Choose HF Model", ["mistralai/Mixtral-8x7B-Instruct-v0.1", "tiiuae/falcon-7b-instruct"]
    )
st.title("🤖 CodeCraft AI - Mini Copilot (Chat Edition)")
# =======================
# Session state for chats
# =======================
if "generate_chat" not in st.session_state:
st.session_state.generate_chat = []
if "debug_chat" not in st.session_state:
st.session_state.debug_chat = []
if "explain_chat" not in st.session_state:
st.session_state.explain_chat = []
# =======================
# Helper functions
# =======================
def call_groq(chat_history: list[tuple[str, str]], system_prompt: str) -> str:
    """Send the chat history to the Groq chat completions API and return the reply text."""
    try:
        response = client.chat.completions.create(
            model=model_choice,
            messages=[{"role": "system", "content": system_prompt}]
            + [{"role": role, "content": msg} for role, msg in chat_history],
            temperature=0.4,
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"⚠️ Groq Error: {str(e)}"

def call_hf(prompt: str) -> str:
    """Call the Hugging Face Inference API with a plain-text prompt and return the generated text."""
    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
    payload = {"inputs": prompt}
    response = requests.post(
        f"https://api-inference.huggingface.co/models/{model_choice}",
        headers=headers,
        json=payload,
        timeout=60
    )
    if response.status_code == 200:
        result = response.json()
        # Text-generation models usually return a list of dicts with "generated_text".
        if isinstance(result, list) and "generated_text" in result[0]:
            return result[0]["generated_text"]
        else:
            return str(result)
    return f"⚠️ HF Error: {response.text}"

def get_ai_response(chat_history: list[tuple[str, str]], system_prompt: str) -> str:
    """Route the request to the selected provider."""
    if provider_choice == "Groq":
        return call_groq(chat_history, system_prompt)
    else:
        # HF text-generation models take a single string, so flatten the history into one prompt.
        prompt = system_prompt + "\n\n"
        for role, msg in chat_history:
            prompt += f"{role.upper()}: {msg}\n"
        return call_hf(prompt)
# =======================
# Chat UI
# =======================
def chat_ui(tab_name, chat_history, system_prompt, input_key):
    st.subheader(tab_name)

    # --- Chat history display ---
    with st.container():
        for role, msg in chat_history:
            if role == "user":
                with st.chat_message("user"):
                    st.write(msg)
            else:
                with st.chat_message("assistant"):
                    if "```" in msg:  # detect code blocks
                        parts = msg.split("```")
                        for i, part in enumerate(parts):
                            if i % 2 == 1:  # inside code block
                                lang, *code_lines = part.split("\n")
                                code = "\n".join(code_lines)
                                st.code(code, language=lang if lang else "python")
                            else:
                                st.write(part)
                    else:
                        st.write(msg)

    # --- Input bar + send button in one row ---
    col1, col2 = st.columns([10, 1])
    with col1:
        user_input = st.text_input("Type your message...", key=input_key, label_visibility="collapsed")
    with col2:
        send_btn = st.button("➤", key=input_key + "_send")

    # --- Handle input ---
    if send_btn and user_input.strip():
        chat_history.append(("user", user_input.strip()))
        with st.spinner("Thinking..."):
            ai_msg = get_ai_response(chat_history, system_prompt)
            chat_history.append(("assistant", ai_msg))
        st.rerun()
# =======================
# Tabs
# =======================
tab1, tab2, tab3 = st.tabs(["💡 Generate Code", "🐞 Debug Code", "📘 Explain Code"])

with tab1:
    chat_ui(
        "💡 Generate Code",
        st.session_state.generate_chat,
        "You are a helpful coding assistant. Generate correct code first, then a short simple explanation.",
        input_key="generate_input"
    )

with tab2:
    chat_ui(
        "🐞 Debug Code",
        st.session_state.debug_chat,
        "You are an expert code debugger. Fix errors and give corrected code, then explain what changed and why in simple terms.",
        input_key="debug_input"
    )

with tab3:
    chat_ui(
        "📘 Explain Code",
        st.session_state.explain_chat,
        "You are a teacher that explains code in simple words. The user pastes code, and you explain step by step.",
        input_key="explain_input"
    )
# =======================
# Footer
# =======================
st.markdown("---")
st.caption("🚀 Built for Hackathons using Streamlit + Hugging Face + Groq")
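# To try this locally (assuming the file is saved as app.py and the secrets above are set):
#   streamlit run app.py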