| | import streamlit as st |
| | import os |
| | import uuid |
| | from streamlit_chat import message |
| |
|
| | |
| | from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace |
| |
|
| | |
| | from langchain_core.messages import ( |
| | AIMessage, |
| | HumanMessage, |
| | SystemMessage, |
| | BaseMessage |
| | ) |
| |
|
| | |
# Hugging Face Inference API token, read from the environment at import time.
# If unset, initialize_llm() returns None and the UI shows a warning instead.
HF_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
| |
|
| | |
# System prompt seeded into every conversation: constrains the assistant to
# culinary topics and sets its tone.  Kept as a single SystemMessage so it can
# be placed at index 0 of each chat's message list (see new_chat()).
SYSTEM_PROMPT = SystemMessage(
    content=(
        "You are Kitchen Buddy 👨🍳, a warm and friendly culinary assistant. "
        "Your job is to help people with anything related to food, cooking, or cuisine.\n\n"
        "You can:\n"
        "- Explain what ingredients are, their uses, and their cultural background\n"
        "- Suggest recipes and meal ideas\n"
        "- Offer ingredient substitutions\n"
        "- Teach cooking techniques and science\n"
        "- Provide healthy diet adaptations\n"
        "- Explore global cuisines & traditions\n\n"
        "Keep your tone helpful and approachable. "
        "If a user asks about a food item (e.g., 'what are apples'), explain what it is and how it’s commonly used. "
        "If they ask what to cook with it, suggest a few recipes. "
        "If something is unrelated to food or cooking, politely redirect back to culinary topics."
    )
)
| |
|
| | |
# Page chrome: title/layout for the browser tab and the main heading.
st.set_page_config(page_title="Kitchen Buddy 👨🍳", layout="centered")
st.title("👨🍳 Kitchen Buddy")
| |
|
| | |
# Display name -> Hugging Face repository id.
# NOTE: repo ids are case-sensitive on the Hub; the Mistral organization is
# lowercase "mistralai" — the previous "Mistralai/..." spelling can fail to
# resolve when the endpoint is created.
available_models = {
    "Mistral-7B-Instruct-v0.2": "mistralai/Mistral-7B-Instruct-v0.2",
    "Llama-2-7B-Chat": "meta-llama/Llama-2-7b-chat-hf",
    "Qwen1.5-7B-Chat": "Qwen/Qwen1.5-7B-Chat",
}
| |
|
| | |
# Seed the model choice once per session; subsequent reruns keep the value.
st.session_state.setdefault("selected_model_key", "Mistral-7B-Instruct-v0.2")
| |
|
| | |
@st.cache_resource
def initialize_llm(repo_id):
    """Build and cache a chat model backed by a Hugging Face inference endpoint.

    Args:
        repo_id: Hugging Face repository id of the model to load.

    Returns:
        A ChatHuggingFace instance, or None when the API token is missing or
        the endpoint cannot be initialized.
    """
    if not HF_API_TOKEN:
        # No token: skip initialization; the page shows a warning instead.
        return None

    # NOTE: the previous version re-wrote HUGGINGFACEHUB_API_TOKEN into
    # os.environ with the exact value it was read from — a no-op, removed.
    try:
        llm = HuggingFaceEndpoint(
            repo_id=repo_id,
            task="text-generation",
            max_new_tokens=512,
            temperature=0.7,
            do_sample=True,
            repetition_penalty=1.03,
        )
        return ChatHuggingFace(llm=llm)
    except Exception as e:
        # Short message in the UI; full detail goes to the server console.
        st.error(f"❌ Failed to initialize {repo_id}. Check API key and model availability.")
        print(f"Detailed LLM init error for {repo_id}: {e}")
        return None
| |
|
def get_current_repo_id():
    """Resolve the session's selected model name to its HF repo id.

    Falls back to the Mistral entry when the stored key is unknown.
    """
    fallback = available_models["Mistral-7B-Instruct-v0.2"]
    return available_models.get(st.session_state.selected_model_key, fallback)
| |
|
| | |
# Instantiate (or fetch from the resource cache) the model for this session.
CHAT_MODEL = initialize_llm(get_current_repo_id())

# Header copy telling the user which backend is answering.
model_label = st.session_state.selected_model_key
header_md = (
    "\nYour friendly culinary assistant — ask about recipes, ingredients, and cooking techniques.\n"
    f"**🤖 Model in use:** `{model_label}`\n"
)
st.markdown(header_md)

# Initialization failure is non-fatal; surface it prominently on the page.
if CHAT_MODEL is None:
    st.warning("⚠️ Model initialization failed. Please check your HF API token and try reloading.")
| |
|
| | |
def new_chat():
    """Create a fresh conversation seeded with the system prompt and make it active."""
    chat_id = str(uuid.uuid4())
    st.session_state.chats[chat_id] = [SYSTEM_PROMPT]
    st.session_state.chat_titles[chat_id] = "New Chat"
    st.session_state.current_chat_id = chat_id
| |
|
# One-time session bootstrap: conversation store, titles, active chat id, and
# the flag that defers LLM generation to the next script run.
if "chats" not in st.session_state:
    st.session_state.chats = {}
    st.session_state.chat_titles = {}
    new_chat()
if "current_chat_id" not in st.session_state:
    new_chat()
st.session_state.setdefault("generate_next", False)
| |
|
| |
|
def get_current_messages() -> list[BaseMessage]:
    """Return the active conversation's message list.

    Falls back to a prompt-only list if the id is somehow missing.
    """
    active_id = st.session_state.current_chat_id
    return st.session_state.chats.get(active_id, [SYSTEM_PROMPT])
| |
|
def set_current_chat(chat_id):
    """Switch the UI to the conversation identified by chat_id."""
    st.session_state.current_chat_id = chat_id
| |
|
def convert_to_streamlit_message(msg: BaseMessage):
    """Map a LangChain message to a (content, role) pair for rendering.

    System messages are not rendered and yield (None, None); human messages
    map to "user", everything else to "assistant".
    """
    if isinstance(msg, SystemMessage):
        return None, None
    if isinstance(msg, HumanMessage):
        return msg.content, "user"
    return msg.content, "assistant"
| |
|
| |
|
| | |
# --- Sidebar: model selection, model reload, and chat-history navigation ---
# Widget call order is significant in Streamlit; keep it as-is.
with st.sidebar:

    st.subheader("🤖 Model Selector")
    selected_key = st.selectbox(
        "Choose a model:",
        options=list(available_models.keys()),
        index=list(available_models.keys()).index(st.session_state.selected_model_key),
        key="model_selector"
    )
    if selected_key != st.session_state.selected_model_key:
        st.session_state.selected_model_key = selected_key
        # Drop the cached model so the next run rebuilds it for the new repo id.
        st.cache_resource.clear()
        st.success(f"✅ Switched to {selected_key}. Reloading model...")
        st.rerun()

    # Manual reload for a stuck/failed endpoint without switching models.
    if st.button("🔄 Reload Current Model", use_container_width=True):
        st.cache_resource.clear()
        st.success("✅ Reloading model...")
        st.rerun()

    # A chat counts as "real" once it holds more than just the system prompt.
    has_real_chats = any(
        len(history) > 1 for history in st.session_state.chats.values()
    )

    if not has_real_chats:
        # Disabled placeholder plus onboarding copy until the first message.
        st.button("📭 No saved conversations yet", use_container_width=True, disabled=True)

        st.markdown("""
### 👨🍳 Welcome!
Ask me anything about cooking:
- Recipes and ideas
- Ingredient substitutions
- Cooking techniques
*Try asking:*
• "What can I make with apples?"
• "How do I cook pasta al dente?"
""")
    else:
        if st.button("🟥 New Chat", use_container_width=True):
            new_chat()
            st.rerun()

        st.markdown("---")
        st.subheader("📜 Chat History")

        # One switch button per non-empty chat; the active one is highlighted.
        # list(...) snapshots the items in case a button callback mutates state.
        for chat_id, title in list(st.session_state.chat_titles.items()):
            if len(st.session_state.chats.get(chat_id, [SYSTEM_PROMPT])) > 1:
                display_title = title
                is_current = chat_id == st.session_state.current_chat_id

                if st.button(
                    display_title,
                    key=f"chat_switch_{chat_id}",
                    type="primary" if is_current else "secondary",
                    use_container_width=True
                ):
                    set_current_chat(chat_id)
                    st.rerun()
| |
|
| | |
| |
|
| | |
| | |
# --- Chat input: record the user's message and gate it to culinary topics ---
if prompt := st.chat_input("Ask about a recipe, technique, or substitution..."):
    if CHAT_MODEL is None:
        # Model unavailable: record the question with an inline error reply.
        # st.rerun() raises, so execution stops here on this branch.
        st.session_state.chats[st.session_state.current_chat_id].append(HumanMessage(content=prompt))
        st.session_state.chats[st.session_state.current_chat_id].append(
            AIMessage(content="Error: Model is not initialized. Check API key setup.")
        )
        st.rerun()

    st.session_state.chats[st.session_state.current_chat_id].append(HumanMessage(content=prompt))

    # Substring allow-list used to keep the bot on food-related topics.
    # Matching is `keyword in prompt.lower()`, i.e. substring, not word match.
    culinary_keywords = [
        "cook", "cooking", "kitchen", "chef", "meal", "food", "dish", "recipe", "cuisine", "menu", "flavor", "taste",
        "ingredient", "spice", "herb", "oil", "salt", "pepper", "garlic", "onion", "tomato", "butter", "cheese",
        "meat", "beef", "pork", "chicken", "lamb", "fish", "seafood", "shrimp", "crab", "lobster",
        "vegetable", "fruit", "grain", "rice", "pasta", "bread", "noodles", "beans", "tofu", "egg",
        "bake", "roast", "grill", "barbecue", "bbq", "fry", "deep fry", "saute", "sauté", "boil", "steam", "poach",
        "simmer", "stew", "braise", "marinate", "blend", "chop", "slice", "dice", "whisk", "knead", "ferment",
        "soup", "salad", "sandwich", "burger", "pizza", "pasta", "stew", "curry", "sauce", "stir fry", "omelette",
        "dessert", "cake", "cookie", "pie", "pastry", "bread", "tart", "pudding", "ice cream",
        "italian", "french", "spanish", "greek", "mediterranean", "japanese", "chinese", "korean", "thai",
        "vietnamese", "indian", "mexican", "latin", "filipino", "turkish", "middle eastern", "moroccan",
        "vegan", "vegetarian", "gluten-free", "keto", "paleo", "halal", "kosher", "low-carb", "low-fat",
        "coffee", "tea", "smoothie", "wine", "cocktail", "beer", "drink", "juice",
        "thanksgiving", "christmas", "new year", "ramadan", "eid", "hanukkah", "valentine", "birthday", "party",
        "sous vide", "confit", "smoking", "curing", "pickling", "plating", "molecular gastronomy",
        "mise en place", "umami", "maillard reaction", "deglaçage", "roux", "stock", "broth",
        "truffle", "saffron", "caviar", "foie gras", "kimchi", "kombu", "nori", "tamarind", "matcha", "miso",
        "diabetic-friendly", "heart-healthy", "organic", "sustainable", "farm-to-table",
        "blender", "mixer", "pressure cooker", "air fryer", "cast iron", "oven", "microwave", "thermometer"
    ]

    # Common question openers that should also pass the filter.
    # NOTE(review): "what is" matches many off-topic questions too — the
    # system prompt is the real guard; consider tightening this list.
    culinary_phrases = [
        "what can i make with",
        "how do i cook",
        "how to cook",
        "how to make",
        "substitute for",
        "what is",
        "uses of"
    ]

    prompt_lower = prompt.lower()

    is_culinary = (
        any(word in prompt_lower for word in culinary_keywords) or
        any(phrase in prompt_lower for phrase in culinary_phrases)
    )

    if not is_culinary:
        # Off-topic: answer with a canned redirect, never invoking the LLM.
        restriction_msg = AIMessage(content="⚠️ I can only answer questions about cooking, recipes, ingredients, or culinary techniques. Please ask something food-related.")
        st.session_state.chats[st.session_state.current_chat_id].append(restriction_msg)
        st.rerun()
    else:
        # Defer generation to the next script run so the user's message is
        # rendered before the spinner appears (see the generate_next block).
        st.session_state.generate_next = True
        st.rerun()
| |
|
| | |
# --- Render the active conversation (system prompt at index 0 is skipped) ---
messages = get_current_messages()

# Greeting bubble for a brand-new chat that only holds the system prompt.
if len(messages) == 1 and isinstance(messages[0], SystemMessage):
    message("Start the conversation by typing your first culinary question below!", key="welcome_bubble")

for idx, chat_msg in enumerate(messages[1:], start=1):
    content, role = convert_to_streamlit_message(chat_msg)
    if not content:
        continue

    if role == "user":
        message(content, is_user=True, key=f"chat_user_{idx}")
    elif role == "assistant":
        # Recipe-looking answers get a highlighted chef header.
        prefix = "👨🍳 **Chef’s Recipe:**\n\n" if "recipe" in content.lower() else "👨🍳 "
        message(f"{prefix}{content}", key=f"chat_ai_{idx}")
| |
|
| |
|
| | |
# --- Deferred generation: runs on the rerun queued by the chat-input block ---
if st.session_state.generate_next:
    st.session_state.generate_next = False

    # NOTE: this is the SAME list object stored in session state, not a copy.
    full_history = get_current_messages()

    with st.spinner("👨🍳 Our culinary expert is crafting your response..."):
        try:
            # Capture the user's question BEFORE appending the reply: the
            # append below mutates full_history in place, so afterwards
            # full_history[-1] is the AI answer — the old code titled the
            # chat after the model's reply instead of the user's question.
            user_question = full_history[-1].content

            ai_message: AIMessage = CHAT_MODEL.invoke(full_history)
            st.session_state.chats[st.session_state.current_chat_id].append(ai_message)

            # First successful exchange names the chat after the question.
            if st.session_state.chat_titles[st.session_state.current_chat_id] == "New Chat":
                st.session_state.chat_titles[st.session_state.current_chat_id] = user_question[:30] + "..."
        except Exception as e:
            # Keep the UI usable on failure; full detail goes to the console.
            error_message = "I'm sorry, I encountered a brief issue while preparing the answer. Please try again."
            st.session_state.chats[st.session_state.current_chat_id].append(AIMessage(content=error_message))
            print(f"Full LLM invocation error: {e}")

    st.rerun()