from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import ChatPromptTemplate
import streamlit as st
import os

# Read the Google API key from the environment (set the "api_key" variable before running).
google_api_key = os.getenv("api_key")

# Gemini chat model: deterministic output (temperature=0), capped at 160 output tokens.
model = ChatGoogleGenerativeAI(
    temperature=0,
    model="gemini-1.5-pro",
    google_api_key=google_api_key,
    max_output_tokens=160,
)

# Prompt template: the system message sets the chef persona, the human message carries the dish name.
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a good chef. You are a helpful assistant that suggests how to cook a dish."),
    ("human", "Give a proper way with mentioning ingredients\n{input}"),
])

st.title("Dish Maker")
st.write("Enter the dish you want to make:")

# User input
dish = st.text_area("Dish")

if st.button("Generate process"):
    # Pipe the prompt into the model (LCEL) and invoke the chain with the user's dish.
    chain = prompt | model
    response = chain.invoke({"input": dish})
    if response:
        st.write("Generated Cooking Process:")
        st.write(response.content)
    else:
        st.write("No response from the model. Please try again.")
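To try the app locally, set the `api_key` environment variable to a valid Gemini API key from Google AI Studio and start the script with Streamlit, e.g. `streamlit run app.py` (the filename `app.py` is an assumption; use whatever name the file is saved under). Typing a dish into the text area and clicking "Generate process" sends the prompt to the model and renders the returned cooking steps on the page.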