# Simple LangChain + Groq chatbot served with Streamlit.
# --- Imports & app setup --------------------------------------------------
# Grouped per PEP 8: stdlib first, then third-party (alphabetical).
import os

import streamlit as st
from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq

# Load GROQ_API_KEY (and any other settings) from a local .env file.
load_dotenv()

st.title("Simple LangChain Chat-Bot")
# Two-message chat prompt: a fixed system instruction plus the user's
# question, injected via the {question} template variable at invoke time.
# (Typos in the system message fixed: "a AI asisstant" -> "an AI assistant".)
prompt = ChatPromptTemplate.from_messages([
    ('system', 'You are an AI assistant and your work is to answer the user question'),
    ('human', "question:{question}"),
])
def simple_function(prompt, question, temperature, llm):
    """Run the prompt -> model -> parser chain and return the answer text.

    Args:
        prompt: A ChatPromptTemplate expecting a ``question`` variable.
        question: The user's question text.
        temperature: Sampling temperature forwarded to the Groq model.
        llm: Name of the Groq model to use (e.g. ``llama-3.1-8b-instant``).

    Returns:
        The model's response parsed to a plain string.
    """
    # Bug fix: the original accepted ``temperature`` but never passed it to
    # ChatGroq, so the sidebar slider had no effect. It is now forwarded.
    # Also avoid rebinding the ``llm`` parameter (it held the model *name*).
    model = ChatGroq(
        model=llm,
        temperature=temperature,
        api_key=os.getenv("GROQ_API_KEY"),
    )
    chain = prompt | model | StrOutputParser()
    return chain.invoke({"question": question})
# --- UI: question input, model/temperature controls, response ------------
question = st.text_area("Ask any question")
llm = st.sidebar.selectbox(
    'Choose a Model',
    ['llama-3.3-70b-versatile', 'llama-3.1-8b-instant', 'llama3-70b-8192'],
)
temperature = st.sidebar.slider(
    "Set the temperature", min_value=0.0, max_value=1.0, value=0.7,
)

if st.button("Response the given question"):
    if question:
        # Bug fix: st.spinner is a context manager; the original used it as an
        # ``if`` condition, so the spinner never wrapped the model call.
        with st.spinner("Waiting..."):
            st.write(simple_function(prompt, question, temperature, llm))
    else:
        # Previously an empty question silently did nothing; tell the user.
        st.warning("Please enter a question first.")