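A small Streamlit + LangChain demo: it sends the user's question to the deepseek/deepseek-r1-zero model through OpenRouter's OpenAI-compatible endpoint and displays the reply.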
from langchain.chat_models import ChatOpenAI  # newer LangChain releases move this to: from langchain_openai import ChatOpenAI
from langchain.schema import HumanMessage
from dotenv import load_dotenv
import streamlit as st
import os

# Load environment variables (e.g. the OpenRouter API key) from a local .env file
load_dotenv()

# Build a ChatOpenAI client pointed at OpenRouter and return the model's reply
def get_model_response(question):
    llm = ChatOpenAI(
        base_url="https://openrouter.ai/api/v1",
        openai_api_key=os.getenv("OPENROUTE_API_KEY"),
        model="deepseek/deepseek-r1-zero:free",
        temperature=0.4,
    )
    response = llm.invoke([HumanMessage(content=question)])
    return response

st.set_page_config(page_title="Chat With your love ❤️")
st.header("LangChain Application")

user_question = st.text_input("Input: ", key="input")
submit = st.button("Ask")

# Only call the model when the button is clicked and a question was entered
if submit and user_question:
    response = get_model_response(user_question)
    st.subheader("The Response is:")
    st.write(response.content)
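To try it locally, one workable setup (the file name app.py is only an assumption, not fixed by the code above) is to place a .env file next to the script containing a line OPENROUTE_API_KEY=<your OpenRouter key>, install the dependencies the imports require (streamlit, langchain, openai, python-dotenv), and launch the app with:

streamlit run app.py

Streamlit then prints a local URL where the input box and "Ask" button are served.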