DINMAY KUMAR BRAHMA committed
Commit • f2a0d4e • 1 Parent(s): 6df7b86
Commit message: add

Files changed:
- README.md +13 -0
- app.py +49 -0
- requirements.txt +5 -0
README.md
CHANGED
@@ -9,5 +9,18 @@ app_file: app.py
 pinned: false
 license: mit
 ---
+# Chatbot with Streamlit and LangChain
+
+This project implements a chatbot using Streamlit, LangChain, and the Mistral Chat API. The chatbot takes user input and responds with answers generated using the language model.
+
+## Features
+
+- User-friendly interface built with Streamlit.
+- Integration with LangChain for handling prompts.
+- API integration with Mistral for generating responses.
+- Environment variable management with `python-dotenv`.
+
+## Contributing
+**Contributions are welcome! Please open an issue or submit a pull request for any changes or improvements.**
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
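The README above notes that environment variables are managed with `python-dotenv`. The keys involved are the ones app.py (below) reads from a local `.env` file: `API_KEY` for the Mistral API and `LANGCHAIN_API_KEY` for LangSmith tracing. A minimal sketch for checking that those variables are available, assuming the same key names as app.py:

import os
from dotenv import load_dotenv

# Load key=value pairs from a local .env file into the process environment,
# then confirm the two keys app.py expects are actually present.
load_dotenv(".env")
for key in ("API_KEY", "LANGCHAIN_API_KEY"):
    print(f"{key}: {'set' if os.getenv(key) else 'MISSING'}")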
app.py
ADDED
@@ -0,0 +1,49 @@
+# make sure to install `langchain` and `langchain-mistralai` in your Python environment
+
+import os
+from langchain_mistralai import ChatMistralAI
+import streamlit as st
+from dotenv import load_dotenv
+from langchain_core.output_parsers import StrOutputParser
+
+# Initialize session state
+if 'result' not in st.session_state:
+    st.session_state.result = None
+
+# LangChain / LangSmith tracing settings
+load_dotenv(".env")
+os.environ["LANGCHAIN_PROJECT"] = "mistral_app"
+os.environ["LANGCHAIN_ENDPOINT"] = "https://api.smith.langchain.com"
+os.environ["LANGCHAIN_TRACING_V2"] = "true"
+os.environ["LANGCHAIN_API_KEY"] = os.getenv("LANGCHAIN_API_KEY", "")
+# Initialize the LLM (cached so it is created only once per session)
+@st.cache_resource
+def initialize_llm():
+    api_key = os.getenv("API_KEY")  # Mistral API key loaded from .env
+    mistral_model = "open-codestral-mamba"
+    return ChatMistralAI(
+        model=mistral_model,
+        temperature=0,
+        api_key=api_key,
+        max_tokens=2000,
+        random_seed=0
+    )
+
+llm = initialize_llm()
+parser = StrOutputParser()
+
+# Streamlit UI
+st.title("Chatbot (MistralAI)")
+input_text = st.text_input("Feel free to ask me anything")
+
+# Only make an API call when the Submit button is pressed
+if st.button("Submit") and input_text:
+    with st.spinner("Thinking..."):
+        output = llm.invoke([("user", input_text)])
+        st.session_state.result = parser.invoke(output)
+
+# Display results
+if input_text:
+    st.write("You said: " + input_text)
+if st.session_state.result:
+    st.write(st.session_state.result)
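The heart of app.py is the pair of calls `llm.invoke([("user", input_text)])` and `parser.invoke(output)`. The same flow can be exercised outside Streamlit; a minimal sketch, assuming `API_KEY` is set in the environment and reusing the model name from app.py:

import os
from langchain_mistralai import ChatMistralAI
from langchain_core.output_parsers import StrOutputParser

# Same model and settings as app.py, minus the Streamlit wrapper.
llm = ChatMistralAI(
    model="open-codestral-mamba",   # model name taken from app.py
    temperature=0,
    api_key=os.getenv("API_KEY"),   # same environment variable app.py reads
)
parser = StrOutputParser()

# LangChain runnables compose with |, so the chain returns the reply as a plain string.
chain = llm | parser
print(chain.invoke([("user", "Explain what StrOutputParser does.")]))

Piping the model into StrOutputParser is equivalent to calling `parser.invoke` on the message the model returns, which is exactly what app.py does inside the Submit handler.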
requirements.txt
ADDED
@@ -0,0 +1,5 @@
+langchain
+langchain-mistralai
+streamlit
+python-dotenv
+streamlit-feedback
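With these dependencies, a local clone of the Space can typically be set up with `pip install -r requirements.txt` and started with `streamlit run app.py`, with the `.env` file described above placed alongside app.py.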