neuralleap committed on
Commit
0d7b757
β€’
1 Parent(s): 5d0668d

Upload 4 files

Browse files
Files changed (4) hide show
  1. README.md +23 -13
  2. app.py +106 -0
  3. groqcloud_darkmode.png +0 -0
  4. requirements.txt +154 -0
README.md CHANGED
@@ -1,13 +1,23 @@
1
- ---
2
- title: Llama3 70B Step1
3
- emoji: πŸ“ˆ
4
- colorFrom: gray
5
- colorTo: pink
6
- sdk: streamlit
7
- sdk_version: 1.34.0
8
- app_file: app.py
9
- pinned: false
10
- license: mit
11
- ---
12
-
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
1
+ # Groq LangChain Conversational Chatbot
2
+
3
+ This repository contains a Streamlit application that allows users to interact with a conversational chatbot built with the LangChain framework. The application uses the Groq API to generate responses and maintains a history of the conversation to provide context for the chatbot's responses.
4
+
5
+ ## Features
6
+
7
+ - **Conversational Interface**: The application provides a conversational interface where users can ask questions or make statements, and the chatbot responds accordingly.
8
+
9
+ - **Contextual Responses**: The application maintains a history of the conversation, which is used to provide context for the chatbot's responses.
10
+
11
+ - **LangChain Integration**: The chatbot is built with the LangChain framework, which orchestrates calls to Groq-hosted language models to generate human-like responses.
12
+
13
+ ## Usage
14
+
15
+ To use this application, you need to have Streamlit and the other required Python libraries installed. You also need to have a Groq API key, which you can obtain by signing up on the Groq website.
16
+
17
+ Once you have the necessary requirements, you can run the application by executing the script with Streamlit:
18
+
19
+ ```shell
20
+ streamlit run app.py
21
+ ```
22
+
23
+ This will start the Streamlit server and open the application in your web browser. You can then interact with the chatbot, and the application will generate responses based on the history of the conversation.
app.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ from groq import Groq
4
+ import random
5
+
6
+ from langchain.chains import ConversationChain, LLMChain
7
+ from langchain_core.prompts import (
8
+ ChatPromptTemplate,
9
+ HumanMessagePromptTemplate,
10
+ MessagesPlaceholder,
11
+ )
12
+ from langchain_core.messages import SystemMessage
13
+ from langchain.chains.conversation.memory import ConversationBufferWindowMemory
14
+ from langchain_groq import ChatGroq
15
+ from langchain.prompts import PromptTemplate
16
+
17
+
18
def main():
    """
    Entry point for the Streamlit chatbot application.

    Sets up the Groq client, builds the page layout and sidebar controls,
    wires up the LangChain conversation chain (system prompt + windowed
    memory + user input), and handles one question/answer round trip per
    Streamlit rerun.
    """
    # Fail with a clear on-page message instead of an uncaught KeyError
    # when the API key is not configured in the environment.
    groq_api_key = os.environ.get('GROQ_API_KEY')
    if not groq_api_key:
        st.error("GROQ_API_KEY environment variable is not set.")
        st.stop()

    # Display the Groq logo in the top-right corner of the page.
    spacer, col = st.columns([5, 1])
    with col:
        st.image('groqcloud_darkmode.png')

    # The title and greeting message of the Streamlit application.
    st.title("Chat with Groq!")
    st.write("Hello! I'm your friendly Groq chatbot. I can help answer your questions, provide information, or just chat. I'm also super fast! Let's start our conversation!")

    # Sidebar customization: system prompt, model choice, memory window.
    st.sidebar.title('Customization')
    system_prompt = st.sidebar.text_input("System prompt:")
    model = st.sidebar.selectbox(
        'Choose a model',
        ['llama3-8b-8192', 'mixtral-8x7b-32768', 'gemma-7b-it']
    )
    conversational_memory_length = st.sidebar.slider('Conversational memory length:', 1, 10, value=5)

    # Windowed memory: only the last k exchanges are fed back as context.
    memory = ConversationBufferWindowMemory(k=conversational_memory_length, memory_key="chat_history", return_messages=True)

    user_question = st.text_input("Ask a question:")

    # Streamlit reruns this script on every interaction, so the durable
    # chat history lives in session state and is replayed into the
    # freshly created memory object on each rerun.
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []
    else:
        for message in st.session_state.chat_history:
            memory.save_context(
                {'input': message['human']},
                {'output': message['AI']}
            )

    # Initialize the Groq-backed LangChain chat model.
    groq_chat = ChatGroq(
        groq_api_key=groq_api_key,
        model_name=model
    )

    # If the user has asked a question, run one chain invocation.
    if user_question:
        # Construct the chat prompt:
        #   1. persistent system prompt, always first,
        #   2. placeholder replaced by the replayed chat history,
        #   3. template where the current user input is injected.
        prompt = ChatPromptTemplate.from_messages(
            [
                SystemMessage(
                    content=system_prompt
                ),
                MessagesPlaceholder(
                    variable_name="chat_history"
                ),
                HumanMessagePromptTemplate.from_template(
                    "{human_input}"
                ),
            ]
        )

        # Chain the chat model, prompt template, and memory together.
        conversation = LLMChain(
            llm=groq_chat,
            prompt=prompt,
            verbose=True,  # prints the rendered prompt; useful for debugging
            memory=memory,
        )

        # The chatbot's answer is generated by sending the full prompt to
        # the Groq API; the exchange is then recorded in session state.
        response = conversation.predict(human_input=user_question)
        message = {'human': user_question, 'AI': response}
        st.session_state.chat_history.append(message)
        st.write("Chatbot:", response)
100
# Run the app when executed directly (e.g. via `streamlit run app.py`).
if __name__ == "__main__":
    main()
102
+
103
+
104
+
105
+
106
+
groqcloud_darkmode.png ADDED
requirements.txt ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aiohttp==3.9.3
2
+ aiosignal==1.3.1
3
+ altair==5.2.0
4
+ annotated-types==0.6.0
5
+ anyio==3.6.2
6
+ appnope==0.1.3
7
+ argon2-cffi==21.3.0
8
+ argon2-cffi-bindings==21.2.0
9
+ asttokens==2.0.8
10
+ async-timeout==4.0.3
11
+ attrs==22.1.0
12
+ backcall==0.2.0
13
+ beautifulsoup4==4.11.1
14
+ bleach==5.0.1
15
+ blinker==1.7.0
16
+ cachetools==5.3.2
17
+ certifi==2024.2.2
18
+ cffi==1.15.1
19
+ charset-normalizer==3.3.2
20
+ click==8.1.7
21
+ dataclasses-json==0.6.4
22
+ debugpy==1.6.3
23
+ decorator==5.1.1
24
+ defusedxml==0.7.1
25
+ distro==1.9.0
26
+ duckdb==0.9.2
27
+ entrypoints==0.4
28
+ executing==1.1.1
29
+ fastjsonschema==2.16.2
30
+ filelock==3.13.1
31
+ frozenlist==1.4.1
32
+ fsspec==2024.2.0
33
+ gitdb==4.0.11
34
+ GitPython==3.1.41
35
+ groq==0.4.1
36
+ h11==0.14.0
37
+ httpcore==1.0.2
38
+ httpx==0.26.0
39
+ huggingface-hub==0.20.3
40
+ idna==3.4
41
+ importlib-metadata==7.0.1
42
+ install==1.3.5
43
+ ipykernel==6.16.1
44
+ ipython==8.5.0
45
+ ipython-genutils==0.2.0
46
+ ipywidgets==8.0.2
47
+ jedi==0.18.1
48
+ Jinja2==3.1.2
49
+ joblib==1.3.2
50
+ jsonpatch==1.33
51
+ jsonpointer==2.4
52
+ jsonschema==4.16.0
53
+ jupyter==1.0.0
54
+ jupyter-console==6.4.4
55
+ jupyter-server==1.21.0
56
+ jupyter_client==7.4.3
57
+ jupyter_core==4.11.2
58
+ jupyterlab-pygments==0.2.2
59
+ jupyterlab-widgets==3.0.3
60
+ langchain==0.1.5
61
+ langchain-community==0.0.19
62
+ langchain-core==0.1.21
63
+ langchain-groq==0.0.1
64
+ langsmith==0.0.87
65
+ markdown-it-py==3.0.0
66
+ MarkupSafe==2.1.1
67
+ marshmallow==3.20.2
68
+ matplotlib-inline==0.1.6
69
+ mdurl==0.1.2
70
+ mistune==2.0.4
71
+ mpmath==1.3.0
72
+ multidict==6.0.5
73
+ mypy-extensions==1.0.0
74
+ nbclassic==0.4.5
75
+ nbclient==0.7.0
76
+ nbconvert==7.2.2
77
+ nbformat==5.7.0
78
+ nest-asyncio==1.5.6
79
+ networkx==3.2.1
80
+ nltk==3.8.1
81
+ notebook==6.5.1
82
+ notebook_shim==0.2.0
83
+ numpy==1.23.4
84
+ openai==1.12.0
85
+ packaging==23.2
86
+ pandas==1.5.1
87
+ pandocfilters==1.5.0
88
+ parso==0.8.3
89
+ pexpect==4.8.0
90
+ pickleshare==0.7.5
91
+ pillow==10.2.0
92
+ pinecone-client==3.0.2
93
+ prometheus-client==0.15.0
94
+ prompt-toolkit==3.0.31
95
+ protobuf==4.25.2
96
+ psutil==5.9.3
97
+ ptyprocess==0.7.0
98
+ pure-eval==0.2.2
99
+ pyarrow==15.0.0
100
+ pycparser==2.21
101
+ pydantic==2.6.1
102
+ pydantic_core==2.16.2
103
+ pydeck==0.8.1b0
104
+ Pygments==2.13.0
105
+ pyparsing==3.0.9
106
+ pyrsistent==0.18.1
107
+ python-dateutil==2.8.2
108
+ pytz==2022.5
109
+ PyYAML==6.0
110
+ pyzmq==24.0.1
111
+ qtconsole==5.3.2
112
+ QtPy==2.2.1
113
+ regex==2023.12.25
114
+ requests==2.31.0
115
+ rich==13.7.0
116
+ safetensors==0.4.2
117
+ scikit-learn==1.4.0
118
+ scipy==1.12.0
119
+ Send2Trash==1.8.0
120
+ sentence-transformers==2.3.1
121
+ sentencepiece==0.1.99
122
+ six==1.16.0
123
+ smmap==5.0.1
124
+ sniffio==1.3.0
125
+ soupsieve==2.3.2.post1
126
+ SQLAlchemy==2.0.25
127
+ sqlparse==0.4.4
128
+ stack-data==0.5.1
129
+ streamlit==1.31.0
130
+ sympy==1.12
131
+ tenacity==8.2.3
132
+ terminado==0.16.0
133
+ threadpoolctl==3.2.0
134
+ tiktoken==0.6.0
135
+ tinycss2==1.2.1
136
+ tokenizers==0.15.1
137
+ toml==0.10.2
138
+ toolz==0.12.1
139
+ torch==2.2.0
140
+ tornado==6.2
141
+ tqdm==4.66.1
142
+ traitlets==5.5.0
143
+ transformers==4.37.2
144
+ typing-inspect==0.9.0
145
+ typing_extensions==4.9.0
146
+ tzlocal==5.2
147
+ urllib3==2.2.0
148
+ validators==0.22.0
149
+ wcwidth==0.2.5
150
+ webencodings==0.5.1
151
+ websocket-client==1.4.1
152
+ widgetsnbextension==4.0.3
153
+ yarl==1.9.4
154
+ zipp==3.17.0