SiraH committed on
Commit
699053c
1 Parent(s): 6568681

Update app.py

Files changed (1)
  1. app.py +7 -4
app.py CHANGED
@@ -26,6 +26,9 @@ from langchain.memory.chat_message_histories.streamlit import StreamlitChatMessa
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.llms import HuggingFaceHub
 
+SECRET_TOKEN = os.getenv("HF_TOKEN")
+os.environ["HUGGINGFACEHUB_API_TOKEN"] = SECRET_TOKEN
+
 
 # sidebar contents
 with st.sidebar:
@@ -87,11 +90,11 @@ def main():
     if "messages" not in st.session_state:
         st.session_state.messages = []
 
-    # repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
-    # llm = HuggingFaceHub(
-    #     repo_id=repo_id, model_kwargs={"temperature": 0.1, "max_length": 128})
+    repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
+    llm = HuggingFaceHub(
+        repo_id=repo_id, model_kwargs={"temperature": 0.1, "max_length": 128})
 
-    llm = load_llama2_llamaCpp()
+    # llm = load_llama2_llamaCpp()
     qa_prompt = set_custom_prompt()
     embeddings = load_embeddings()
 
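
Net effect of the commit: the app now reads a Hugging Face token from the HF_TOKEN secret, exports it as HUGGINGFACEHUB_API_TOKEN, and serves answers through the hosted Mistral-7B-Instruct-v0.2 endpoint instead of the local load_llama2_llamaCpp() model. A minimal sketch of the new code path is below, assuming "import os" is already present in app.py and HF_TOKEN is set as a Space secret; the prompt string and the final invoke call are only illustrative, since in app.py the llm is handed to the QA chain rather than called directly.

import os
from langchain_community.llms import HuggingFaceHub

# Export the Space secret under the variable name the HuggingFaceHub client reads.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = os.getenv("HF_TOKEN")

# Same construction as in the commit: hosted Mistral endpoint, low temperature,
# short completion budget.
llm = HuggingFaceHub(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
    model_kwargs={"temperature": 0.1, "max_length": 128},
)

# Illustrative standalone call to verify the endpoint responds.
print(llm.invoke("Summarize retrieval-augmented generation in one sentence."))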