lfoppiano committed
Commit 7cdc620
Parent: 132d41d

add LLama2-70b

Files changed (1):
  1. streamlit_app.py (+20, -7)
streamlit_app.py CHANGED
@@ -57,6 +57,11 @@ def init_qa(model):
                               model_kwargs={"temperature": 0.01, "max_length": 4096, "max_new_tokens": 2048})
         embeddings = HuggingFaceEmbeddings(
             model_name="all-MiniLM-L6-v2")
+    elif model == 'llama-2-70b-chat':
+        chat = HuggingFaceHub(repo_id="meta-llama/Llama-2-70b-chat-hf",
+                              model_kwargs={"temperature": 0.01, "max_length": 4096, "max_new_tokens": 2048})
+        embeddings = HuggingFaceEmbeddings(
+            model_name="all-MiniLM-L6-v2")
 
     return DocumentQAEngine(chat, embeddings, grobid_url=os.environ['GROBID_URL'])
 
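The new elif branch mirrors the existing Mistral-7B setup: same temperature and length settings, same all-MiniLM-L6-v2 sentence embeddings, only the HuggingFaceHub repo_id changes to meta-llama/Llama-2-70b-chat-hf. A minimal sketch of how the branch would be exercised (not part of the commit; the token value and GROBID endpoint below are placeholders):

import os

# Placeholders -- supply a real Hugging Face token and a reachable GROBID instance.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "hf_..."
os.environ["GROBID_URL"] = "http://localhost:8070"

# init_qa() returns a DocumentQAEngine wired to Llama-2-70b-chat via HuggingFaceHub,
# with all-MiniLM-L6-v2 used for the document embeddings.
rqa = init_qa("llama-2-70b-chat")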
@@ -82,26 +87,31 @@ def play_old_messages():
         else:
             st.write(message['content'])
 
+
 is_api_key_provided = st.session_state['api_key']
 
-model = st.sidebar.radio("Model (cannot be changed after selection or upload)", ("chatgpt-3.5-turbo", "mistral-7b-instruct-v0.1"),
+model = st.sidebar.radio("Model (cannot be changed after selection or upload)",
+                         ("chatgpt-3.5-turbo", "mistral-7b-instruct-v0.1", "llama-2-70b-chat"),
                          index=1,
                          captions=[
                              "ChatGPT 3.5 Turbo + Ada-002-text (embeddings)",
-                             "Mistral-7B-Instruct-V0.1 + Sentence BERT (embeddings)"
+                             "Mistral-7B-Instruct-V0.1 + Sentence BERT (embeddings)",
+                             "LLama2-70B-Chat + Sentence BERT (embeddings)",
                          ],
                          help="Select the model you want to use.",
                          disabled=is_api_key_provided)
 
 if not st.session_state['api_key']:
-    if model == 'mistral-7b-instruct-v0.1':
-        api_key = st.sidebar.text_input('Huggingface API Key') if 'HUGGINGFACEHUB_API_TOKEN' not in os.environ else os.environ['HUGGINGFACEHUB_API_TOKEN']
+    if model == 'mistral-7b-instruct-v0.1' or 'llama-2-70b-chat':
+        api_key = st.sidebar.text_input('Huggingface API Key') if 'HUGGINGFACEHUB_API_TOKEN' not in os.environ else \
+            os.environ['HUGGINGFACEHUB_API_TOKEN']
         if api_key:
             st.session_state['api_key'] = is_api_key_provided = True
             os.environ["HUGGINGFACEHUB_API_TOKEN"] = api_key
             st.session_state['rqa'] = init_qa(model)
     elif model == 'chatgpt-3.5-turbo':
-        api_key = st.sidebar.text_input('OpenAI API Key') if 'OPENAI_API_KEY' not in os.environ else os.environ['OPENAI_API_KEY']
+        api_key = st.sidebar.text_input('OpenAI API Key') if 'OPENAI_API_KEY' not in os.environ else os.environ[
+            'OPENAI_API_KEY']
         if api_key:
             st.session_state['api_key'] = is_api_key_provided = True
             os.environ['OPENAI_API_KEY'] = api_key
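One thing to flag in the hunk above: in Python the added condition if model == 'mistral-7b-instruct-v0.1' or 'llama-2-70b-chat': is always truthy, because the non-empty string literal to the right of "or" is evaluated on its own, so the elif model == 'chatgpt-3.5-turbo': branch can never be reached. The intent is presumably a membership test; a sketch of that check:

# Presumed intent of the committed condition (a non-empty string after "or" is
# truthy by itself, so the original check always takes the Hugging Face path):
if model in ('mistral-7b-instruct-v0.1', 'llama-2-70b-chat'):
    api_key = st.sidebar.text_input('Huggingface API Key')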
@@ -137,7 +147,8 @@ question = st.chat_input(
 with st.sidebar:
     st.header("Documentation")
     st.markdown("https://github.com/lfoppiano/document-qa")
-    st.markdown("""After entering your API Key (Open AI or Huggingface). Upload a scientific article as PDF document, click on the designated button and select the file from your device.""")
+    st.markdown(
+        """After entering your API Key (Open AI or Huggingface). Upload a scientific article as PDF document, click on the designated button and select the file from your device.""")
 
     st.markdown(
         """After uploading, please wait for the PDF to be processed. You will see a spinner or loading indicator while the processing is in progress. Once the spinner stops, you can proceed to ask your questions.""")
@@ -158,7 +169,9 @@ if uploaded_file and not st.session_state.loaded_embeddings:
     tmp_file = NamedTemporaryFile()
     tmp_file.write(bytearray(binary))
     # hash = get_file_hash(tmp_file.name)[:10]
-    st.session_state['doc_id'] = hash = st.session_state['rqa'].create_memory_embeddings(tmp_file.name, chunk_size=250, perc_overlap=0.1)
+    st.session_state['doc_id'] = hash = st.session_state['rqa'].create_memory_embeddings(tmp_file.name,
+                                                                                          chunk_size=250,
+                                                                                          perc_overlap=0.1)
     st.session_state['loaded_embeddings'] = True
 
     # timestamp = datetime.utcnow()
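For reference, the reflowed create_memory_embeddings call keeps the same parameters as before: chunk_size=250 and perc_overlap=0.1. Assuming perc_overlap is interpreted as a fraction of the chunk size (an assumption about DocumentQAEngine, not something stated in this diff), consecutive chunks would share roughly:

# Assumed overlap between consecutive chunks under that reading:
overlap = int(250 * 0.1)  # 25 tokens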
 