mtyrrell committed on
Commit
82ef5f2
1 Parent(s): d10be35

Update app.py

Files changed (1)
app.py: +3 −1
app.py CHANGED
@@ -150,7 +150,8 @@ st.markdown('This tool seeks to provide an interface for quering national climat
 st.markdown('**DISCLAIMER:** *This prototype tool based on LLMs (Language Models) is provided "as is" for experimental and exploratory purposes only, and should not be used for critical or production applications. Users are advised that the tool may contain errors, bugs, or limitations and should be used with caution and awareness of potential risks, and the developers make no warranties or guarantees regarding its performance, reliability, or suitability for any specific purpose.*')
 
 # Dropdown selectbox: model
-model_sel = st.selectbox('Select an LLM:', model_options)
+# model_sel = st.selectbox('Select an LLM:', model_options)
+model_sel = "chatGPT"
 
 #----Model Select logic-------
 if model_sel == "chatGPT":
@@ -162,6 +163,7 @@ if model_sel == "chatGPT":
     pipe = Pipeline()
     pipe.add_node(component=pn, name="prompt_node", inputs=["Query"])
 else:
+    # Currently disabled
     model = "meta-llama/Llama-2-70b-chat-hf"
     # Instantiate the inference client
     client = InferenceClient()
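For context, after this commit the model-selection flow in app.py reduces to roughly the sketch below. The imports and the PromptNode setup for `pn` are assumptions added only to make the snippet self-contained; the hardcoded `model_sel`, the Pipeline wiring, and the disabled Llama-2 branch are what the diff actually shows.

```python
# Rough sketch of the model-selection logic after commit 82ef5f2.
# Assumed context: Haystack v1 (PromptNode/Pipeline) and huggingface_hub's
# InferenceClient, as suggested by the diff; the PromptNode arguments below
# are illustrative and not taken from app.py.
import os

from haystack import Pipeline
from haystack.nodes import PromptNode
from huggingface_hub import InferenceClient

# Dropdown selectbox: model
# model_sel = st.selectbox('Select an LLM:', model_options)
model_sel = "chatGPT"  # dropdown disabled; selection hardcoded by this commit

# ----Model Select logic-------
if model_sel == "chatGPT":
    # Assumed PromptNode construction; the real app.py configures pn elsewhere.
    pn = PromptNode(
        model_name_or_path="gpt-3.5-turbo",
        api_key=os.environ.get("OPENAI_API_KEY"),
    )
    pipe = Pipeline()
    pipe.add_node(component=pn, name="prompt_node", inputs=["Query"])
else:
    # Currently disabled
    model = "meta-llama/Llama-2-70b-chat-hf"
    # Instantiate the inference client
    client = InferenceClient()
```

With `model_sel` hardcoded to "chatGPT", the `else` branch (the Llama-2-70b path via InferenceClient) is unreachable until the selectbox is restored.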