alfraser committed
Commit 19f408b · 1 Parent(s): 08cb350

Tidied up the models page and added another model just to get multi-model view

Files changed (2)
  1. config/models.json +5 -0
  2. pages/005_LLM_Models.py +9 -5
config/models.json CHANGED
@@ -4,6 +4,11 @@
       "name": "Llama2 Chat 7B",
       "id": "meta-llama/Llama-2-7b-chat-hf",
       "description": "The unmodified 7 billion parameter version of the llama 2 chat model from meta."
+    },
+    {
+      "name": "Llama2 Chat 13B",
+      "id": "meta-llama/Llama-2-13b-chat-hf",
+      "description": "The unmodified 13 billion parameter version of the llama 2 chat model from meta."
     }
   ]
 }
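
As a quick sanity check of the new config, a minimal sketch could load the file and list both configured models. The repo-relative path and the top-level "models" key are assumptions here; the opening lines of the file sit outside the visible hunk.

import json

# Assumed path and wrapper key; neither appears in the hunk above.
with open('config/models.json') as f:
    config = json.load(f)

for entry in config['models']:
    print(f"{entry['name']} -> {entry['id']}")
# Expected output after this commit:
#   Llama2 Chat 7B -> meta-llama/Llama-2-7b-chat-hf
#   Llama2 Chat 13B -> meta-llama/Llama-2-13b-chat-hf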
pages/005_LLM_Models.py CHANGED
@@ -7,6 +7,9 @@ if st_setup('LLM Models'):
     st.write("# LLM Models")
     st.write("The project uses a number of different models which are deployed with other components to form a variety of architectures. This page lists those models, and allows users to interact in isolation just with the model directly, excluding any other architecture components.")
 
+    if st.button('Force reload of models config'):
+        HFLlamaChatModel.load_configs()
+
     SESSION_KEY_CHAT_SERVER = 'chat_server'
     HF_AUTH_KEY_SECRET = 'hf_token'
     button_count = 0
@@ -28,18 +31,19 @@ if st_setup('LLM Models'):
     st.write(f'### {server_count} models configured')
 
     with st.container():
-        for i, m_name in enumerate(HFLlamaChatModel.available_models()):
+        st.divider()
+        for i, m in enumerate(HFLlamaChatModel.models):
             with st.container():  # row
                 content, actions = st.columns([4, 1])
                 with content:
-                    st.write(m_name)
+                    st.write(f'**{m.name}** \n\n _{m.description}_')
 
                 with actions:
                     if st.button("Chat with this model", key=button_key()):
-                        st.session_state[SESSION_KEY_CHAT_SERVER] = m_name
+                        st.session_state[SESSION_KEY_CHAT_SERVER] = m.name
                         st.rerun()
-            if i != len(HFLlamaChatModel.available_models()) - 1:
-                st.divider()
+
+            st.divider()
 
     if SESSION_KEY_CHAT_SERVER in st.session_state:
         with chat_container:
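
The page now iterates HFLlamaChatModel.models and re-reads the config via HFLlamaChatModel.load_configs(), but neither is part of this commit. A minimal sketch of the shape the page relies on, with field names inferred from the usage above (m.name, m.description) and the config path assumed, might look like this:

import json
from dataclasses import dataclass
from typing import ClassVar, List


@dataclass
class ModelConfig:
    # Attribute names mirror what the page reads; the real class in the repo may differ.
    name: str
    id: str
    description: str


class HFLlamaChatModel:
    # Class-level list the page iterates over; a sketch, not the repo's implementation.
    models: ClassVar[List[ModelConfig]] = []

    @classmethod
    def load_configs(cls, path: str = 'config/models.json') -> None:
        # Re-read the JSON config so the 'Force reload of models config' button
        # picks up edits without restarting the Streamlit app.
        with open(path) as f:
            config = json.load(f)
        cls.models = [ModelConfig(**entry) for entry in config['models']]

With something of that shape in place, the new button simply calls HFLlamaChatModel.load_configs() and the model list re-renders on the next Streamlit run.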