Commit cfc7185 by AlexanderKazakov (parent: 1f71841)

add falcon-180B-chat

gradio_app/app.py CHANGED
@@ -165,7 +165,8 @@ with gr.Blocks() as demo:
             choices=[
                 "gpt-3.5-turbo",
                 "mistralai/Mistral-7B-Instruct-v0.1",
-                "GeneZC/MiniChat-3B",
+                "tiiuae/falcon-180B-chat",
+                # "GeneZC/MiniChat-3B",
             ],
             value="gpt-3.5-turbo",
             label='LLM'
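
For context, a minimal sketch of the component this hunk edits, assuming the list feeds a `gr.Dropdown` model selector inside the Blocks layout (the variable name `llm_selector` and the surrounding layout are assumptions, not part of the commit):

```python
import gradio as gr

with gr.Blocks() as demo:
    # Assumed reconstruction of the model-selector dropdown edited above;
    # only choices/value/label are taken from the actual diff.
    llm_selector = gr.Dropdown(
        choices=[
            "gpt-3.5-turbo",
            "mistralai/Mistral-7B-Instruct-v0.1",
            "tiiuae/falcon-180B-chat",
            # "GeneZC/MiniChat-3B",
        ],
        value="gpt-3.5-turbo",
        label='LLM',
    )
```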
gradio_app/backend/HuggingfaceGenerator.py CHANGED
@@ -27,7 +27,7 @@ class HuggingfaceGenerator:
         top_p: float = None, repetition_penalty: float = None,
         stream: bool = True,
     ):
-        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
+        self.tokenizer = AutoTokenizer.from_pretrained(model_name, token=HF_TOKEN)
         self.hf_client = InferenceClient(model_name, token=HF_TOKEN)
         self.stream = stream
 
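The only change here is passing `token=HF_TOKEN` when loading the tokenizer, which matters for gated checkpoints such as tiiuae/falcon-180B-chat. A standalone sketch of the same calls, assuming the token is read from the environment (the env var name is an assumption; the app keeps it in `HF_TOKEN`):

```python
import os

from huggingface_hub import InferenceClient
from transformers import AutoTokenizer

# Assumption: the token is provided via the environment.
HF_TOKEN = os.environ["HUGGING_FACE_HUB_TOKEN"]

model_name = "tiiuae/falcon-180B-chat"
# Without token= the tokenizer download can fail for gated models;
# the Inference API client already received the token before this commit.
tokenizer = AutoTokenizer.from_pretrained(model_name, token=HF_TOKEN)
client = InferenceClient(model_name, token=HF_TOKEN)
```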
gradio_app/backend/query_llm.py CHANGED
@@ -53,7 +53,7 @@ def construct_openai_messages(context, history):
 def get_message_constructor(llm_name):
     if llm_name == 'gpt-3.5-turbo':
         return construct_openai_messages
-    if llm_name in ['mistralai/Mistral-7B-Instruct-v0.1', "GeneZC/MiniChat-3B"]:
+    if llm_name in ['mistralai/Mistral-7B-Instruct-v0.1', "tiiuae/falcon-180B-chat", "GeneZC/MiniChat-3B"]:
         return construct_mistral_messages
     raise ValueError('Unknown LLM name')
 
@@ -64,7 +64,7 @@ def get_llm_generator(llm_name):
             model_name=llm_name, max_tokens=512, temperature=0, stream=True
         )
         return cgi.chat_completion
-    if llm_name == 'mistralai/Mistral-7B-Instruct-v0.1':
+    if llm_name == 'mistralai/Mistral-7B-Instruct-v0.1' or llm_name == "tiiuae/falcon-180B-chat":
         hfg = HuggingfaceGenerator(
             model_name=llm_name, temperature=0, max_new_tokens=512,
         )
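
Both routing helpers now recognize the Falcon model: it reuses the Mistral-style prompt construction and the HuggingfaceGenerator backend. A hedged usage sketch (the import path mirrors the file location; how the app actually wires these together is not shown in this commit):

```python
from gradio_app.backend.query_llm import get_llm_generator, get_message_constructor

llm_name = "tiiuae/falcon-180B-chat"
construct_messages = get_message_constructor(llm_name)  # -> construct_mistral_messages after this commit
generate = get_llm_generator(llm_name)                   # -> HuggingfaceGenerator-backed streaming generator
```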
settings.py CHANGED
@@ -27,6 +27,7 @@ thresh_distances = {
 
 context_lengths = {
     "mistralai/Mistral-7B-Instruct-v0.1": 4096,
+    "tiiuae/falcon-180B-chat": 2048,
     "GeneZC/MiniChat-3B": 4096,
     "gpt-3.5-turbo": 4096,
     "sentence-transformers/all-MiniLM-L6-v2": 128,