Staticaliza committed on
Commit
514b2f9
1 Parent(s): 05764b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -11,8 +11,8 @@ KEY = os.environ.get("KEY")
11
 
12
  SPECIAL_SYMBOLS = ["⠀", "⠀"] # ["‹", "›"] ['"', '"']
13
 
14
- DEFAULT_INPUT = f"User: Hi!"
15
- DEFAULT_PREOUTPUT = f"Statical: "
16
  DEFAULT_INSTRUCTION = "Statical is a helpful chatbot who is communicating with people."
17
 
18
  DEFAULT_STOPS = '["⠀", "⠀"]' # '["‹", "›"]' '[\"\\\"\"]'
@@ -31,13 +31,13 @@ for model_name, model_endpoint in API_ENDPOINTS.items():
31
  CHOICES.append(model_name)
32
  CLIENTS[model_name] = InferenceClient(model_endpoint, headers = { "Authorization": f"Bearer {API_TOKEN}" })
33
 
34
- def format(instruction, history, input, preoutput):
35
  sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
36
  formatted_history = "".join(f"{sy_l}{message[0]}{sy_r}\n{sy_l}{message[1]}{sy_r}\n" for message in history)
37
  formatted_input = f"{sy_l}INSTRUCTIONS: {instruction}{sy_r}\n{formatted_history}{sy_l}{input}{sy_r}\n{sy_l}"
38
- return f"{formatted_input}{preoutput}", formatted_input
39
 
40
- def predict(access_key, instruction, history, input, preoutput, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed):
41
 
42
  if (access_key != KEY):
43
  print(">>> MODEL FAILED: Input: " + input + ", Attempted Key: " + access_key)
@@ -46,12 +46,12 @@ def predict(access_key, instruction, history, input, preoutput, model, temperatu
46
  instruction = instruction or DEFAULT_INSTRUCTION
47
  history = history or []
48
  input = input or ""
49
- preoutput = preoutput or ""
50
  stop_seqs = stop_seqs or DEFAULT_STOPS
51
 
52
  stops = json.loads(stop_seqs)
53
 
54
- formatted_input, formatted_input_base = format(instruction, history, input, preoutput)
55
  print(seed)
56
  print(formatted_input)
57
  response = CLIENTS[model].text_generation(
@@ -70,7 +70,7 @@ def predict(access_key, instruction, history, input, preoutput, model, temperatu
70
  )
71
 
72
  sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
73
- result = preoutput + response
74
 
75
  for stop in stops:
76
  result = result.split(stop, 1)[0]
@@ -93,13 +93,13 @@ def maintain_cloud():
93
 
94
  with gr.Blocks() as demo:
95
  with gr.Row(variant = "panel"):
96
- gr.Markdown("🔯 This is a private LLM CHAT Space owned within STC Holdings!\n\n\nhttps://discord.gg/6JRtGawz7B")
97
 
98
  with gr.Row():
99
  with gr.Column():
100
  history = gr.Chatbot(abel = "History", elem_id = "chatbot")
101
  input = gr.Textbox(label = "Input", value = DEFAULT_INPUT, lines = 2)
102
- preoutput = gr.Textbox(label = "Pre-Output", value = DEFAULT_PREOUTPUT, lines = 1)
103
  instruction = gr.Textbox(label = "Instruction", value = DEFAULT_INSTRUCTION, lines = 4)
104
  access_key = gr.Textbox(label = "Access Key", lines = 1)
105
  run = gr.Button("▶")
@@ -120,7 +120,7 @@ with gr.Blocks() as demo:
120
  with gr.Column():
121
  output = gr.Textbox(label = "Output", value = "", lines = 50)
122
 
123
- run.click(predict, inputs = [access_key, instruction, history, input, preoutput, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed], outputs = [output, input, history])
124
  clear.click(clear_history, [], history)
125
  cloud.click(maintain_cloud, inputs = [], outputs = [input, output])
126
 
 
11
 
12
  SPECIAL_SYMBOLS = ["⠀", "⠀"] # ["‹", "›"] ['"', '"']
13
 
14
+ DEFAULT_INPUT = "User: Hi!"
15
+ DEFAULT_WRAP = "Statical: %s"
16
  DEFAULT_INSTRUCTION = "Statical is a helpful chatbot who is communicating with people."
17
 
18
  DEFAULT_STOPS = '["⠀", "⠀"]' # '["‹", "›"]' '[\"\\\"\"]'
 
31
  CHOICES.append(model_name)
32
  CLIENTS[model_name] = InferenceClient(model_endpoint, headers = { "Authorization": f"Bearer {API_TOKEN}" })
33
 
34
+ def format(instruction, history, input, wrap):
35
  sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
36
  formatted_history = "".join(f"{sy_l}{message[0]}{sy_r}\n{sy_l}{message[1]}{sy_r}\n" for message in history)
37
  formatted_input = f"{sy_l}INSTRUCTIONS: {instruction}{sy_r}\n{formatted_history}{sy_l}{input}{sy_r}\n{sy_l}"
38
+ return f"{formatted_input}{wrap % ("")}", formatted_input
39
 
40
+ def predict(access_key, instruction, history, input, wrap, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed):
41
 
42
  if (access_key != KEY):
43
  print(">>> MODEL FAILED: Input: " + input + ", Attempted Key: " + access_key)
 
46
  instruction = instruction or DEFAULT_INSTRUCTION
47
  history = history or []
48
  input = input or ""
49
+ wrap = wrap or ""
50
  stop_seqs = stop_seqs or DEFAULT_STOPS
51
 
52
  stops = json.loads(stop_seqs)
53
 
54
+ formatted_input, formatted_input_base = format(instruction, history, input, wrap)
55
  print(seed)
56
  print(formatted_input)
57
  response = CLIENTS[model].text_generation(
 
70
  )
71
 
72
  sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
73
+ result = wrap % (response)
74
 
75
  for stop in stops:
76
  result = result.split(stop, 1)[0]
 
93
 
94
  with gr.Blocks() as demo:
95
  with gr.Row(variant = "panel"):
96
+ gr.Markdown("✡️ This is a private LLM CHAT Space owned within STC Holdings!\n\n\nhttps://discord.gg/6JRtGawz7B")
97
 
98
  with gr.Row():
99
  with gr.Column():
100
  history = gr.Chatbot(abel = "History", elem_id = "chatbot")
101
  input = gr.Textbox(label = "Input", value = DEFAULT_INPUT, lines = 2)
102
+ wrap = gr.Textbox(label = "Wrap", value = DEFAULT_WRAP, lines = 1)
103
  instruction = gr.Textbox(label = "Instruction", value = DEFAULT_INSTRUCTION, lines = 4)
104
  access_key = gr.Textbox(label = "Access Key", lines = 1)
105
  run = gr.Button("▶")
 
120
  with gr.Column():
121
  output = gr.Textbox(label = "Output", value = "", lines = 50)
122
 
123
+ run.click(predict, inputs = [access_key, instruction, history, input, wrap, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed], outputs = [output, input, history])
124
  clear.click(clear_history, [], history)
125
  cloud.click(maintain_cloud, inputs = [], outputs = [input, output])
126