Staticaliza committed
Commit 0299602
1 Parent(s): 44ae3d6

Update app.py

Files changed (1)
  1. app.py +12 -3
app.py CHANGED
@@ -26,8 +26,8 @@ for model_name, model_endpoint in API_ENDPOINTS.items():
 def format(instruction = "", history = "", input = "", preinput = ""):
     sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
     formatted_history = '\n'.join(f"{sy_l}{message}{sy_r}" for message in history)
-    task_message = f"System: {sy_l}{instruction}{sy_r}\n{formatted_history}\n{sy_l}{input}{sy_r}\n{preinput}"
-    return prompt
+    formatted_input = f"System: {sy_l}{instruction}{sy_r}\n{formatted_history}\n{sy_l}{input}{sy_r}\n{preinput}"
+    return formatted_input
 
 def predict(instruction, history, input, preinput, access_key, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed):
 
@@ -38,6 +38,8 @@ def predict(instruction, history, input, preinput, access_key, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed):
     stops = json.loads(stop_seqs)
 
     formatted_input = format(instruction, history, input, preinput)
+
+    history = history + [[input, ""]]
 
     response = CLIENTS[model].text_generation(
         formatted_input,
@@ -60,14 +62,19 @@ def predict(instruction, history, input, preinput, access_key, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed):
     match = pattern.search(pre_result)
     get_result = match.group(1).strip() if match else ""
 
+    history = history + [get_result]
+
     print(f"---\nUSER: {input}\nBOT: {get_result}\n---")
 
     return (get_result, input)
 
+def clear():
+    return []
+
 def maintain_cloud():
     print(">>> SPACE MAINTAINED!")
     return ("SUCCESS!", "SUCCESS!")
-
+
 with gr.Blocks() as demo:
     with gr.Row(variant = "panel"):
         gr.Markdown("🔯 This is a private LLM CHAT Space owned within STC Holdings!\n\n\nhttps://discord.gg/6JRtGawz7B")
@@ -80,6 +87,7 @@ with gr.Blocks() as demo:
             instruction = gr.Textbox(label = "Instruction", lines = 4)
             access_key = gr.Textbox(label = "Access Key", lines = 1)
             run = gr.Button("▶")
+            clear = gr.Button("🗑️")
             cloud = gr.Button("☁️")
 
         with gr.Column():
@@ -97,6 +105,7 @@ with gr.Blocks() as demo:
             output = gr.Textbox(label = "Output", value = "", lines = 50)
 
     run.click(predict, inputs = [instruction, history, input, preinput, access_key, model, temperature, top_p, top_k, rep_p, max_tokens, stop_seqs, seed], outputs = [output, input])
+    clear.click(clear, [], chatbot)
    cloud.click(maintain_cloud, inputs = [], outputs = [input, output])
 
 demo.queue(concurrency_count = 500, api_open = True).launch(show_api = True)
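
For context, here is a minimal, self-contained sketch of the prompt formatting that this commit fixes in format(). The SPECIAL_SYMBOLS delimiters below are placeholder assumptions; the real delimiters are defined elsewhere in app.py and do not appear in this diff.

# Standalone sketch of the rewritten format() helper (assumed delimiter values).
SPECIAL_SYMBOLS = ["[", "]"]  # placeholder assumption, not the app's real delimiters

def format(instruction = "", history = "", input = "", preinput = ""):
    sy_l, sy_r = SPECIAL_SYMBOLS[0], SPECIAL_SYMBOLS[1]
    formatted_history = '\n'.join(f"{sy_l}{message}{sy_r}" for message in history)
    formatted_input = f"System: {sy_l}{instruction}{sy_r}\n{formatted_history}\n{sy_l}{input}{sy_r}\n{preinput}"
    return formatted_input

# Example call with one prior exchange and a new user message.
print(format(
    instruction = "You are a helpful assistant.",
    history = ["Hi!", "Hello, how can I help?"],
    input = "Tell me a joke.",
    preinput = "",
))

Every turn is wrapped in the same left/right delimiters, and predict() later pulls the reply back out of the model output with a regex (pattern.search / match.group(1)), presumably keyed to those same symbols.

The commit also wires a new 🗑️ button to clear(). Below is a minimal sketch of that wiring, assuming chatbot refers to a gr.Chatbot (or similar history component) defined elsewhere in app.py; distinct names are used here for the button and the handler so one does not shadow the other.

import gradio as gr

def clear_history():
    # Returning an empty list resets the chat history component.
    return []

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()       # assumed stand-in for the app's history component
    clear_btn = gr.Button("🗑️")  # mirrors the button added in this commit
    clear_btn.click(clear_history, inputs = [], outputs = chatbot)

demo.launch()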