ThomasSimonini HF staff committed on
Commit
d0806a8
1 Parent(s): 47e64ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -17
app.py CHANGED
@@ -21,9 +21,6 @@ article = """
21
  theme="huggingface"
22
 
23
 
24
-
25
-
26
- """
27
  # Builds the prompt from what previously happened
28
  def build_prompt(conversation, context, interlocutor_names):
29
  prompt = context + "\n"
@@ -41,30 +38,27 @@ def clean_chat_output(txt, prompt, interlocutor_names):
41
  output = txt.replace(prompt, '')
42
  output = output[:output.find(delimiter)]
43
  return output
44
- """
45
-
46
 
47
 
48
- def chat(message, npc_name, prompt, top_p, temperature, max_new_tokens, history=[]):
49
  interlocutor_names = ["Player", npc_name]
50
 
51
  print("message", message)
52
  print("npc_name", npc_name)
 
53
  print("top_p", top_p)
54
  print("temperature", temperature)
55
  print("max_new_tokens", max_new_tokens)
56
  print("history", history)
57
  response = "Test"
58
- history.append((message, response))
59
- """
60
- history = gr.get_state() or []
61
  history.append((message, ""))
62
- gr.set_state(history)
63
  conversation = history
64
- prompt = build_prompt(conversation, context, interlocutor_names)
 
 
65
 
66
  # Build JSON
67
- json_ = {"inputs": prompt,
68
  "parameters":
69
  {
70
  "top_p": top_p,
@@ -72,14 +66,14 @@ def chat(message, npc_name, prompt, top_p, temperature, max_new_tokens, history=
72
  "max_new_tokens": max_new_tokens,
73
  "return_full_text": False
74
  }}
75
-
76
- output = query(json_)
 
77
  output = output[0]['generated_text']
 
78
  answer = clean_chat_output(output, prompt, interlocutor_names)
79
  response = answer
80
  history[-1] = (message, response)
81
- gr.set_state(history)
82
- """
83
  return response, history
84
 
85
 
@@ -88,7 +82,7 @@ def chat(message, npc_name, prompt, top_p, temperature, max_new_tokens, history=
88
  iface = gr.Interface(fn=chat,
89
  inputs=[Textbox(label="message"),
90
  Textbox(label="npc_name"),
91
- Textbox(label="prompt"),
92
  Slider(minimum=0.5, maximum=1, step=0.05, default=0.9, label="top_p"),
93
  Slider(minimum=0.5, maximum=1.5, step=0.1, default=1.1, label="temperature"),
94
  Slider(minimum=20, maximum=250, step=10, default=50, label="max_new_tokens"),
 
21
  theme="huggingface"
22
 
23
 
 
 
 
24
  # Builds the prompt from what previously happened
25
  def build_prompt(conversation, context, interlocutor_names):
26
  prompt = context + "\n"
 
38
  output = txt.replace(prompt, '')
39
  output = output[:output.find(delimiter)]
40
  return output
 
 
41
 
42
 
43
+ def chat(message, npc_name, initial_prompt, top_p, temperature, max_new_tokens, history=[]):
44
  interlocutor_names = ["Player", npc_name]
45
 
46
  print("message", message)
47
  print("npc_name", npc_name)
48
+ print("initial_prompt", initial_prompt)
49
  print("top_p", top_p)
50
  print("temperature", temperature)
51
  print("max_new_tokens", max_new_tokens)
52
  print("history", history)
53
  response = "Test"
 
 
 
54
  history.append((message, ""))
 
55
  conversation = history
56
+
57
+ # Build the prompt
58
+ prompt = build_prompt(conversation, initial_prompt, interlocutor_names)
59
 
60
  # Build JSON
61
+ json_req = {"inputs": prompt,
62
  "parameters":
63
  {
64
  "top_p": top_p,
 
66
  "max_new_tokens": max_new_tokens,
67
  "return_full_text": False
68
  }}
69
+
70
+ # Get the output
71
+ output = query(json_req)
72
  output = output[0]['generated_text']
73
+
74
  answer = clean_chat_output(output, prompt, interlocutor_names)
75
  response = answer
76
  history[-1] = (message, response)
 
 
77
  return response, history
78
 
79
 
 
82
  iface = gr.Interface(fn=chat,
83
  inputs=[Textbox(label="message"),
84
  Textbox(label="npc_name"),
85
+ Textbox(label="initial_prompt"),
86
  Slider(minimum=0.5, maximum=1, step=0.05, default=0.9, label="top_p"),
87
  Slider(minimum=0.5, maximum=1.5, step=0.1, default=1.1, label="temperature"),
88
  Slider(minimum=20, maximum=250, step=10, default=50, label="max_new_tokens"),