wenhuchen committed
Commit b51e863
1 Parent(s): bfc56dc

update temperature

Files changed (1)
  1. app.py +44 -40
app.py CHANGED

@@ -49,6 +49,7 @@ def get_chat_history(history):
     chat_history.append({"role": "assistant", "text": ""})
     return chat_history
 
+
 def get_chat_images(history):
     images = []
     for message in history:
@@ -56,6 +57,47 @@ def get_chat_images(history):
             images.extend(message[0])
     return images
 
+
+def bot(history):
+    print(history)
+    cur_messages = {"text": "", "images": []}
+    for message in history[::-1]:
+        if message[1]:
+            break
+        if isinstance(message[0], str):
+            cur_messages["text"] = message[0] + " " + cur_messages["text"]
+        elif isinstance(message[0], tuple):
+            cur_messages["images"].extend(message[0])
+    cur_messages["text"] = cur_messages["text"].strip()
+    cur_messages["images"] = cur_messages["images"][::-1]
+    if not cur_messages["text"]:
+        raise gr.Error("Please enter a message")
+    if cur_messages['text'].count("<image>") < len(cur_messages['images']):
+        gr.Warning("The number of images uploaded is more than the number of <image> placeholders in the text. Will automatically prepend <image> to the text.")
+        cur_messages['text'] = "<image> " * (len(cur_messages['images']) - cur_messages['text'].count("<image>")) + cur_messages['text']
+        history[-1][0] = cur_messages["text"]
+    if cur_messages['text'].count("<image>") > len(cur_messages['images']):
+        gr.Warning("The number of images uploaded is less than the number of <image> placeholders in the text. Will automatically remove extra <image> placeholders from the text.")
+        cur_messages['text'] = cur_messages['text'][::-1].replace("<image>"[::-1], "", cur_messages['text'].count("<image>") - len(cur_messages['images']))[::-1]
+        history[-1][0] = cur_messages["text"]
+
+    chat_history = get_chat_history(history)
+    chat_images = get_chat_images(history)
+    generation_kwargs = {
+        "max_new_tokens": 4096,
+        "temperature": 0.2,
+        "top_p": 1.0,
+        "do_sample": True,
+    }
+    print(None, chat_images, chat_history, generation_kwargs)
+    response = generate(None, chat_images, chat_history, **generation_kwargs)
+
+    for _output in response:
+        history[-1][1] = _output
+        time.sleep(0.05)
+        yield history
+
+
 
 def build_demo():
     with gr.Blocks() as demo:
@@ -82,6 +124,7 @@ def build_demo():
 
         chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
 
+        """
         with gr.Accordion(label='Advanced options', open=False):
             temperature = gr.Slider(
                 label='Temperature',
@@ -99,46 +142,7 @@ def build_demo():
                 value=1.0,
                 interactive=True
             )
-
-        def bot(history):
-            print(history)
-            cur_messages = {"text": "", "images": []}
-            for message in history[::-1]:
-                if message[1]:
-                    break
-                if isinstance(message[0], str):
-                    cur_messages["text"] = message[0] + " " + cur_messages["text"]
-                elif isinstance(message[0], tuple):
-                    cur_messages["images"].extend(message[0])
-            cur_messages["text"] = cur_messages["text"].strip()
-            cur_messages["images"] = cur_messages["images"][::-1]
-            if not cur_messages["text"]:
-                raise gr.Error("Please enter a message")
-            if cur_messages['text'].count("<image>") < len(cur_messages['images']):
-                gr.Warning("The number of images uploaded is more than the number of <image> placeholders in the text. Will automatically prepend <image> to the text.")
-                cur_messages['text'] = "<image> " * (len(cur_messages['images']) - cur_messages['text'].count("<image>")) + cur_messages['text']
-                history[-1][0] = cur_messages["text"]
-            if cur_messages['text'].count("<image>") > len(cur_messages['images']):
-                gr.Warning("The number of images uploaded is less than the number of <image> placeholders in the text. Will automatically remove extra <image> placeholders from the text.")
-                cur_messages['text'] = cur_messages['text'][::-1].replace("<image>"[::-1], "", cur_messages['text'].count("<image>") - len(cur_messages['images']))[::-1]
-                history[-1][0] = cur_messages["text"]
-
-            chat_history = get_chat_history(history)
-            chat_images = get_chat_images(history)
-            generation_kwargs = {
-                "max_new_tokens": 4096,
-                "temperature": temperature,
-                "top_p": top_p,
-                "do_sample": True,
-            }
-            print(None, chat_images, chat_history, generation_kwargs)
-            response = generate(None, chat_images, chat_history, **generation_kwargs)
-
-            for _output in response:
-                history[-1][1] = _output
-                time.sleep(0.05)
-                yield history
-
+        """
 
         bot_msg = chat_msg.success(bot, chatbot, chatbot, api_name="bot_response")
 
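
The new module-level bot() first collects the latest unanswered turn from the Gradio chat history, then balances <image> placeholders against the uploaded images before calling generate() with the now hard-coded sampling settings (temperature 0.2, top_p 1.0). The reversed str.replace used to drop surplus placeholders is easy to misread, so below is a minimal, self-contained sketch of just those two steps; the helper names (gather_current_turn, balance_placeholders) and the sample history are illustrative only and do not appear in app.py.

# Standalone sketch of the turn-gathering and <image>-balancing logic from bot();
# the sample history below is hypothetical, the logic mirrors the diff above.
def gather_current_turn(history):
    cur = {"text": "", "images": []}
    for message in history[::-1]:
        if message[1]:                      # stop at the first already-answered turn
            break
        if isinstance(message[0], str):
            cur["text"] = message[0] + " " + cur["text"]
        elif isinstance(message[0], tuple):
            cur["images"].extend(message[0])
    cur["text"] = cur["text"].strip()
    cur["images"] = cur["images"][::-1]     # restore upload order
    return cur

def balance_placeholders(text, images):
    missing = len(images) - text.count("<image>")
    if missing > 0:                         # fewer placeholders than images: prepend
        text = "<image> " * missing + text
    elif missing < 0:                       # more placeholders than images: drop the LAST extras
        # reversing the string makes str.replace(count=n) act on trailing occurrences
        text = text[::-1].replace("<image>"[::-1], "", -missing)[::-1]
    return text

history = [
    ["describe the first photo", "It shows a dog."],   # answered turn, ignored
    [("photo2.png",), None],                            # pending upload
    ["<image> <image> and this one?", None],            # pending text, one extra placeholder
]
turn = gather_current_turn(history)
print(balance_placeholders(turn["text"], turn["images"]))
# -> "<image>  and this one?"  (the trailing extra placeholder was removed)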