lykeven committed
Commit 9b9de11 · 1 Parent(s): ff1a337

update introduction

Files changed (1):
  1. app.py +45 -2
app.py CHANGED
@@ -5,12 +5,15 @@ import os
 import json
 import requests
 import time
+from PIL import Image
+import datetime
+
 from concurrent.futures import ThreadPoolExecutor
 from utils import is_chinese, process_image_without_resize, parse_response, templates_agent_cogagent, template_grounding_cogvlm, postprocess_text
 
-DESCRIPTION = '''<h2 style='text-align: center'> <a href="https://github.com/THUDM/CogVLM"> CogVLM & CogAgent Chat Demo</a> </h2>'''
+DESCRIPTION = '''<h2 style='text-align: center'> <a href="https://github.com/THUDM/CogVLM2"> CogVLM2 </a>& <a href="https://github.com/THUDM/CogVLM">CogAgent Chat Demo</a> </h2>'''
 
-NOTES = 'This app is adapted from <a href="https://github.com/THUDM/CogVLM">https://github.com/THUDM/CogVLM</a>. It would be recommended to check out the repo if you want to see the detail of our model.\n\n该demo仅作为测试使用,不支持批量请求。如有大批量需求,欢迎联系[智谱AI](mailto:business@zhipuai.cn)。\n\n请注意该Demo目前仅支持英文,<a href="http://36.103.203.44:7861/">备用网页</a>支持中文。'
+NOTES = 'This app is adapted from <a href="https://github.com/THUDM/CogVLM">https://github.com/THUDM/CogVLM2</a> and <a href="https://github.com/THUDM/CogVLM">https://github.com/THUDM/CogVLM</a>. It would be recommended to check out the repo if you want to see the detail of our model.\n\n该demo仅作为测试使用,不支持批量请求。如有大批量需求,欢迎联系[智谱AI](mailto:business@zhipuai.cn)。\n\n请注意该Demo目前仅支持英文,<a href="http://36.103.203.44:7861/">备用网页</a>支持中文。'
 
 MAINTENANCE_NOTICE1 = 'Hint 1: If the app report "Something went wrong, connection error out", please turn off your proxy and retry.<br>Hint 2: If you upload a large size of image like 10MB, it may take some time to upload and process. Please be patient and wait.'
@@ -28,6 +31,41 @@ def make_request(URL, headers, data):
     response = requests.request("POST", URL, headers=headers, data=data, timeout=(60, 100))
     return response.json()
 
+def report_fn(image_prompt, result_previous, report=False, base_dir='/home/wangyan/data/badcases'):
+    if not report:
+        return "Report failed! Checking Report when you want to report a bad case!"
+
+    os.makedirs(base_dir, exist_ok=True)
+    date_dir = datetime.datetime.now().strftime('%Y%m%d')
+    new_dir = os.path.join(base_dir, date_dir)
+    os.makedirs(new_dir, exist_ok=True)
+    conv_path = os.path.join(base_dir, f'cases_report_{date_dir}.jsonl')
+
+    result_text = [(ele[0], ele[1]) for ele in result_previous]
+    for i in range(len(result_text)-1, -1, -1):
+        if result_text[i][0] == "" or result_text[i][0] == None:
+            del result_text[i]
+
+    if image_prompt and len(result_text) > 0:
+        image = Image.open(image_prompt)
+        timestamp = time.time()
+        file_ext = os.path.splitext(image_prompt)[1]
+        filename = os.path.join(new_dir, f"{timestamp}{file_ext}")
+        image.save(filename)
+
+        conv_data = {
+            'id': str(int(time.time())),
+            'history': [filename, *[i for c in result_text for i in c]]
+        }
+
+        mode = 'a' if os.path.isfile(conv_path) else 'w'
+        with open(conv_path, mode) as f:
+            f.write(json.dumps(conv_data, ensure_ascii=False)+"\n")
+        return "Submit success!"
+    return "Submit failed!"
+
+
+
 def post(
     input_text,
     temperature,
@@ -180,6 +218,10 @@ def main():
         with gr.Column(scale=5.5):
            result_text = gr.components.Chatbot(label='Multi-round conversation History', value=[("", "Hi, What do you want to know about this image?")], height=550)
            hidden_image_hash = gr.Textbox(visible=False)
+           with gr.Row():
+               bad_case_button = gr.Button('Report bad case')
+               report = gr.Checkbox(label='Report')
+               report_text = gr.Textbox(value='', interactive=False, show_label=False)
 
        gr_examples = gr.Examples(examples=[[example["text"], example["image"], example["grounding"], example["cogagent"]] for example in examples],
                                  inputs=[input_text, image_prompt, grounding, cogagent],
@@ -196,6 +238,7 @@ def main():
    clear_button.click(fn=clear_fn, inputs=clear_button, outputs=[input_text, result_text, image_prompt])
    image_prompt.upload(fn=clear_fn2, inputs=clear_button, outputs=[result_text])
    image_prompt.clear(fn=clear_fn2, inputs=clear_button, outputs=[result_text])
+   bad_case_button.click(fn=report_fn, inputs=[image_prompt, result_text, report], outputs=[report_text])
 
    print(gr.__version__)
 
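
For context on the new report flow: report_fn only runs when the "Report" checkbox is ticked (it returns early otherwise), saves a copy of the uploaded image under a dated folder, and appends one JSON object per reported case to cases_report_<date>.jsonl. A minimal sketch of one appended record, inferred from conv_data above; the path, timestamp, and conversation text below are illustrative, not taken from the commit:

    # Hypothetical example (values made up) of one record that report_fn
    # serializes with json.dumps as a single line of cases_report_<date>.jsonl.
    record = {
        'id': '1714380000',                      # str(int(time.time()))
        'history': [
            '/home/wangyan/data/badcases/20240429/1714380000.12.png',  # saved image copy
            'Describe this image.',              # user turn
            'A cat sitting on a windowsill.',    # model reply
        ],
    }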