gulixin0922 committed
Commit ccc043d · 1 parent: 41e30d4
Update app.py
app.py
CHANGED
@@ -120,33 +120,33 @@ def find_bounding_boxes(state, response):
     return returned_image if len(matches) > 0 else None
 
 
-def vote_last_response(state, liked,
+def vote_last_response(state, liked, request: gr.Request):
     conv_data = {
         "tstamp": round(time.time(), 4),
         "like": liked,
-        "model":
+        "model": 'InternVL2.5-78B',
         "state": state.dict(),
         "ip": request.client.host,
     }
     write2file(get_log_filename(), json.dumps(conv_data) + "\n")
 
 
-def upvote_last_response(state,
+def upvote_last_response(state, request: gr.Request):
     logger.info(f"upvote. ip: {request.client.host}")
-    vote_last_response(state, True,
+    vote_last_response(state, True, request)
     textbox = gr.MultimodalTextbox(value=None, interactive=True)
     return (textbox,) + (disable_btn,) * 3
 
 
-def downvote_last_response(state,
+def downvote_last_response(state, request: gr.Request):
     logger.info(f"downvote. ip: {request.client.host}")
-    vote_last_response(state, False,
+    vote_last_response(state, False, request)
     textbox = gr.MultimodalTextbox(value=None, interactive=True)
     return (textbox,) + (disable_btn,) * 3
 
 
 def vote_selected_response(
-    state,
+    state, request: gr.Request, data: gr.LikeData
 ):
     logger.info(
         f"Vote: {data.liked}, index: {data.index}, value: {data.value} , ip: {request.client.host}"
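Each vote is appended to the serving log as one JSON object per line. A minimal sketch of that JSONL pattern, with hypothetical stand-ins for write2file and get_log_filename (their real implementations live elsewhere in app.py):

import json
import time

def get_log_filename():
    # Hypothetical path; the real helper picks the actual log file.
    return "serve_votes.jsonl"

def write2file(path, line):
    # Append-only write, one JSON record per line.
    with open(path, "a", encoding="utf-8") as f:
        f.write(line)

conv_data = {"tstamp": round(time.time(), 4), "like": True, "model": "InternVL2.5-78B"}
write2file(get_log_filename(), json.dumps(conv_data) + "\n")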
@@ -155,7 +155,7 @@ def vote_selected_response(
         "tstamp": round(time.time(), 4),
         "like": data.liked,
         "index": data.index,
-        "model":
+        "model": 'InternVL2.5-78B',
         "state": state.dict(),
         "ip": request.client.host,
     }
@@ -163,9 +163,9 @@ def vote_selected_response(
     return
 
 
-def flag_last_response(state,
+def flag_last_response(state, request: gr.Request):
     logger.info(f"flag. ip: {request.client.host}")
-    vote_last_response(state, "flag",
+    vote_last_response(state, "flag", request)
     textbox = gr.MultimodalTextbox(value=None, interactive=True)
     return (textbox,) + (disable_btn,) * 3
 
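The new handler signatures lean on Gradio's parameter injection: arguments annotated as gr.Request (and, for .like() events, gr.LikeData) are filled in by Gradio itself and never appear in a listener's inputs list, which is why the listener registrations further down shrink to [state]. A minimal sketch of that pattern, with hypothetical component names:

import gradio as gr

def on_like(state, request: gr.Request, data: gr.LikeData):
    # request and data are injected from the type annotations,
    # so only `state` appears in the inputs list below.
    print(data.liked, data.index, request.client.host)

with gr.Blocks() as demo:
    state = gr.State()
    chatbot = gr.Chatbot()
    chatbot.like(on_like, [state], [])

demo.launch()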
@@ -189,13 +189,7 @@ def clear_history(request: gr.Request):
     return (state, state.to_gradio_chatbot(), textbox) + (disable_btn,) * 5
 
 
-def
-    logger.info(f"Change system prompt. ip: {request.client.host}")
-    state.set_system_message(system_prompt)
-    return state
-
-
-def add_text(state, message, system_prompt, model_selector, request: gr.Request):
+def add_text(state, message, system_prompt, request: gr.Request):
     print(f"state: {state}")
     if not state:
         state = init_state()
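The standalone change-system-prompt handler is removed because add_text now receives system_prompt directly and applies it on every submission (see state.set_system_message(system_prompt) in the next hunk). A minimal sketch of that flow, with a hypothetical stand-in for the conversation state object:

class ConvState:
    # Hypothetical stand-in; the real state comes from init_state() in app.py.
    def __init__(self):
        self.system_message = None
        self.messages = []

    def set_system_message(self, message):
        self.system_message = message

def add_text(state, message, system_prompt):
    state = state or ConvState()
    state.set_system_message(system_prompt)  # applied per submit, no separate event needed
    state.messages.append(("user", message))
    return state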
@@ -222,26 +216,23 @@ def add_text(state, message, system_prompt, model_selector, request: gr.Request)
     state.set_system_message(system_prompt)
     state.append_message(Conversation.USER, text, images)
     state.skip_next = False
-    return (state, state.to_gradio_chatbot(), textbox
+    return (state, state.to_gradio_chatbot(), textbox) + (
         disable_btn,
     ) * 5
 
 
 def http_bot(
     state,
-    model_selector,
     temperature,
     top_p,
     repetition_penalty,
     max_new_tokens,
     max_input_tiles,
-    # bbox_threshold,
-    # mask_threshold,
     request: gr.Request,
 ):
+    model_name = 'InternVL2.5-78B'
     logger.info(f"http_bot. ip: {request.client.host}")
     start_tstamp = time.time()
-    model_name = model_selector
     if hasattr(state, "skip_next") and state.skip_next:
         # This generate call is skipped due to invalid inputs
         yield (
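With the model dropdown gone, the model name is now a string literal repeated in vote_last_response, vote_selected_response, and http_bot. A minimal sketch, assuming one wanted a single source of truth (the constant name is hypothetical; the commit itself repeats the literal):

import gradio as gr

MODEL_NAME = "InternVL2.5-78B"  # hypothetical module-level constant

def vote_last_response(state, liked, request: gr.Request):
    conv_data = {
        "like": liked,
        "model": MODEL_NAME,  # instead of re-typing the string in each function
    }
    return conv_data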
@@ -343,7 +334,7 @@ def http_bot(
         ) + (enable_btn,) * 5
 
     finish_tstamp = time.time()
-    logger.info(f"{
+    logger.info(f"{finnal_output}")
     data = {
         "tstamp": round(finish_tstamp, 4),
         "like": None,
@@ -358,29 +349,14 @@ def http_bot(
 
 
 title_html = """
-<h2> <span class="gradient-text" id="text">InternVL2</span><span class="plain-text">: Better than the Best—Expanding Performance Boundaries of Open-Source Multimodal Models with the Progressive Scaling Strategy</span></h2>
+<h2> <span class="gradient-text" id="text">InternVL2.5</span><span class="plain-text">: Better than the Best—Expanding Performance Boundaries of Open-Source Multimodal Models with the Progressive Scaling Strategy</span></h2>
 <a href="https://internvl.github.io/blog/2024-07-02-InternVL-2.0/">[📜 InternVL2 Blog]</a>
 <a href="https://internvl.opengvlab.com/">[🌟 Official Demo]</a>
 <a href="https://github.com/OpenGVLab/InternVL?tab=readme-ov-file#quick-start-with-huggingface">[🚀 Quick Start]</a>
 <a href="https://github.com/OpenGVLab/InternVL/blob/main/document/How_to_use_InternVL_API.md">[🌐 API]</a>
 """
 
-tos_markdown = """
-### Terms of use
-By using this service, users are required to agree to the following terms:
-The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. The service may collect user dialogue data for future research.
-Please click the "Flag" button if you get any inappropriate answer! We will collect those to keep improving our moderator.
-For an optimal experience, please use desktop computers for this demo, as mobile devices may compromise its quality.
-"""
-
 
-learn_more_markdown = """
-### License
-The service is a research preview intended for non-commercial use only, subject to the model [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) of LLaMA, [Terms of Use](https://openai.com/policies/terms-of-use) of the data generated by OpenAI, and [Privacy Practices](https://chrome.google.com/webstore/detail/sharegpt-share-your-chatg/daiacboceoaocpibfodeljbdfacokfjb) of ShareGPT. Please contact us if you find any potential violation.
-
-### Acknowledgement
-This demo is modified from LLaVA's demo. Thanks for their awesome work!
-"""
 # .gradio-container {margin: 5px 10px 0 10px !important};
 block_css = """
 .gradio-container {margin: 0.1% 1% 0 1% !important; max-width: 98% !important;};
@@ -459,33 +435,20 @@ def build_demo(embed_mode):
         theme=gr.themes.Default(),
         css=block_css,
     ) as demo:
-        models = ['InternVL2-Pro']
         state = gr.State()
 
         if not embed_mode:
-            # gr.Markdown(title_markdown)
             gr.HTML(title_html)
 
         with gr.Row():
             with gr.Column(scale=2):
 
-                with gr.
-                    model_selector = gr.Dropdown(
-                        choices=models,
-                        value=models[0] if len(models) > 0 else "",
-                        # value="InternVL-Chat-V1-5",
-                        interactive=True,
-                        show_label=False,
-                        container=False,
-                    )
-
-                with gr.Accordion("System Prompt", open=False) as system_prompt_row:
+                with gr.Accordion("Settings", open=False) as setting_row:
                     system_prompt = gr.Textbox(
                         value="请尽可能详细地回答用户的问题。",
                         label="System Prompt",
                         interactive=True,
                     )
-                with gr.Accordion("Parameters", open=False) as parameter_row:
                     temperature = gr.Slider(
                         minimum=0.0,
                         maximum=1.0,
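The model dropdown and the separate "System Prompt" and "Parameters" accordions are collapsed into a single "Settings" accordion; the default system prompt translates to "Please answer the user's question in as much detail as possible." A minimal sketch of the consolidated panel (the slider arguments here are illustrative, not taken from the diff):

import gradio as gr

with gr.Blocks() as demo:
    with gr.Accordion("Settings", open=False) as setting_row:
        system_prompt = gr.Textbox(
            value="请尽可能详细地回答用户的问题。",  # "Answer the user's question in as much detail as possible."
            label="System Prompt",
            interactive=True,
        )
        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.2, label="Temperature")  # illustrative

demo.launch()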
@@ -539,25 +502,17 @@ def build_demo(embed_mode):
                 [
                     {
                         "files": [
-                            "gallery/
+                            "gallery/1-2.PNG",
                         ],
-                        "text": "
+                        "text": "用python实现这个流程图",
                     }
                 ],
                 [
                     {
                         "files": [
-                            "gallery/
+                            "gallery/15.PNG",
                         ],
-                        "text": "
-                    }
-                ],
-                [
-                    {
-                        "files": [
-                            "gallery/water.jpg",
-                        ],
-                        "text": "Please describe this image.",
+                        "text": "请帮我分析一下这张图内容",
                     }
                 ],
             ],
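The two new gallery examples translate roughly to "Implement this flowchart in Python" and "Please help me analyze the content of this image", replacing the older water.jpg example. For context, a MultimodalTextbox example is a dict with "text" and "files" keys; a minimal sketch of how such examples are wired (file paths are illustrative, so point them at real images to run this):

import gradio as gr

with gr.Blocks() as demo:
    textbox = gr.MultimodalTextbox(show_label=False)
    gr.Examples(
        examples=[
            [{"files": ["gallery/1-2.PNG"], "text": "用python实现这个流程图"}],
            [{"files": ["gallery/15.PNG"], "text": "请帮我分析一下这张图内容"}],
        ],
        inputs=[textbox],
    )

demo.launch()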
@@ -592,31 +547,28 @@ def build_demo(embed_mode):
             )
             clear_btn = gr.Button(value="🗑️ Clear", interactive=False)
 
-        if not embed_mode:
-            gr.Markdown(tos_markdown)
-            gr.Markdown(learn_more_markdown)
         url_params = gr.JSON(visible=False)
 
         # Register listeners
         btn_list = [upvote_btn, downvote_btn, flag_btn, regenerate_btn, clear_btn]
         upvote_btn.click(
             upvote_last_response,
-            [state
+            [state],
             [textbox, upvote_btn, downvote_btn, flag_btn],
         )
         downvote_btn.click(
             downvote_last_response,
-            [state
+            [state],
             [textbox, upvote_btn, downvote_btn, flag_btn],
         )
         chatbot.like(
             vote_selected_response,
-            [state
+            [state],
             [],
         )
         flag_btn.click(
             flag_last_response,
-            [state
+            [state],
             [textbox, upvote_btn, downvote_btn, flag_btn],
         )
         regenerate_btn.click(
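Each vote listener's outputs list ([textbox, upvote_btn, downvote_btn, flag_btn]) is filled positionally by the handler's return value (textbox,) + (disable_btn,) * 3, which clears the textbox and greys out the three vote buttons. A minimal sketch of that pattern (the definition of disable_btn here is an assumption; the real one lives elsewhere in app.py):

import gradio as gr

disable_btn = gr.update(interactive=False)  # assumed definition

def upvote(state, request: gr.Request):
    textbox = gr.MultimodalTextbox(value=None, interactive=True)
    return (textbox,) + (disable_btn,) * 3  # maps onto [textbox, upvote_btn, downvote_btn, flag_btn]

with gr.Blocks() as demo:
    state = gr.State()
    textbox = gr.MultimodalTextbox()
    upvote_btn, downvote_btn, flag_btn = gr.Button("👍 Upvote"), gr.Button("👎 Downvote"), gr.Button("⚠️ Flag")
    upvote_btn.click(upvote, [state], [textbox, upvote_btn, downvote_btn, flag_btn])

demo.launch()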
@@ -627,14 +579,11 @@ def build_demo(embed_mode):
             http_bot,
             [
                 state,
-                model_selector,
                 temperature,
                 top_p,
                 repetition_penalty,
                 max_output_tokens,
                 max_input_tiles,
-                # bbox_threshold,
-                # mask_threshold,
             ],
             [state, chatbot, textbox] + btn_list,
         )
@@ -642,39 +591,33 @@ def build_demo(embed_mode):
 
         textbox.submit(
             add_text,
-            [state, textbox, system_prompt
-            [state, chatbot, textbox
+            [state, textbox, system_prompt],
+            [state, chatbot, textbox] + btn_list,
         ).then(
             http_bot,
             [
                 state,
-                model_selector,
                 temperature,
                 top_p,
                 repetition_penalty,
                 max_output_tokens,
                 max_input_tiles,
-                # bbox_threshold,
-                # mask_threshold,
             ],
             [state, chatbot, textbox] + btn_list,
         )
         submit_btn.click(
             add_text,
-            [state, textbox, system_prompt
-            [state, chatbot, textbox
+            [state, textbox, system_prompt],
+            [state, chatbot, textbox] + btn_list,
         ).then(
             http_bot,
             [
                 state,
-                model_selector,
                 temperature,
                 top_p,
                 repetition_penalty,
                 max_output_tokens,
                 max_input_tiles,
-                # bbox_threshold,
-                # mask_threshold,
             ],
             [state, chatbot, textbox] + btn_list,
         )
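Both the textbox submit and the Send button run the same two-step chain: add_text posts the user turn and clears the textbox, then http_bot (a generator) streams the model reply into the chatbot. A minimal sketch of that .submit(...).then(...) pattern with hypothetical handlers:

import time
import gradio as gr

def add_text(history, message):
    history = history + [(message, None)]
    return history, ""  # update the chat, clear the textbox

def bot(history):
    reply = "streamed reply"
    history[-1] = (history[-1][0], "")
    for ch in reply:  # yielding partial results streams the answer
        history[-1] = (history[-1][0], history[-1][1] + ch)
        time.sleep(0.02)
        yield history

with gr.Blocks() as demo:
    chat = gr.Chatbot()
    box = gr.Textbox()
    box.submit(add_text, [chat, box], [chat, box]).then(bot, [chat], [chat])

demo.launch()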