feat: fire demo ready
- src/model/__pycache__/__init__.cpython-310.pyc +0 -0
- src/model/__pycache__/compression.cpython-310.pyc +0 -0
- src/model/__pycache__/llama_condense_monkey_patch.cpython-310.pyc +0 -0
- src/model/__pycache__/model_adapter.cpython-310.pyc +0 -0
- src/model/__pycache__/model_chatglm.cpython-310.pyc +0 -0
- src/model/__pycache__/model_cllm.cpython-310.pyc +0 -0
- src/model/__pycache__/model_codet5p.cpython-310.pyc +0 -0
- src/model/__pycache__/model_exllama.cpython-310.pyc +0 -0
- src/model/__pycache__/model_falcon.cpython-310.pyc +0 -0
- src/model/__pycache__/model_registry.cpython-310.pyc +0 -0
- src/model/__pycache__/model_xfastertransformer.cpython-310.pyc +0 -0
- src/model/__pycache__/model_yuan2.cpython-310.pyc +0 -0
- src/model/__pycache__/monkey_patch_non_inplace.cpython-310.pyc +0 -0
- src/serve/gradio_block_arena_vision_named.py +26 -26
- vision-tmp-2024-07-26-conv.json +4 -0
src/model/__pycache__/__init__.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/__init__.cpython-310.pyc and b/src/model/__pycache__/__init__.cpython-310.pyc differ

src/model/__pycache__/compression.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/compression.cpython-310.pyc and b/src/model/__pycache__/compression.cpython-310.pyc differ

src/model/__pycache__/llama_condense_monkey_patch.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/llama_condense_monkey_patch.cpython-310.pyc and b/src/model/__pycache__/llama_condense_monkey_patch.cpython-310.pyc differ

src/model/__pycache__/model_adapter.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_adapter.cpython-310.pyc and b/src/model/__pycache__/model_adapter.cpython-310.pyc differ

src/model/__pycache__/model_chatglm.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_chatglm.cpython-310.pyc and b/src/model/__pycache__/model_chatglm.cpython-310.pyc differ

src/model/__pycache__/model_cllm.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_cllm.cpython-310.pyc and b/src/model/__pycache__/model_cllm.cpython-310.pyc differ

src/model/__pycache__/model_codet5p.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_codet5p.cpython-310.pyc and b/src/model/__pycache__/model_codet5p.cpython-310.pyc differ

src/model/__pycache__/model_exllama.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_exllama.cpython-310.pyc and b/src/model/__pycache__/model_exllama.cpython-310.pyc differ

src/model/__pycache__/model_falcon.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_falcon.cpython-310.pyc and b/src/model/__pycache__/model_falcon.cpython-310.pyc differ

src/model/__pycache__/model_registry.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_registry.cpython-310.pyc and b/src/model/__pycache__/model_registry.cpython-310.pyc differ

src/model/__pycache__/model_xfastertransformer.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_xfastertransformer.cpython-310.pyc and b/src/model/__pycache__/model_xfastertransformer.cpython-310.pyc differ

src/model/__pycache__/model_yuan2.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/model_yuan2.cpython-310.pyc and b/src/model/__pycache__/model_yuan2.cpython-310.pyc differ

src/model/__pycache__/monkey_patch_non_inplace.cpython-310.pyc
CHANGED
Binary files a/src/model/__pycache__/monkey_patch_non_inplace.cpython-310.pyc and b/src/model/__pycache__/monkey_patch_non_inplace.cpython-310.pyc differ
src/serve/gradio_block_arena_vision_named.py
CHANGED
@@ -286,7 +286,7 @@ def build_side_by_side_vision_ui_named(models, random_questions=None):
         with gr.Column():
             model_names_dict = {
                 "llava-fire": 'FIRE-LLaVA',
-                "llava-original": "LLaVA-
+                "llava-original": "LLaVA-NeXT-LLaMA-3-8B"
             }
             model_choices = []
             for model_value in models:
@@ -326,7 +326,9 @@ def build_side_by_side_vision_ui_named(models, random_questions=None):
         )
     with gr.Row():
         recommendation = gr.Textbox(
-            visible=
+            visible=True,
+            label="Teacher generated feedback:",
+            show_copy_button=True,
         )
     with gr.Row():
         textbox = gr.MultimodalTextbox(
@@ -352,20 +354,10 @@ def build_side_by_side_vision_ui_named(models, random_questions=None):
             {
                 "files": ["assets/image_50.png"],
                 "text": "Please directly answer the question and provide the correct option letter, e.g., A, B, C, D.\nQuestion: As shown in the figure, then angle COE = ()\nChoices:\nA:30°\nB:140°\nC:50°\nD:60°"
-            }
-
-        [
-            {
-                "files": ["assets/test_11407.jpg"],
-                "text": """Hint: Please answer the question and provide the correct option letter, e.g., A, B, C, D, at the end.
-Question: 如图,△ABC中,AD为中线,AD⊥AC,∠BAD=30°,AB=3,则AC长()
-Choices:
-A. 2.5
-B. 2
-C. 1
-D. 1.5"""
-            }
+            },
+            "Your answer is incorrect. The question asks for the angle COE in the context of the figure provided. Consider the relationships between the angles and the lines in the figure to find the correct answer. Try again by analyzing the given diagram more carefully."
         ],
+
         [
             {
                 "files": ["assets/magnetic.png"],
@@ -374,7 +366,8 @@ Question: Will these magnets attract or repel each other?
 Choices:
 A. repel
 B. attract"""
-            }
+            },
+            """You correctly identified that the letters "N" and "S" represent opposite poles of a magnet. However, your conclusion that they repel each other is incorrect. Please reconsider your answer with this information in mind."""
         ],
         [
             {
@@ -385,9 +378,23 @@ Choices:
 A. Arctic fox
 B. rough-legged hawk
 C. mushroom"""
-            }
-
-
+            },
+            """You correctly identified that the primary consumers are the organisms that feed directly on the producers. However, your answer is incorrect. The mushroom is not a primary consumer; it is a decomposer. Look again at the food web and identify which organisms are shown as consuming the producers directly. Try to find the correct option among the given choices."""
+        ],
+        [
+            {
+                "files": ["assets/test_11407.jpg"],
+                "text": """Hint: Please answer the question and provide the correct option letter, e.g., A, B, C, D, at the end.
+Question: 如图,△ABC中,AD为中线,AD⊥AC,∠BAD=30°,AB=3,则AC长()
+Choices:
+A. 2.5
+B. 2
+C. 1
+D. 1.5"""
+            },
+            ""
+        ],
+    ],inputs=[textbox, recommendation])
     with gr.Accordion("Parameters", open=False) as parameter_row:
         temperature = gr.Slider(
             minimum=0.0,
@@ -486,11 +493,6 @@ function (a, b, c, d) {
     textbox.input(add_image, [textbox], [imagebox]).then(
         set_visible_image, [textbox], [image_column]
     ).then(clear_history_example, None, states + chatbots + btn_list)
-    def get_recommendation(chatbots):
-
-        logger.info(f"chatbots {chatbots}")
-
-        return [gr.Textbox(visible=True, value="Teacher Feedback Recommendation Content")]
 
     textbox.submit(
         add_text,
@@ -502,8 +504,6 @@ function (a, b, c, d) {
         states + chatbots + btn_list,
     ).then(
         flash_buttons, [], btn_list
-    ).then(
-        get_recommendation, chatbots, [recommendation]
     )
 
     if random_questions:
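
The hunks above pair each `gr.MultimodalTextbox` example with a pre-written feedback string and route the pair through `inputs=[textbox, recommendation]`, while the placeholder `get_recommendation` callback is dropped from the submit chain. The following is a minimal standalone sketch of that wiring, assuming a recent Gradio release with `MultimodalTextbox`; the example content and file name are illustrative, not the repository's assets.

```python
# Minimal sketch of the examples/recommendation wiring in this commit.
# Assumptions: Gradio >= 4.26 (for MultimodalTextbox); example content is illustrative.
import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():
        # Statically visible feedback box, mirroring the new gr.Textbox settings.
        recommendation = gr.Textbox(
            visible=True,
            label="Teacher generated feedback:",
            show_copy_button=True,
        )
    with gr.Row():
        textbox = gr.MultimodalTextbox(show_label=False)

    # Each example is a [multimodal message, feedback string] pair; selecting it
    # fills both the textbox and the recommendation box via inputs=[...].
    gr.Examples(
        examples=[
            [
                {"files": [], "text": "Question: Will these magnets attract or repel each other?"},
                "Pre-written teacher feedback shown when this example is selected.",
            ],
        ],
        inputs=[textbox, recommendation],
    )

if __name__ == "__main__":
    demo.launch()
```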
|
vision-tmp-2024-07-26-conv.json
ADDED
@@ -0,0 +1,4 @@
+{"tstamp": 1721958976.4837, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1721958941.2352, "finish": 1721958976.4837, "state": {"template_name": "llava-fire", "system_message": "You are a helpful language and vision assistant. You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language.", "roles": ["user", "assistant"], "messages": [["user", ["Please directly answer the question and provide the correct option letter, e.g., A, B, C, D.\nQuestion: As shown in the figure, then angle COE = ()\nChoices:\nA:30\u00b0\nB:140\u00b0\nC:50\u00b0\nD:60\u00b0", ["4cb5a5234624686ecd8b85a1fab041f9"]]], ["assistant", "The angle COE is given as 50 degrees in the image. Therefore, the correct answer to the question is:\n\nC: 50\u00b0"]], "offset": 0, "conv_id": "889c4b7b86ff4619bdec10b03e841e5c", "model_name": "llava-fire", "has_csam_image": false}, "ip": "123.127.249.92"}
+{"tstamp": 1721958976.4895, "type": "chat", "model": "llava-original", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1721958958.0537, "finish": 1721958976.4895, "state": {"template_name": "llava-original", "system_message": "You are a helpful language and vision assistant. You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language.", "roles": ["user", "assistant"], "messages": [["user", ["Please directly answer the question and provide the correct option letter, e.g., A, B, C, D.\nQuestion: As shown in the figure, then angle COE = ()\nChoices:\nA:30\u00b0\nB:140\u00b0\nC:50\u00b0\nD:60\u00b0", ["4cb5a5234624686ecd8b85a1fab041f9"]]], ["assistant", "The angle COE is formed by the intersection of two lines, and it is equal to the sum of the angles at point E formed by the intersecting lines. Since the angle at point E is given as 50\u00b0, and the lines intersect at a right angle (90\u00b0), the angle COE is the sum of these two angles. Therefore, the correct answer to the question is:\n\nC: 50\u00b0"]], "offset": 0, "conv_id": "32159bf2426c45229202bd09a3b4e489", "model_name": "llava-original", "has_csam_image": false}, "ip": "123.127.249.92"}
+{"tstamp": 1721959459.3784, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1721959423.9259, "finish": 1721959459.3784, "state": {"template_name": "llava-fire", "system_message": "You are a helpful language and vision assistant. You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language.", "roles": ["user", "assistant"], "messages": [["user", ["Please directly answer the question and provide the correct option letter, e.g., A, B, C, D.\nQuestion: As shown in the figure, then angle COE = ()\nChoices:\nA:30\u00b0\nB:140\u00b0\nC:50\u00b0\nD:60\u00b0", ["4cb5a5234624686ecd8b85a1fab041f9"]]], ["assistant", "The angle COE is given as 50 degrees in the image. Therefore, the correct answer to the question is:\n\nC: 50\u00b0"]], "offset": 0, "conv_id": "11dde82ce02c4b36aa018359a4b73c01", "model_name": "llava-fire", "has_csam_image": false}, "ip": "123.127.249.92"}
+{"tstamp": 1721959459.3843, "type": "chat", "model": "llava-original", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1721959441.1134, "finish": 1721959459.3843, "state": {"template_name": "llava-original", "system_message": "You are a helpful language and vision assistant. You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language.", "roles": ["user", "assistant"], "messages": [["user", ["Please directly answer the question and provide the correct option letter, e.g., A, B, C, D.\nQuestion: As shown in the figure, then angle COE = ()\nChoices:\nA:30\u00b0\nB:140\u00b0\nC:50\u00b0\nD:60\u00b0", ["4cb5a5234624686ecd8b85a1fab041f9"]]], ["assistant", "The angle COE is formed by the intersection of two lines, and it is equal to the sum of the angles at point E formed by the intersecting lines. Since the angle at point E is given as 50\u00b0, and the lines intersect at a right angle (90\u00b0), the angle COE is the sum of these two angles. Therefore, the correct answer to the question is:\n\nC: 50\u00b0"]], "offset": 0, "conv_id": "1e5b56eb93fb4e25827120018166ea2e", "model_name": "llava-original", "has_csam_image": false}, "ip": "123.127.249.92"}
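
The new log file is JSON Lines: one chat record per line with `tstamp`, `model`, `gen_params`, `start`/`finish`, and a nested `state` holding the conversation `messages`. Below is a hedged sketch for loading and summarizing such a file; the field names come from the records above, and the summary format itself is an assumption, not part of the commit.

```python
# Sketch for inspecting a vision-tmp-*-conv.json log (one JSON object per line).
# Field names follow the records above; output formatting is illustrative only.
import json

def summarize_conv_log(path: str) -> None:
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            record = json.loads(line)
            latency = record["finish"] - record["start"]          # seconds per turn
            last_turn = record["state"]["messages"][-1]           # ["assistant", reply]
            print(f'{record["model"]:<16} {latency:6.1f}s  {last_turn[1][:60]!r}')

if __name__ == "__main__":
    summarize_conv_log("vision-tmp-2024-07-26-conv.json")
```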