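"""Gradio app that generates single-file HTML applications with
Qwen2.5-Coder-32B-Instruct served through the SambaNova Cloud API,
streaming the generated code and rendering a live preview in an iframe."""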
import os
import re
import base64
from typing import Dict, List, Optional, Tuple
import gradio as gr
import sambanova_gradio
import modelscope_studio.components.base as ms
import modelscope_studio.components.legacy as legacy
import modelscope_studio.components.antd as antd
from config import DEMO_LIST, SystemPrompt
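
# The SambaNova API key is read from the environment (SAMBANOVA_API_KEY).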
YOUR_API_TOKEN = os.getenv('SAMBANOVA_API_KEY')
History = List[Tuple[str, str]]
Messages = List[Dict[str, str]]
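

# Helpers to convert between the Gradio chat-history format
# ([(user, assistant), ...]) and the OpenAI-style messages list.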
def history_to_messages(history: History, system: str) -> Messages:
    messages = [{'role': 'system', 'content': system}]
    for h in history:
        messages.append({'role': 'user', 'content': h[0]})
        messages.append({'role': 'assistant', 'content': h[1]})
    return messages

def messages_to_history(messages: Messages) -> History:
    assert messages[0]['role'] == 'system'
    history = []
    for q, r in zip(messages[1::2], messages[2::2]):
        history.append([q['content'], r['content']])
    return history
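

# Extract the HTML payload from a ```html ... ``` fenced block in the model
# output; fall back to the raw text when no fenced block is present.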
def remove_code_block(text):
    pattern = r'```html\n(.+?)\n```'
    match = re.search(pattern, text, re.DOTALL)
    if match:
        return match.group(1).strip()
    else:
        return text.strip()

def history_render(history: History):
    return gr.update(open=True), history


def clear_history():
    return []
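

# Wrap the generated HTML in an iframe (as a base64 data URI) so it can be
# previewed inside the app without touching the host page.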
def send_to_sandbox(code):
    encoded_html = base64.b64encode(code.encode('utf-8')).decode('utf-8')
    data_uri = f"data:text/html;charset=utf-8;base64,{encoded_html}"
    return f'<iframe src="{data_uri}" width="100%" height="920px"></iframe>'
def demo_card_click(e: gr.EventData):
    index = e._data['component']['index']
    return DEMO_LIST[index]['description']
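

# UI layout: left column holds the prompt input, examples, and settings;
# right column holds the empty/loading/render preview panel.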
with gr.Blocks(css_paths="app.css") as demo:
    history_state = gr.State([])
    setting = gr.State({
        "system": SystemPrompt,
    })

    with ms.Application() as app:
        with antd.ConfigProvider():
            with antd.Row(gutter=[32, 12]) as layout:
                with antd.Col(span=24, md=8):
                    with antd.Flex(vertical=True, gap="middle", wrap=True):
                        header = gr.HTML("""
                        <div style="text-align: center;">
                            <!-- Container for GIFs -->
                            <div style="display: flex; justify-content: center; gap: 20px; margin-bottom: 20px;">
                                <!-- First GIF -->
                                <img src="https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSqrdNhqy-xaE9EXRGQdiHF4Fu0aPRWnFvjJA&s" width="200px" />
                                <!-- Second GIF -->
                                <img src="//img.alicdn.com/imgextra/i2/O1CN01KDhOma1DUo8oa7OIU_!!6000000000220-1-tps-240-240.gif" width="200px" />
                            </div>
                            <!-- Header Text -->
                            <h1>Qwen2.5-Coder-32B with SambaNova Cloud API</h1>
                        </div>
                        """)
                        input = antd.InputTextarea(
                            size="large", allow_clear=True,
                            placeholder="Please enter what kind of application you want")
                        btn = antd.Button("send", type="primary", size="large")
                        clear_btn = antd.Button("clear history", type="default", size="large")
                        antd.Divider("examples")
                        with antd.Flex(gap="small", wrap=True):
                            with ms.Each(DEMO_LIST):
                                with antd.Card(hoverable=True, as_item="card") as demoCard:
                                    antd.CardMeta()
                                demoCard.click(demo_card_click, outputs=[input])
                        antd.Divider("setting")
                        with antd.Flex(gap="small", wrap=True):
                            settingPromptBtn = antd.Button(
                                "⚙️ set system Prompt", type="default")
                            codeBtn = antd.Button("🧑‍💻 view code", type="default")
                            historyBtn = antd.Button("📜 history", type="default")
                    with antd.Modal(open=False, title="set system Prompt", width="800px") as system_prompt_modal:
                        systemPromptInput = antd.InputTextarea(
                            SystemPrompt, auto_size=True)

                    settingPromptBtn.click(lambda: gr.update(open=True),
                                           inputs=[], outputs=[system_prompt_modal])
                    system_prompt_modal.ok(lambda input: ({"system": input}, gr.update(open=False)),
                                           inputs=[systemPromptInput],
                                           outputs=[setting, system_prompt_modal])
                    system_prompt_modal.cancel(lambda: gr.update(open=False),
                                               outputs=[system_prompt_modal])

                    with antd.Drawer(open=False, title="code", placement="left", width="750px") as code_drawer:
                        code_output = legacy.Markdown()

                    codeBtn.click(lambda: gr.update(open=True),
                                  inputs=[], outputs=[code_drawer])
                    code_drawer.close(lambda: gr.update(open=False),
                                      inputs=[], outputs=[code_drawer])

                    with antd.Drawer(open=False, title="history", placement="left", width="900px") as history_drawer:
                        history_output = legacy.Chatbot(show_label=False, flushing=False,
                                                        height=960, elem_classes="history_chatbot")

                    historyBtn.click(history_render,
                                     inputs=[history_state],
                                     outputs=[history_drawer, history_output])
                    history_drawer.close(lambda: gr.update(open=False),
                                         inputs=[], outputs=[history_drawer])
                with antd.Col(span=24, md=16):
                    with ms.Div(elem_classes="right_panel"):
                        gr.HTML('<div class="render_header"><span class="header_btn"></span><span class="header_btn"></span><span class="header_btn"></span></div>')
                        with antd.Tabs(active_key="empty", render_tab_bar="() => null") as state_tab:
                            with antd.Tabs.Item(key="empty"):
                                empty = antd.Empty(description="empty input", elem_classes="right_content")
                            with antd.Tabs.Item(key="loading"):
                                loading = antd.Spin(True, tip="coding...", size="large", elem_classes="right_content")
                            with antd.Tabs.Item(key="render"):
                                sandbox = gr.HTML(elem_classes="html_content")
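
            # Generate code with Qwen2.5-Coder via the SambaNova API: stream the
            # partial response into the code drawer, then save history and render
            # the final HTML in the sandbox preview once generation finishes.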
            def generation_code(query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
                if query is None:
                    query = ''
                if _history is None:
                    _history = []

                # Preprocess: build the OpenAI-style message list (system prompt,
                # prior turns, then the new user query) sent to the model.
                def preprocess(message, history):
                    messages = [{'role': 'system', 'content': _setting['system']}]
                    for user_msg, assistant_msg in history:
                        messages.append({'role': 'user', 'content': user_msg})
                        messages.append({'role': 'assistant', 'content': assistant_msg})
                    messages.append({'role': 'user', 'content': message})
                    return {'messages': messages}

                def postprocess(response_text):
                    return response_text

                # Get the inference function from sambanova_gradio
                fn = sambanova_gradio.get_fn(
                    model_name='Qwen2.5-Coder-32B-Instruct',
                    preprocess=preprocess,
                    postprocess=postprocess,
                    api_key=YOUR_API_TOKEN,
                    base_url="https://fast-cloud-snova-ai-dev-0-api.cloud.snova.ai/v1"
                )

                response_text = ''
                assistant_response = ''
                local_history = _history.copy()
                for content in fn(query, local_history):
                    response_text = content
                    # Stream the partial response into the code drawer while generating
                    yield {
                        code_output: response_text,
                        state_tab: gr.update(active_key="loading"),
                        code_drawer: gr.update(open=True),
                    }
                assistant_response = response_text
                local_history.append([query, assistant_response])
                code = remove_code_block(assistant_response)
                # Final update: store history, render the preview, close the drawer
                yield {
                    code_output: assistant_response,
                    history_state: local_history,
                    sandbox: send_to_sandbox(code),
                    state_tab: gr.update(active_key="render"),
                    code_drawer: gr.update(open=False),
                }
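
            # Wire the send and clear-history buttons to their handlers.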
            btn.click(generation_code,
                      inputs=[input, setting, history_state],
                      outputs=[code_output, history_state, sandbox, state_tab, code_drawer])
            clear_btn.click(clear_history, inputs=[], outputs=[history_state])

if __name__ == "__main__":
    demo.queue(default_concurrency_limit=20).launch(ssr_mode=False)