# Explicit stdlib imports: `threading` and `Sequence` are used below and were
# previously available only via the star import's re-exports, which is fragile.
import threading
from typing import Sequence

import gradio as gr

from lmdeploy.serve.gradio.app import *


async def chat_stream_test(
    instruction: str,
    state_chatbot: Sequence,
    cancel_btn: gr.Button,
    reset_btn: gr.Button,
    request: gr.Request,
):
    """Simulate a streaming chat exchange with the AI assistant (test stub).

    Streams UI updates by yielding tuples of
    (state, chatbot display, cancel-btn update, reset-btn update, summary text).

    Args:
        instruction (str): user's prompt
        state_chatbot (Sequence): the chatting history
        cancel_btn (gr.Button): cancel button component (toggled via yields)
        reset_btn (gr.Button): reset button component (toggled via yields)
        request (gr.Request): the request from a user (injected by gradio)

    NOTE(review): each yield carries 5 values but the `.submit` wiring below
    lists only 4 output components — confirm the intended outputs; some gradio
    versions warn or error on this arity mismatch.
    """
    # First chunk: disable both buttons while the "model" is "working".
    yield (state_chatbot, state_chatbot, disable_btn, disable_btn, '')

    # Derive a session id from the client IP when available; fall back to the
    # current thread id otherwise.
    session_id = threading.current_thread().ident
    if request is not None:
        # e.g. '10.0.0.1' -> 100001; assumes request.kwargs carries the client
        # dict — TODO confirm against the gradio version in use.
        session_id = int(request.kwargs['client']['host'].replace('.', ''))
        print(request.kwargs['client'])
    else:
        print('Warning, could not get a request ip')
    print(f'session_id {session_id}')

    bot_summarized_response = ''
    # Append the user turn with no assistant reply yet.
    state_chatbot = state_chatbot + [(instruction, None)]

    # Two identical yields simulate successive streaming chunks; cancel stays
    # disabled, reset is re-enabled.
    yield (state_chatbot, state_chatbot, disable_btn, enable_btn,
           bot_summarized_response.strip())
    yield (state_chatbot, state_chatbot, disable_btn, enable_btn,
           bot_summarized_response.strip())


with gr.Blocks(css=CSS, theme=THEME) as demo:
    state_chatbot = gr.State([])

    with gr.Column(elem_id='container'):
        gr.Markdown('## LMDeploy Playground')

        chatbot = gr.Chatbot(elem_id='chatbot', label="test")
        instruction_txtbox = gr.Textbox(
            placeholder='Please input the instruction',
            label='Instruction')
        with gr.Row():
            cancel_btn = gr.Button(value='Cancel', interactive=False)
            reset_btn = gr.Button(value='Reset')

    # Submitting the textbox streams chat updates; gradio injects gr.Request
    # for the annotated `request` parameter automatically.
    send_event = instruction_txtbox.submit(
        chat_stream_test,
        [instruction_txtbox, state_chatbot, cancel_btn, reset_btn],
        [state_chatbot, chatbot, cancel_btn, reset_btn])
    # Clear the textbox after submit. NOTE(review): `gr.Textbox.update` was
    # removed in gradio 4.x — confirm the pinned gradio version.
    instruction_txtbox.submit(
        lambda: gr.Textbox.update(value=''),
        [],
        [instruction_txtbox],
    )
    # Cancel aborts the in-flight send_event generator.
    cancel_btn.click(
        cancel_local_func,
        [state_chatbot, cancel_btn, reset_btn],
        [state_chatbot, cancel_btn, reset_btn],
        cancels=[send_event])
    # Reset clears history and the textbox, also aborting any in-flight send.
    reset_btn.click(
        reset_local_func,
        [instruction_txtbox, state_chatbot],
        [state_chatbot, chatbot, instruction_txtbox],
        cancels=[send_event])

    # print(f'server is gonna mount on: http://{server_name}:{server_port}')
# Serve the demo with a request queue: up to 4 concurrent workers, at most
# 100 queued requests. NOTE(review): `concurrency_count` was removed in
# gradio 4.x (replaced by per-event `concurrency_limit`) — confirm the
# pinned gradio version supports this signature.
demo.queue(concurrency_count=4, max_size=100).launch()