from settings import *

from typing import Iterator

import gradio as gr

from model import get_input_token_length, run


def clear_and_save_textbox(message: str) -> tuple[str, str]:
    """Clear the textbox and stash the submitted message."""
    return '', message


def display_input(message: str,
                  history: list[tuple[str, str]]) -> list[tuple[str, str]]:
    """Append the user message to the chat history with an empty response."""
    history.append((message, ''))
    return history


def delete_prev_fn(
        history: list[tuple[str, str]]) -> tuple[list[tuple[str, str]], str]:
    """Remove the last exchange and return its message so it can be restored."""
    try:
        message, _ = history.pop()
    except IndexError:
        message = ''
    return history, message or ''


def generate(
    message: str,
    history_with_input: list[tuple[str, str]],
    system_prompt: str,
    max_new_tokens: int,
    temperature: float,
    top_p: float,
    top_k: int,
    repeat_penalty: float,
) -> Iterator[list[tuple[str, str]]]:
    """Stream the model output, yielding the updated chat history at each step."""
    if max_new_tokens > MAX_MAX_NEW_TOKENS:
        raise ValueError(f'max_new_tokens must not exceed {MAX_MAX_NEW_TOKENS}')
    history = history_with_input[:-1]
    generator = run(message, history, system_prompt, max_new_tokens,
                    temperature, top_p, top_k, repeat_penalty)
    try:
        first_response = next(generator)
        yield history + [(message, first_response)]
    except StopIteration:
        yield history + [(message, '')]
    for response in generator:
        yield history + [(message, response)]


def process_example(message: str) -> tuple[str, list[tuple[str, str]]]:
    """Run an example prompt to completion and return the final chat history."""
    generator = generate(message, [], DEFAULT_SYSTEM_PROMPT, 1024, 0.6, 0.9,
                         49, 1.0)
    for x in generator:
        pass
    return '', x


def check_input_token_length(message: str,
                             chat_history: list[tuple[str, str]],
                             system_prompt: str) -> None:
    """Reject inputs that exceed the model's context budget."""
    input_token_length = get_input_token_length(message, chat_history,
                                                system_prompt)
    if input_token_length > MAX_INPUT_TOKEN_LENGTH:
        raise gr.Error(
            f'The accumulated input is too long ({input_token_length} > '
            f'{MAX_INPUT_TOKEN_LENGTH}). Clear your chat history and try again.'
        )


with gr.Blocks(css='style.css') as demo:
    gr.Markdown(DESCRIPTION)

    with gr.Group():
        chatbot = gr.Chatbot(label='Chatbot').style(height=400)
        with gr.Row():
            textbox = gr.Textbox(
                show_label=False,
                placeholder='Type a message...',
            )
            submit_button = gr.Button('Submit', variant='primary')
    with gr.Row():
        retry_button = gr.Button('🔄 Retry', variant='secondary')
        undo_button = gr.Button('↩ī¸ Undo', variant='secondary')
        clear_button = gr.Button('🗑ī¸ Clear', variant='secondary')

    saved_input = gr.State()

    with gr.Accordion(label='Advanced options', open=False):
        system_prompt = gr.Textbox(label='System prompt',
                                   value=DEFAULT_SYSTEM_PROMPT,
                                   lines=6,
                                   visible=False)
        max_new_tokens = gr.Slider(
            label='Max new tokens',
            minimum=1,
            maximum=MAX_MAX_NEW_TOKENS,
            step=1,
            value=DEFAULT_MAX_NEW_TOKENS,
        )
        temperature = gr.Slider(
            label='Temperature',
            minimum=0.1,
            maximum=4.0,
            step=0.1,
            value=0.6,
        )
        top_p = gr.Slider(
            label='Top-p (nucleus sampling)',
            minimum=0.05,
            maximum=1.0,
            step=0.05,
            value=0.9,
        )
        top_k = gr.Slider(
            label='Top-k',
            minimum=1,
            maximum=1000,
            step=1,
            value=40,
        )
        repeat_penalty = gr.Slider(
            label='Repetition penalty',
            minimum=1.0,
            maximum=1.5,
            step=0.01,
            value=1.05,
        )

    gr.Examples(
        examples=[
            "A client runs a platform that stores and retrieves numerous files in different formats (documents, audio, visual, etc.) and requires a more efficient way of storing and retrieving these files. The platform is getting slower due to the rapidly increasing amount of data. Currently, all the files are stored on a single server, but server space is a significant issue.\nThe client's backend system is primarily built on PostgreSQL; this makes the problem complex because, while PostgreSQL is powerful, it is not designed to handle large file storage efficiently.",
            "A client runs a platform based on an ESXi bare-metal hypervisor on his own hardware. This includes various large MySQL databases, websites, email services and numerous files in different formats (documents, audio, visual, etc.). The client wants to migrate to a more serverless design and take advantage of cloud servers, as his hardware is getting old and harder to maintain. The MySQL databases are very large (over 1 TiB), as is the number of files (several million files, several TiB). This makes the problem complex, because the platform must not go down during the migration.",
            "A client runs a CRM system that is written in PHP 5 and requires a migration to Python. Unfortunately, he has no manpower to do this. He is researching ways to migrate his code via AI or online services.",
            "A client runs a complex corporate structure. The tax authority found a mistake in his bookings that will result in back tax payments in the millions. This is not negotiable. Therefore the client wants to withdraw from the corporate structure and continue his business model in a new one. He also wants to transfer all relevant assets to the new corporate structure to continue business. Although this is a legal procedure in principle, the asset shift easily creates the appearance of an offence if badly timed. In order to avoid any trouble, the tax authority must not know about his plan until it is implemented. The assets must be transferred before the tax authority issues a legally binding notice. In order not to be associated with the impending bankruptcy of the old corporate structure, he will have to rename the old structure in time and resign as a shareholder and director.\nThe difficulty is to time the renaming of the old corporate structure, his resignation and the asset shift with the establishment and shifting of the new business operations.",
        ],
        inputs=textbox,
        outputs=[textbox, chatbot],
        fn=process_example,
        cache_examples=False,
    )

    # Pressing Enter in the textbox: save and clear the input, show it in the
    # chat, validate its token length, then stream the model response.
    textbox.submit(
        fn=clear_and_save_textbox,
        inputs=textbox,
        outputs=[textbox, saved_input],
        queue=False,
    ).then(
        fn=display_input,
        inputs=[saved_input, chatbot],
        outputs=chatbot,
    ).then(
        fn=check_input_token_length,
        inputs=[saved_input, chatbot, system_prompt],
    ).success(
        fn=generate,
        inputs=[
            saved_input,
            chatbot,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            top_k,
            repeat_penalty,
        ],
        outputs=chatbot,
        api_name="submit",
    )

    # The Submit button runs the same chain as textbox.submit.
    button_event_preprocess = submit_button.click(
        fn=clear_and_save_textbox,
        inputs=textbox,
        outputs=[textbox, saved_input],
        queue=False,
    ).then(
        fn=display_input,
        inputs=[saved_input, chatbot],
        outputs=chatbot,
    ).then(
        fn=check_input_token_length,
        inputs=[saved_input, chatbot, system_prompt],
    ).success(
        fn=generate,
        inputs=[
            saved_input,
            chatbot,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            top_k,
            repeat_penalty,
        ],
        outputs=chatbot,
        api_name="submit_pre",
    )

    # Retry: drop the last exchange and regenerate a response for it.
    retry_button.click(
        fn=delete_prev_fn,
        inputs=chatbot,
        outputs=[chatbot, saved_input],
        queue=False,
    ).then(
        fn=display_input,
        inputs=[saved_input, chatbot],
        outputs=chatbot,
    ).then(
        fn=generate,
        inputs=[
            saved_input,
            chatbot,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            top_k,
            repeat_penalty,
        ],
        outputs=chatbot,
        api_name="retry",
    )

    # Undo: remove the last exchange and put its message back in the textbox.
    undo_button.click(
        fn=delete_prev_fn,
        inputs=chatbot,
        outputs=[chatbot, saved_input],
        queue=False,
    ).then(
        fn=lambda x: x,
        inputs=[saved_input],
        outputs=textbox,
    )

    clear_button.click(
        fn=lambda: ([], ''),
        outputs=[chatbot, saved_input],
    )

demo.queue(concurrency_count=1).launch(server_name="0.0.0.0")
# demo.launch(server_name="0.0.0.0")