# I'll try to write bilingual comments.
#=========

#=========
# Library Import
print("=========\nBegin import library\n")

# default
import random
import gradio as gr
from huggingface_hub import InferenceClient

# added
import os
from google import genai

print("\nEnd import library\n=========")
#=========

#=========
# Backend Logic
print("=========\nBegin definition Backend Logic\n")

print("Create default API settings")
gemini_API_key = os.getenv("GEMINI_API_KEY")
client = genai.Client(
    api_key=gemini_API_key,
    http_options=genai.types.HttpOptions(api_version='v1alpha'),
)

print("Set default model")
used_model = "gemini-2.5-flash-preview-04-17"

print("define class format_history")
class format_history:
    @staticmethod
    def format_for_gemini(history):
        # Convert Gradio "messages"-style history into the parts-based
        # format expected by the google-genai chat API.
        print("Format history")
        gemini_history = []
        for message_to_check in history:
            if message_to_check.get("role") == "user":
                gemini_history.append({
                    "role": "user",
                    "parts": [{"text": message_to_check.get("content", "")}]
                })
            elif message_to_check.get("role") == "assistant":
                gemini_history.append({
                    "role": "model",
                    "parts": [{"text": message_to_check.get("content", "")}]
                })
        return gemini_history

print("define class gemini api actions")
class gemini_API_actions:
    apy_key = ""
    used_model = ""

    @staticmethod
    def set_used_model(new_model: str):
        # Store the selected model on the class so other methods can read it.
        gemini_API_actions.used_model = new_model
        return

    @staticmethod
    def set_API_key(new_api_key: str):
        # Store the user-supplied API key on the class.
        gemini_API_actions.apy_key = new_api_key
        return

    @staticmethod
    def response_to_model(user_message, history):
        print(f"\n=========\nUser message\n{user_message}\n")
        formated_history = format_history.format_for_gemini(history)
        print(f"his: {formated_history}")
        print("Create chat")
        chat = client.chats.create(model=gemini_API_actions.used_model, history=formated_history)
        print("Start response")
        response = chat.send_message(user_message["text"])
        print(f"\nResponse\n{response}=========\n")
        return response.text

print("Define response work body")
def model_response(message, history):
    print(f"\n=========\nUser message\n{message}\n")
    formated_history = format_history.format_for_gemini(history)
    print(f"his: {formated_history}")
    print("Create chat")
    chat = client.chats.create(model=used_model, history=formated_history)
    try:
        print("Start response")
        response = chat.send_message(message["text"])
        print(f"\nResponse\n{response}=========\n")
        return response.text
    except Exception as e:
        print(f"\n=== Error ===\n{str(e)}")
        return f"I apologize, but I encountered an error: {str(e)}"

print("Define test1 response work")
def random_response(message, history):
    return random.choice(["Yes", "No"])

print("\nEnd definition Backend Logic\n=========")
# =========

# =========
# User Interface (UI) Definition
print("=========\nBegin definition User Interface (UI)\n")

with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="slate", neutral_hue="neutral")) as demo:
    # Using Soft theme with adjusted hues for a refined look
    print("Create visitor badge")
    gr.HTML(""" """)

    print("Create API block")
    with gr.Accordion(
        "API",
        open=False,
    ):
        print("Create API key textbox row")
        with gr.Row():
            print("Create API key textbox")
            Textbox_Block_API_key = gr.Textbox(
                label="API key",
                scale=4,
            )
        print("Create API block button row")
        with gr.Row():
            print("Create API apply button")
            Button_Block_Apply_API_key = gr.Button(
                value="Apply",
                scale=1,
            )
            print("Create API reset button")
            Button_Block_Reset_API_key = gr.Button(
                value="Reset",
                scale=1,
            )
        print("Create API state markdown")
        Markdown_Block_API_key_State = gr.Markdown("API key State: False")
        # Possible states: either the default API key or a user-supplied API key is in use.
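
        # Sketch (assumption, not wired up in the original logic): the Apply/Reset
        # buttons could update gemini_API_actions and the state markdown like this.
        # The helper names _apply_api_key and _reset_api_key are hypothetical.
        def _apply_api_key(new_api_key):
            gemini_API_actions.set_API_key(new_api_key)
            return "API key State: True"

        def _reset_api_key():
            gemini_API_actions.set_API_key("")
            return "API key State: False"

        Button_Block_Apply_API_key.click(
            fn=_apply_api_key,
            inputs=Textbox_Block_API_key,
            outputs=Markdown_Block_API_key_State,
        )
        Button_Block_Reset_API_key.click(
            fn=_reset_api_key,
            outputs=Markdown_Block_API_key_State,
        )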
        print("Create provider dropdown")
        Dropdown_Block_Choose_provider = gr.Dropdown(label="Choose provider")
        # For now the only provider is Google, via the Google API for access to Gemini. (19.04.2025)
        print("Create provider state")
        Markdown_Block_Povider_State = gr.Markdown("Provider State: False")
        # Possible states: the selected provider is in use (with its name shown),
        # or no provider is in use (with the reason).

    print("Create main chat window")
    ChatIntarface_Block_Main_chat_window = gr.ChatInterface(
        model_response,
        multimodal=True,
        chatbot=gr.Chatbot(
            label="output",
            type="messages",
        ),
        type="messages",
        textbox=gr.MultimodalTextbox(
            label="input",
            max_plain_text_length=9999999,
        ),
        editable=True,
        title="Chat interface test",
        save_history=True,
    )

    print("Create output token markdown")
    Markdown_Block_Output_token = gr.Markdown("Token in output: False")
    print("Create input token markdown")
    Markdown_Block_Input_token = gr.Markdown("Token in input: False")

    print("Create settings block")
    with gr.Accordion(
        "Settings",
        open=False,
    ):
        print("Create model dropdown")
        Dropdown_Block_Choose_model = gr.Dropdown(label="Choose model")
        # The following Google models will be used:
        # Gemini 2.5 Flash Preview 04-17 (10 requests per minute, 250,000 tokens per minute, 500 requests per day) (19.04.2025)
        # Gemini 2.5 Pro Experimental (5 requests per minute, 250,000 tokens per minute, 25 requests per day) (19.04.2025)
        # Gemini 2.0 Flash (15 requests per minute, 1,000,000 tokens per minute, 1,500 requests per day) (19.04.2025)
        # Gemini 2.0 Flash Experimental (10 requests per minute, 1,000,000 tokens per minute, 1,500 requests per day) (19.04.2025)
        # Gemini 2.0 Flash-Lite (30 requests per minute, 1,000,000 tokens per minute, 1,500 requests per day) (19.04.2025)
        # Gemini 1.5 Flash (15 requests per minute, 1,000,000 tokens per minute, 1,500 requests per day) (19.04.2025)
        # Gemini 1.5 Flash-8B (15 requests per minute, 1,000,000 tokens per minute, 1,500 requests per day) (19.04.2025)
        # Gemini 1.5 Pro (2 requests per minute, 32,000 tokens per minute, 50 requests per day) (19.04.2025)
        # Gemma 3 (30 requests per minute, 15,000 tokens per minute, 14,400 requests per day) (19.04.2025)
        # Gemini 2.0 Flash Experimental will be selected by default. (19.04.2025)
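
        # Sketch (assumption, not part of the original wiring): the dropdown could
        # record the selected model via gemini_API_actions.set_used_model. Note that
        # model_response still reads the module-level used_model, so this only stores
        # the choice; the choices themselves are not populated here.
        Dropdown_Block_Choose_model.change(
            fn=gemini_API_actions.set_used_model,
            inputs=Dropdown_Block_Choose_model,
        )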
        print("Create system instructions textbox")
        Textbox_Block_System_instructions = gr.Textbox(label="System instructions")
        print("Create slider model temperature")
        Slier_Block_Model_Temperature = gr.Slider(label="temperature", interactive=True, minimum=0, maximum=2, value=0.95)
        print("Create slider model topP")
        Slier_Block_Model_topP = gr.Slider(label="topP", interactive=True, minimum=0, maximum=1, value=0.5)
        print("Create slider model topK")
        Slier_Block_Model_topK = gr.Slider(label="topK", interactive=True, value=100)
        print("Create checkbox output stream")
        Checkbox_Block_Output_Stream = gr.Checkbox(
            label="Enable output stream"
        )
        print("Create checkbox Grounding with Google Search")
        Checkbox_Block_Google_Grounding_Search = gr.Checkbox(
            label="Grounding with Google Search"
        )

print("\nEnd definition User Interface (UI)\n=========")

print("=========\nBegin launch demo\n")
if __name__ == "__main__":
    demo.launch()
print("\nEnd launch demo\n=========")
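
# Sketch (assumption, not called anywhere yet): how the Settings values
# (system instructions, temperature, topP, topK) could be passed to the
# google-genai SDK when creating a chat. build_generation_config is a
# hypothetical helper name.
def build_generation_config(system_instructions, temperature, top_p, top_k):
    # genai.types.GenerateContentConfig carries per-request generation settings.
    return genai.types.GenerateContentConfig(
        system_instruction=system_instructions or None,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
    )

# Illustrative use inside model_response (not applied in the code above):
# chat = client.chats.create(model=used_model, history=formated_history,
#                            config=build_generation_config(instructions, 0.95, 0.5, 100))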