push fix
Browse files- app_paligemma.py +106 -3
    	
        app_paligemma.py
    CHANGED
    
    | @@ -1,10 +1,113 @@ | |
|  | |
| 1 | 
             
            import gradio as gr
         | 
|  | |
| 2 |  | 
| 3 | 
            -
             | 
| 4 | 
            -
             | 
|  | |
| 5 |  | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 6 |  | 
| 7 | 
            -
             | 
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
| 8 | 
             
            if hasattr(demo, 'fns'):
         | 
| 9 | 
             
                for fn in demo.fns.values():
         | 
| 10 | 
             
                    fn.api_name = False
         | 
|  | |
|  | 
|  | |
| 1 | 
            +
            from gradio_client import Client, handle_file
         | 
| 2 | 
             
            import gradio as gr
         | 
| 3 | 
            +
            import os
         | 
| 4 |  | 
# Display name (shown in the dropdown) -> Hugging Face Space id that
# gradio_client.Client connects to in set_client_for_session.
MODELS = {
    "Paligemma-10B": "akhaliq/paligemma2-10b-ft-docci-448"
}
| 8 |  | 
def create_chat_fn(client, system_prompt, temperature, max_tokens, top_k, rep_penalty, top_p):
    """Bind *client* and the sampling settings into a chat callback.

    Returns a ``chat(message, history)`` function in the shape
    ``gr.ChatInterface`` expects for multimodal input: *message* is a dict
    with optional ``"text"`` and ``"files"`` keys. The *history* argument is
    accepted but not forwarded — the remote Space tracks its own state.
    """
    def chat(message, history):
        # Wrap local file paths so gradio_client uploads them to the Space.
        uploads = [handle_file(path) for path in message.get("files", [])]
        return client.predict(
            message={"text": message.get("text", ""), "files": uploads},
            system_prompt=system_prompt,
            temperature=temperature,
            max_new_tokens=max_tokens,
            top_k=top_k,
            repetition_penalty=rep_penalty,
            top_p=top_p,
            api_name="/chat",
        )

    return chat
| 27 |  | 
def set_client_for_session(model_name, request: gr.Request):
    """Create a fresh gradio_client.Client for *model_name*.

    When the incoming request carries an ``x-ip-token`` header (Spaces uses
    it for per-visitor rate limiting), forward it to the upstream Space as
    ``X-IP-Token``; otherwise connect with no extra headers.
    """
    token = None
    if request and hasattr(request, 'headers'):
        token = request.headers.get('x-ip-token')

    headers = {"X-IP-Token": token} if token else {}
    return Client(MODELS[model_name], headers=headers)
| 36 | 
            +
             | 
def safe_chat_fn(message, history, client, system_prompt, temperature, 
                 max_tokens, top_k, rep_penalty, top_p):
    """Run one chat turn, converting any failure into an error string.

    Guards against the per-session *client* State still being unset (page
    not yet loaded / load failed), then delegates to the callback built by
    ``create_chat_fn``. Exceptions are reported as chat output rather than
    raised, so the UI never shows a traceback.
    """
    if client is None:
        return "Error: Client not initialized. Please refresh the page."
    try:
        chat = create_chat_fn(client, system_prompt, temperature,
                              max_tokens, top_k, rep_penalty, top_p)
        return chat(message, history)
    except Exception as e:
        print(f"Error during chat: {str(e)}")
        return f"Error during chat: {str(e)}"
| 47 | 
            +
             | 
with gr.Blocks() as demo:
    # Per-session handle to the upstream gradio_client.Client; populated on
    # page load and replaced whenever the model dropdown changes.
    client = gr.State()
    
    with gr.Row():
        model_dropdown = gr.Dropdown(
            choices=list(MODELS.keys()),
            value="Paligemma-10B",
            label="Select Model",
            interactive=True
        )
    
    # Sampling controls, collapsed by default; each is wired into
    # safe_chat_fn via additional_inputs below.
    with gr.Accordion("Advanced Settings", open=False):
        system_prompt = gr.Textbox(
            value="You are a helpful AI assistant.",
            label="System Prompt"
        )
        with gr.Row():
            temperature = gr.Slider(
                minimum=0.0, maximum=2.0, value=0.7,
                label="Temperature"
            )
            top_p = gr.Slider(
                minimum=0.0, maximum=1.0, value=0.95,
                label="Top P"
            )
        with gr.Row():
            top_k = gr.Slider(
                minimum=1, maximum=100, value=40, step=1,
                label="Top K"
            )
            rep_penalty = gr.Slider(
                minimum=1.0, maximum=2.0, value=1.1,
                label="Repetition Penalty"
            )
        max_tokens = gr.Slider(
            minimum=64, maximum=4096, value=1024, step=64,
            label="Max Tokens"
        )
    
    # safe_chat_fn(message, history, *additional_inputs) — the order here
    # must match safe_chat_fn's parameter order after (message, history).
    chat_interface = gr.ChatInterface(
        fn=safe_chat_fn,
        additional_inputs=[client, system_prompt, temperature, 
                         max_tokens, top_k, rep_penalty, top_p],
        multimodal=True
    )
    
    # Add model change handler
    # NOTE(review): only model_dropdown is wired as an input; gradio is
    # expected to inject set_client_for_session's gr.Request argument
    # automatically — confirm against the gradio version in use.
    model_dropdown.change(
        fn=set_client_for_session,
        inputs=[model_dropdown],
        outputs=[client]
    )
    
    # Initialize client on page load
    demo.load(
        fn=set_client_for_session,
        inputs=[gr.State("Paligemma-10B")],
        outputs=[client]
    )
| 107 | 
            +
             | 
# Runs after the Blocks context above, once `demo` exists: switch off the
# auto-generated API endpoint for every registered event handler so the
# Space can only be used through its web UI.
for handler in getattr(demo, 'fns', {}).values():
    handler.api_name = False

demo.launch()
 
			
