	Update app.py

app.py CHANGED
@@ -179,7 +179,7 @@ def evaluate(
 model_deplot = Pix2StructForConditionalGeneration.from_pretrained("google/deplot", torch_dtype=torch.bfloat16).to(0)
 processor_deplot = Pix2StructProcessor.from_pretrained("google/deplot")
 
-def process_document(
+def process_document(image, question, llm, num_shot):
     # image = Image.open(image)
     inputs = processor_deplot(images=image, text="Generate the underlying data table for the figure below:", return_tensors="pt").to(0, torch.bfloat16)
     predictions = model_deplot.generate(**inputs, max_new_tokens=512)
@@ -198,14 +198,15 @@ article = "<p style='text-align: center'><a href='https://arxiv.org/abs/2212.105
 demo = gr.Interface(
     fn=process_document,
     inputs=[
+        "image",
+        "text",
         gr.Dropdown(
             ["alpaca-lora", "flan-ul2"], label="LLM", info="Will add more LLMs later!"
         ),
         gr.Dropdown(
             ["0-shot", "1-shot"], label="#shots", info="How many example tables in the prompt?"
         ),
-
-        "text"],
+    ],
     outputs=[
         gr.inputs.Textbox(
             lines=8,
@@ -221,10 +222,12 @@ demo = gr.Interface(
     article=article,
     enable_queue=True,
     examples=[["alpaca-lora", "1-shot", "deplot_case_study_m1.png", "What is the sum of numbers of Indonesia and Ireland? Remember to think step by step."],
-              ["alpaca-lora", "
+              ["alpaca-lora", "0-shot", "deplot_case_study_m1.png", "Summarise the chart for me please."],
               ["alpaca-lora", "1-shot", "deplot_case_study_3.png", "By how much did China's growth rate drop? Think step by step."],
               ["alpaca-lora", "1-shot", "deplot_case_study_4.png", "How many papers are submitted in 2020?"],
-              ["alpaca-lora", "
+              ["alpaca-lora", "0-shot", "deplot_case_study_x2.png", "Summarise the chart for me please."],
+              ["flan-ul2", "0-shot", "deplot_case_study_4.png", "How many papers are submitted in 2020?"],
+              ["flan-ul2", "0-shot", "deplot_case_study_m1.png", "Summarise the chart for me please."],
     cache_examples=True)
 
 demo.launch(debug=True)
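For readers tracing the change: gr.Interface passes its inputs components to fn in list order, so moving "image" and "text" to the top of inputs is what lines the UI up with the new process_document(image, question, llm, num_shot) signature. The sketch below is a minimal, self-contained illustration of that wiring, not the Space's actual code: the DePlot-plus-LLM pipeline is replaced by a stub handler so it runs without model weights, and the examples/caching options are omitted.

import gradio as gr

# Hypothetical stand-in for the Space's DePlot + LLM pipeline; it only echoes
# its arguments so the sketch stays runnable without any model weights.
def process_document(image, question, llm, num_shot):
    received = "yes" if image is not None else "no"
    return f"LLM={llm}, shots={num_shot}, image received={received}\nQ: {question}"

demo = gr.Interface(
    fn=process_document,
    # Component order here must match the parameter order of process_document
    # (and the column order of any rows passed via examples).
    inputs=[
        "image",
        "text",
        gr.Dropdown(["alpaca-lora", "flan-ul2"], label="LLM"),
        gr.Dropdown(["0-shot", "1-shot"], label="#shots"),
    ],
    outputs=gr.Textbox(lines=8, label="Answer"),
)

if __name__ == "__main__":
    demo.launch()

The sketch uses the current gr.Textbox spelling for the output component; the Space itself still relies on the older gr.inputs.Textbox namespace and enable_queue=True from the Gradio 3.x-era API, which newer Gradio releases deprecate.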
 
			
