import gradio as gr
import torch
from PIL import Image
from donut import DonutModel
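
# Gradio demo for Donut-based receipt parsing: the fine-tuned model extracts
# the company, date, address and total fields from a receipt image.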
def demo_process(input_img):
    """Run Donut inference on a receipt image and return the predicted fields."""
    # input_img = Image.fromarray(input_img)  # only needed if the input arrives as a numpy array
    output = pretrained_model.inference(image=input_img, prompt=task_prompt)["predictions"][0]
    return output
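
# The decoder prompt token must match the task name used during fine-tuning
# (Donut uses "<s_{task_name}>" as the task start token).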
task_name = "preparedFinetuneData"
# task_name = "cord-v2"
task_prompt = f"<s_{task_name}>"

# Open and re-save the bundled sample receipts (re-encodes the example PNGs in place).
Image.open("sample_receipt1.png").save("sample_receipt1.png")
Image.open("sample_receipt2.png").save("sample_receipt2.png")

# Model checkpoint: fine-tuned weights published on the Hugging Face Hub.
# Alternative checkpoints kept for reference:
PATH = 'epochs30_base_on_donut_base/'
# pretrained_model = DonutModel.from_pretrained(PATH, local_files_only=True)
# pretrained_model = DonutModel.from_pretrained("doshan1250/p9OcrAiV1", revision="main")
# pretrained_model = DonutModel.from_pretrained("naver-clova-ix/donut-base-finetuned-cord-v2")
pretrained_model = DonutModel.from_pretrained("doshan1250/p9OcrAiV1")
pretrained_model.eval()
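
# Optional (assumption: a CUDA GPU may be available in this Space): move the
# model to GPU in half precision for faster inference, following the usual
# Donut demo setup; on CPU-only machines the model simply stays in float32.
if torch.cuda.is_available():
    pretrained_model.half()
    pretrained_model.to(torch.device("cuda"))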

demo = gr.Interface(
    fn=demo_process,
    inputs=gr.Image(type="pil"),
    outputs="json",
    title=f"Goodarc p9 Donut for `{task_name}` task, epochs30",
    description="""Goodarc p9 was trained on 100 English receipts. <br> Training format:
{
    "company": "SYARIKAT PERNIAGAAN GIN KEE",
    "date": "02/12/2017",
    "address": "NO 290, JALAN AIR PANAS. SETAPAK. 53200, KUALA LUMPUR.",
    "total": "7.42"
}""",
    examples=[["sample_receipt1.png"], ["sample_receipt2.png"]],
    cache_examples=False,
)
demo.launch()