"""
Donut
Copyright (c) 2022-present NAVER Corp.
MIT License
"""
import argparse

import gradio as gr
import torch

from donut import DonutModel


def demo_process(input_img):
    """Run Donut inference on a single document image.

    Args:
        input_img: input image (a PIL image, per the Gradio `type="pil"`
            component configured below).

    Returns:
        str: the model's predicted text sequence with all spaces removed
        (original author marked the space-stripping as "temp").
    """
    # No `global` statement needed: `pretrained_model` and `task_prompt` are
    # module-level names that this function only reads, never rebinds.
    # (The original declaration also listed `task_name`, which is never
    # defined anywhere in this file.)
    output = pretrained_model.inference(image=input_img, prompt=task_prompt)["predictions"][0]
    return output["text_sequence"].replace(" ", "")  # temp

# Task prompt matching the fine-tuned checkpoint's decoder start token.
# Plain string literal: the original `f"..."` prefix had no placeholders.
task_prompt = "<s_kuzushiji>"
pretrained_model = DonutModel.from_pretrained("naver-clova-ix/donut-base-finetuned-kuzushiji")
pretrained_model.eval()  # inference mode (disables dropout/batchnorm updates)

demo = gr.Interface(
    fn=demo_process,
    # `gr.inputs.Image` is deprecated since Gradio 3.0 and removed in 4.0;
    # `gr.Image` is the supported equivalent with the same `type="pil"` option.
    inputs=gr.Image(type="pil"),
    outputs="text",
    title="Donut 🍩 demonstration for Kuzushiji Decoding",
    description="""This is a toy example for decoding kuzushiji (old Japanese cursive characters, くずし字) documents with a single E2E model, `Document Understanding Transformer` (Donut 🍩, ECCV-22). This particular model is fine-tuned on <a href="http://codh.rois.ac.jp/char-shape/">Kuzushiji Dataset</a>. To use it, simply upload a kuzushiji document image or use one of the examples below and click `Submit`. Results will show up in a few seconds.<br>* Note that this demo is running on a small resource environment, `basic CPU plan`  (`2 vCPU, 16GiB RAM`).<br>* Demonstrations for other types of documents/tasks are available at https://github.com/clovaai/donut<br>
    * More details of Donut are available at <a href="https://arxiv.org/abs/2111.15664">Paper</a>, <a href="https://github.com/clovaai/donut">GitHub</a>, and <a href="https://huggingface.co/docs/transformers/model_doc/donut">Huggingface 🤗 Implementation Page</a>.<br>
    * Kuzushiji Dataset is from <a href="http://codh.rois.ac.jp/char-shape/">Dataset Link</a> (Reference: 『日本古典籍くずし字データセット』(国文研ほか所蔵/CODH加工)doi:10.20676/00000340).""",
    examples=[["sample1.jpg"], ["sample2.jpg"]],
    cache_examples=False,  # recompute example outputs on demand (small CPU host)
)
demo.launch()