import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# All four poem models share the PhoBERT tokenizer
tokenizer = AutoTokenizer.from_pretrained("vinai/phobert-base")

# Load one fine-tuned GPT-2 model per poem style
models = {
    "Luc Bat": AutoModelForCausalLM.from_pretrained(
        "Libosa2707/vietnamese-poem-luc-bat-gpt2"
    ),
    "Bay Chu": AutoModelForCausalLM.from_pretrained(
        "Libosa2707/vietnamese-poem-bay-chu-gpt2"
    ),
    "Tam Chu": AutoModelForCausalLM.from_pretrained(
        "Libosa2707/vietnamese-poem-tam-chu-gpt2"
    ),
    "Nam Chu": AutoModelForCausalLM.from_pretrained(
        "Libosa2707/vietnamese-poem-nam-chu-gpt2"
    ),
}


def generate_poem(text, style):
    # Choose the model based on the selected style
    model = models[style]

    # Tokenize the input line
    input_ids = tokenizer.encode(text, return_tensors="pt")

    # Sample a continuation (max_length bounds prompt + generated tokens)
    output = model.generate(input_ids, max_length=100, do_sample=True, temperature=0.7)

    # Decode only the newly generated tokens (the prompt is sliced off)
    generated_text = tokenizer.decode(
        output[:, input_ids.shape[-1] :][0], skip_special_tokens=True
    )

    # Prepend the prompt so the returned poem includes it
    text = text + generated_text

    # Post-process: treat <unk> tokens emitted by the model as line breaks
    text = text.replace("<unk>", "\n")
    pretty_text = ""
    for line in text.split("\n"):
        line = line.strip()
        if not line:
            continue
        # Capitalize the first letter of each line
        pretty_text += line[0].upper() + line[1:] + "\n"

    return pretty_text


gradio_interface = gr.Interface(
    fn=generate_poem,
    inputs=[
        gr.Textbox(lines=1, placeholder="First words of the poem"),
        gr.Dropdown(
            choices=["Luc Bat", "Bay Chu", "Tam Chu", "Nam Chu"], label="Style"
        ),
    ],
    outputs="text",
)
gradio_interface.launch()