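# Stable Diffusion Prompt Generator (Gradio demo).
# Translates Indonesian input to English with MarianMT (Helsinki-NLP/opus-mt-id-en),
# then expands it into Stable Diffusion prompts with Gustavosta/MagicPrompt-Stable-Diffusion.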
from transformers import pipeline, set_seed
from transformers import MarianMTModel, MarianTokenizer
import gradio as grad, random, re

# Initialize the Indonesian-to-English translation model (MarianMT)
model_name = "Helsinki-NLP/opus-mt-id-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)


# Load the MagicPrompt GPT-2 pipeline and the list of fallback starting ideas
gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
with open("ideas.txt", "r", encoding="utf-8") as f:
    line = f.readlines()  # list of idea strings, one per line


def generate(starting_text):
    seed = random.randint(100, 1000000)
    set_seed(seed)

    # If no input was given, pick a random idea from ideas.txt and strip punctuation
    if starting_text == "":
        starting_text: str = line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize()
        starting_text: str = re.sub(r"[,:\-–.!;?_]", '', starting_text)

    # Translate the (Indonesian) input to English before prompt generation
    inputs = tokenizer(starting_text, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(**inputs)
    starting_text = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Generate four candidate prompts and keep only those that meaningfully extend the input
    response = gpt2_pipe(starting_text, max_length=(len(starting_text) + random.randint(60, 90)), num_return_sequences=4)
    response_list = []
    for x in response:
        resp = x['generated_text'].strip()
        if resp != starting_text and len(resp) > (len(starting_text) + 4) and not resp.endswith((":", "-", "—")):
            response_list.append(resp + '\n')

    # Drop dot-joined tokens (e.g. file names or domains) and stray angle brackets
    response_end = "\n".join(response_list)
    response_end = re.sub(r'[^ ]+\.[^ ]+', '', response_end)
    response_end = response_end.replace("<", "").replace(">", "")

    if response_end != "":
        return response_end
    # Fallback (added): if every candidate was filtered out, return the translated input instead of None
    return starting_text


txt = grad.Textbox(lines=1, label="Initial Text", placeholder="Enter Indonesian text")
out = grad.Textbox(lines=4, label="Generated Prompts")

# Pre-fill the example gallery with eight random ideas from ideas.txt
examples = [line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize() for _ in range(8)]

title = "Stable Diffusion Prompt Generator"
description = 'This is a demo of the "MagicPrompt" model series, in this case aimed at "Stable Diffusion". To use it, simply submit your text or click on one of the examples. To learn more about the model, [click here](https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion).<br>'

grad.Interface(fn=generate,
               inputs=txt,
               outputs=out,
               examples=examples,
               title=title,
               description=description,
               article='',
               allow_flagging='never',
               cache_examples=False,
               theme="default").launch(enable_queue=True, debug=True)