Spaces: Runtime error
Logan Zoellner committed · Commit 4c68c58 · 1 Parent(s): 9ef9a8a
copy app from https://huggingface.co/spaces/Gradio-Blocks/GPTJ6B_Poetry_LatentDiff_Illustration
app.py
ADDED
@@ -0,0 +1,82 @@
import gradio as gr
import requests
import os

# GPT-J-6B API
API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
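# NOTE: the Inference API call is authenticated with the HF_TOKEN secret configured on the
# Space; os.environ[...] raises a KeyError at startup if that secret is missing.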
HF_TOKEN = os.environ["HF_TOKEN"]
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

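# Few-shot prompt: two example character descriptions in the style GPT-J-6B should imitate;
# npc_generate() appends one more line for the requested NPC before calling the API.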
prompt = """
Bilbo is a hobbit rogue who wears a brown cloak and carries a ring.

Bremen is a human wizard, he wears a blue robe and carries a wand.
"""

examples = [["river"], ["night"], ["trees"], ["table"], ["laughs"]]

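# Build the prompt for the requested NPC, query the GPT-J-6B Inference API, and trim the
# completion to its first paragraph (falling back to the first sentence, or everything up to
# the last newline, when the model never emits a blank line).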
def npc_generate(name, race, characterClass):

    p = prompt + "\n{name} is a {race} {characterClass}".format(name=name, race=race, characterClass=characterClass)
    print(f"*****Inside npc_generate - Prompt is :{p}")
    json_ = {"inputs": p,
             "parameters":
                 {
                     "top_p": 0.9,
                     "temperature": 1.1,
                     "max_new_tokens": 50,
                     "return_full_text": False
                 }}
    response = requests.post(API_URL, headers=headers, json=json_)
    output = response.json()
    print(f"If there was an error? Reason is : {output}")
    # Assumes a successful generation; an error payload (a dict) from the API would make this indexing fail.
    output_tmp = output[0]['generated_text']
    print(f"GPTJ response without splits is: {output_tmp}")
    #poem = output[0]['generated_text'].split("\n\n")[0]  # +"."
    if "\n\n" not in output_tmp:
        if output_tmp.find('.') != -1:
            idx = output_tmp.find('.')
            poem = output_tmp[:idx + 1]
        else:
            idx = output_tmp.rfind('\n')
            poem = output_tmp[:idx]
    else:
        poem = output_tmp.split("\n\n")[0]  # +"."
    poem = poem.replace('?', '')
    print(f"Poem being returned is: {poem}")
    return poem

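# Render the generated description as an illustration: the text (suffixed with "oil on canvas.")
# is passed to the multimodalart/latentdiffusion Space loaded via gr.Interface.load.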
def poem_to_image(poem):
    print("*****Inside Poem_to_image")
    poem = " ".join(poem.split('\n'))
    poem = poem + " oil on canvas."
    steps, width, height, images, diversity = '50', '256', '256', '1', 15
    img = gr.Interface.load("spaces/multimodalart/latentdiffusion")(poem, steps, width, height, images, diversity)[0]
    return img

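# Gradio Blocks UI: three textboxes for the NPC's name, race and class, a textbox for the
# generated description, an image output, and two buttons wired to npc_generate and poem_to_image.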
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Generate Short Poem along with an Illustration</center></h1>")
    gr.Markdown(
        "<div>Enter a single word you would want GPT-J-6B to write Poetry on.</div>"
        "<div>Generate an illustration 🎨 provided by the Latent Diffusion model.</div>"
        "<div>GPT-J-6B is a 6 billion parameter autoregressive language model. It generates the Poem based on how it has been 'prompt-engineered'. The complete text of the generated poem then goes in as a prompt to the amazing Latent Diffusion Art space by <a href='https://huggingface.co/spaces/multimodalart/latentdiffusion' target='_blank'>Multimodalart</a>.</div>"
        "Please note that some of the Poems/Illustrations might not look up to par, and well, this is what happens when you can't 'cherry-pick' and post. "
        "<div>Some of the example words that you can use are 'river', 'night', 'trees', 'table', 'laughs', or similar, to get the best results!</div>"
    )
    with gr.Row():

        input_name = gr.Textbox(placeholder="Drizzt")
        input_race = gr.Textbox(placeholder="dark elf")
        input_class = gr.Textbox(placeholder="ranger")

    poem_txt = gr.Textbox(lines=7)
    output_image = gr.Image(type="filepath", shape=(256, 256))

    b1 = gr.Button("Generate NPC")
    b2 = gr.Button("Generate Image")

    b1.click(npc_generate, inputs=[input_name, input_race, input_class], outputs=poem_txt)
    b2.click(poem_to_image, poem_txt, output_image)
    #examples=examples

demo.launch(enable_queue=True, debug=True)