Spaces:
Runtime error
Runtime error
Logan Zoellner
committed on
Commit
•
5cee86f
1
Parent(s):
e1634de
initial commit
Browse files
app.py
ADDED
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from asyncio import constants
|
2 |
+
import gradio as gr
|
3 |
+
import requests
|
4 |
+
import os
|
5 |
+
import re
|
6 |
+
import random
|
7 |
+
from words import *
|
8 |
+
|
9 |
+
basePrompt="""
|
10 |
+
The following session was recorded from a text adventure game.
|
11 |
+
----
|
12 |
+
"""
|
13 |
+
|
14 |
+
story="""
|
15 |
+
Computer: You approach the enterance of the dungeon.
|
16 |
+
"""
|
17 |
+
|
18 |
+
|
19 |
+
def fallbackResponse():
|
20 |
+
"You are attacked by a {monster}!".format(monster=random.choice(monsters))
|
21 |
+
|
22 |
+
def continue_story(prompt):
|
23 |
+
|
24 |
+
|
25 |
+
p=basePrompt+story+"player:"+prompt
|
26 |
+
|
27 |
+
print(f"*****Inside desc_generate - Prompt is :{p}")
|
28 |
+
json_ = {"inputs": p,
|
29 |
+
"parameters":
|
30 |
+
{
|
31 |
+
"top_p": 0.9,
|
32 |
+
"temperature": 1.1,
|
33 |
+
"max_new_tokens": 50,
|
34 |
+
"return_full_text": False,
|
35 |
+
}}
|
36 |
+
#response = requests.post(API_URL, headers=headers, json=json_)
|
37 |
+
response = requests.post(API_URL, json=json_)
|
38 |
+
output = response.json()
|
39 |
+
print(f"If there was an error? Reason is : {output}")
|
40 |
+
|
41 |
+
|
42 |
+
#error handling
|
43 |
+
if "error" in output:
|
44 |
+
print("using fallback description method!")
|
45 |
+
#fallback method
|
46 |
+
computerResponse=fallbackResponse()
|
47 |
+
else:
|
48 |
+
output_tmp = output[0]['generated_text']
|
49 |
+
#truncate response at first newline
|
50 |
+
if "\n" in output_tmp:
|
51 |
+
idx = output_tmp.find('\n')
|
52 |
+
output_tmp = output_tmp[:idx]
|
53 |
+
#check if response starts with "computer:", if not add it
|
54 |
+
if ":" not in output_tmp:
|
55 |
+
output_tmp = "computer:"+output_tmp
|
56 |
+
story=story+"player:"+prompt+"\n"+output_tmp+"\n"
|
57 |
+
|
58 |
+
|
59 |
+
demo = gr.Blocks()
|
60 |
+
|
61 |
+
with demo:
|
62 |
+
gr.Markdown("<h1><center>NPC Generator</center></h1>")
|
63 |
+
gr.Markdown(
|
64 |
+
"based on <a href=https://huggingface.co/spaces/Gradio-Blocks/GPTJ6B_Poetry_LatentDiff_Illustration> Gradio poetry generator</a>."
|
65 |
+
"<div>first input name, race and class (or generate them randomly)</div>"
|
66 |
+
"<div>Next, use GPT-J to generate a short description</div>"
|
67 |
+
"<div>Finally, Generate an illustration 🎨 provided by <a href=https://huggingface.co/spaces/multimodalart/latentdiffusion>Latent Diffusion model</a>.</div>"
|
68 |
+
)
|
69 |
+
|
70 |
+
with gr.Row():
|
71 |
+
b0 = gr.Button("Submit")
|
72 |
+
|
73 |
+
with gr.Row():
|
74 |
+
input_command = gr.Textbox(label="input",placeholder="look around")
|
75 |
+
|
76 |
+
with gr.Row():
|
77 |
+
story = gr.Textbox(label="description",lines=7)
|
78 |
+
|
79 |
+
b0.click(continue_story,inputs=[],outputs=[input_name,input_race,input_class,input_pronoun])
|
80 |
+
b1.click(npc_generate, inputs=[ input_name,input_race,input_class,input_pronoun], outputs=desc_txt)
|
81 |
+
b2.click(desc_to_image, desc_txt, output_image)
|
82 |
+
#examples=examples
|
83 |
+
|
84 |
+
demo.launch(enable_queue=True, debug=True)
|
words.py
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
monsters="""dragon
|
2 |
+
ogre
|
3 |
+
goblin
|
4 |
+
orc
|
5 |
+
troll
|
6 |
+
giant
|
7 |
+
vampire
|
8 |
+
zombie
|
9 |
+
skeleton
|
10 |
+
ghoul
|
11 |
+
werewolf""".split("\n")
|