awacke1 committed on
Commit
27027e2
1 Parent(s): 2c65a04

Create new file

Files changed (1)
  1. app.py +106 -0
app.py ADDED
@@ -0,0 +1,106 @@
+ import gradio as gr
+ import requests
+ 
+ # GPT-J-6B API
+ API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
+ headers = {"Authorization": "Bearer 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"}
+ prompt = """Oh, my life
+ is changing every day
+ Every possible way
+ And oh, my dreams,
+ it's never quite as it seems
+ Never quite as it seems"""
+ 
+ examples = [["mind"], ["memory"], ["sleep"],["wellness"],["nutrition"]]
+ 
+ 
+ def poem2_generate(word):
+     p = word.lower() + "\n" + "poem using word: "
+     print(f"*****Inside poem_generate - Prompt is :{p}")
+     json_ = {"inputs": p,
+              "parameters":
+                  {
+                      "top_p": 0.9,
+                      "temperature": 1.1,
+                      "max_new_tokens": 50,
+                      "return_full_text": False
+                  }}
+     response = requests.post(API_URL, headers=headers, json=json_)
+     output = response.json()
+     print(f"If there was an error? Reason is : {output}")
+     output_tmp = output[0]['generated_text']
+     print(f"GPTJ response without splits is: {output_tmp}")
+     #poem = output[0]['generated_text'].split("\n\n")[0] # +"."
+     if "\n\n" not in output_tmp:
+         if output_tmp.find('.') != -1:
+             idx = output_tmp.find('.')
+             poem = output_tmp[:idx+1]
+         else:
+             idx = output_tmp.rfind('\n')
+             poem = output_tmp[:idx]
+     else:
+         poem = output_tmp.split("\n\n")[0] # +"."
+     poem = poem.replace('?','')
+     print(f"Poem being returned is: {poem}")
+     return poem
+ 
+ 
+ def poem_generate(word):
+ 
+     p = prompt + word.lower() + "\n" + "poem using word: "
+     print(f"*****Inside poem_generate - Prompt is :{p}")
+     json_ = {"inputs": p,
+              "parameters":
+                  {
+                      "top_p": 0.9,
+                      "temperature": 1.1,
+                      "max_new_tokens": 50,
+                      "return_full_text": False
+                  }}
+     response = requests.post(API_URL, headers=headers, json=json_)
+     output = response.json()
+     print(f"If there was an error? Reason is : {output}")
+     output_tmp = output[0]['generated_text']
+     print(f"GPTJ response without splits is: {output_tmp}")
+     #poem = output[0]['generated_text'].split("\n\n")[0] # +"."
+     if "\n\n" not in output_tmp:
+         if output_tmp.find('.') != -1:
+             idx = output_tmp.find('.')
+             poem = output_tmp[:idx+1]
+         else:
+             idx = output_tmp.rfind('\n')
+             poem = output_tmp[:idx]
+     else:
+         poem = output_tmp.split("\n\n")[0] # +"."
+     poem = poem.replace('?','')
+     print(f"Poem being returned is: {poem}")
+     return poem
+ 
+ def poem_to_image(poem):
+     print("*****Inside Poem_to_image")
+     poem = " ".join(poem.split('\n'))
+     poem = poem + " oil on canvas."
+     steps, width, height, images, diversity = '50','256','256','1',15
+     img = gr.Interface.load("spaces/multimodalart/latentdiffusion")(poem, steps, width, height, images, diversity)[0]
+     return img
+ 
+ demo = gr.Blocks()
+ 
+ with demo:
+     gr.Markdown("<h1><center>Few Shot Learning for Text - Word Image Search</center></h1>")
+     gr.Markdown(
+         "https://huggingface.co/blog/few-shot-learning-gpt-neo-and-inference-api, https://github.com/EleutherAI/the-pile"
+     )
+     with gr.Row():
+         input_word = gr.Textbox(lines=7, value=prompt)
+         poem_txt = gr.Textbox(lines=7)
+         output_image = gr.Image(type="filepath", shape=(256,256))
+ 
+     b1 = gr.Button("Generate Text")
+     b2 = gr.Button("Generate Image")
+ 
+     b1.click(poem2_generate, input_word, poem_txt)
+     b2.click(poem_to_image, poem_txt, output_image)
+     examples=[["living, loving, feeling good"], ["I want to live. I want to give."],["Ive been to Hollywood. Ive been to Redwood"]]
+ 
+ demo.launch(enable_queue=True, debug=True)
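
For reference, the text path in poem2_generate reduces to a single POST against the hosted Inference API. The sketch below is a minimal, non-authoritative rewrite of that call under two assumptions: the token is read from a hypothetical HF_API_TOKEN environment variable instead of being hard-coded as in the diff above, and the endpoint may answer with an error payload (for example while the model is loading) rather than the list of generations that output[0]['generated_text'] expects.

import os
import requests

API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
# Assumption: HF_API_TOKEN is a hypothetical environment variable; the diff
# above hard-codes the bearer token directly in headers.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def generate_poem_text(word: str) -> str:
    """Sketch of the GPT-J call made by poem2_generate in app.py."""
    payload = {
        "inputs": word.lower() + "\npoem using word: ",
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 50,
            "return_full_text": False,
        },
    }
    response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    output = response.json()
    # On success the endpoint returns a list of generations; on failure it may
    # return a dict describing the error, which the unguarded indexing in
    # app.py would turn into a KeyError or TypeError.
    if isinstance(output, list) and output and "generated_text" in output[0]:
        return output[0]["generated_text"]
    raise RuntimeError(f"Inference API error: {output}")

if __name__ == "__main__":
    print(generate_poem_text("mind"))

Reading the token from the environment also matters here because the Space repository (and this diff) is public, so a bearer token committed in app.py is effectively disclosed.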