decodemai committed on
Commit
b461c2f
1 Parent(s): ea2aae3

Create app.py

Files changed (1)
  1. app.py +100 -0
app.py ADDED
@@ -0,0 +1,100 @@
+ import json
+ import requests
+ import gradio as gr
+ import random
+ import time
+ import os
+ from datetime import datetime
+ from huggingface_hub import InferenceApi
+
+
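+ # API tokens are read from environment variables (e.g. Space secrets)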
+ API_TOKEN = os.getenv("API_TOKEN")
+ DECODEM_TOKEN = os.getenv("DECODEM_TOKEN")
+
+
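+ # Client for the bigscience/bloom model on the Hugging Face Inference API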
+ inference = InferenceApi("bigscience/bloom", token=API_TOKEN)
+
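+ # Fetch the 'market_size' prompt template from the Decodem cloud function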
+ headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
+ url_decodemprompts = 'https://us-central1-createinsightsproject.cloudfunctions.net/getdecodemprompts'
+
+ data = {"prompt_type": 'market_size', "decodem_token": DECODEM_TOKEN}
+ try:
+     r = requests.post(url_decodemprompts, data=json.dumps(data), headers=headers)
+ except requests.exceptions.ReadTimeout as e:
+     print(e)
+     raise  # without the prompt template the app cannot work, so fail fast
+ #print(r.content)
+
+ prompt = str(r.content, 'UTF-8')
+
+
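+ # Wrap the Inference API call: normalize the generation arguments and query the model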
+ def infer(prompt,
+           max_length = 250,
+           top_k = 0,
+           num_beams = 0,
+           no_repeat_ngram_size = 2,
+           top_p = 0.9,
+           seed=42,
+           temperature=0.7,
+           greedy_decoding = False,
+           return_full_text = False):
+
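+     # Turn the 0/False sentinels into None so unused options are omitted from the request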
+     print(seed)
+     top_k = None if top_k == 0 else top_k
+     do_sample = False if num_beams > 0 else not greedy_decoding
+     num_beams = None if (greedy_decoding or num_beams == 0) else num_beams
+     no_repeat_ngram_size = None if num_beams is None else no_repeat_ngram_size
+     top_p = None if num_beams else top_p
+     early_stopping = None if num_beams is None else num_beams > 0
+
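+     # Generation parameters passed to the hosted bloom endpoint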
+     params = {
+         "max_new_tokens": max_length,
+         "top_k": top_k,
+         "top_p": top_p,
+         "temperature": temperature,
+         "do_sample": do_sample,
+         "seed": seed,
+         "early_stopping": early_stopping,
+         "no_repeat_ngram_size": no_repeat_ngram_size,
+         "num_beams": num_beams,
+         "return_full_text": return_full_text
+     }
+
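+     # Call the model and time the request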
+     s = time.time()
+     response = inference(prompt, params=params)
+     #print(response)
+     proc_time = time.time() - s
+     #print(f"Processing time was {proc_time} seconds")
+     return response
+
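+ # Build the prompt from the fetched template, run inference and keep the first three output lines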
+ def getideas(text_inp):
+     print(text_inp)
+     print(datetime.today().strftime("%d-%m-%Y"))
+
+     text = prompt + "\nInput:" + text_inp + "\nOutput:"
+     resp = infer(text, seed=random.randint(0, 100))
+
+     generated_text = resp[0]['generated_text']
+     result = generated_text.replace(text, '').strip()
+     result = result.replace("Output:", "")
+     parts = result.split("###")
+     topic = parts[0].strip()
+     topic = "\n".join(topic.split('\n')[:3])
+     print(topic)
+     return topic
+
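+ # Gradio UI: a single text input, a Generate button and a text output wired to getideas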
+ with gr.Blocks() as demo:
+     gr.Markdown("<h1><center>Market Sizing Framework for Your Business</center></h1>")
+     gr.Markdown(
+         """ChatGPT-based insights from <a href="https://www.decodem.ai">Decodem.ai</a> for businesses.\nWhile ChatGPT has many use cases, we have developed specific use cases/templates for businesses.\n\nThis template suggests how a business can size a market it is entering. Enter a business area to size and review the results, using the examples as a guide. We use the equally powerful AI model bigscience/bloom."""
+     )
+     textbox = gr.Textbox(placeholder="Enter market size focus for business here...", lines=1, label='Your business area')
+     btn = gr.Button("Generate")
+     output1 = gr.Textbox(lines=2, label='The future')
+
+     btn.click(getideas, inputs=[textbox], outputs=[output1])
+     examples = gr.Examples(examples=['icecream parlor in London', 'HR saas for fintech', 'book shops in NYC', 'Starbucks cafe in Bangalore', 'organic vegetables via ecommerce', 'grocery delivery'],
+                            inputs=[textbox])
+
+
+ demo.launch()