import json
import requests
import gradio as gr
import random
import time
import os
from datetime import datetime


# Tokens are read from the environment: API_TOKEN authenticates against the
# Hugging Face Inference API, DECODEM_TOKEN against the Decodem prompt service.
API_TOKEN = os.getenv("API_TOKEN")
DECODEM_TOKEN = os.getenv("DECODEM_TOKEN")


from huggingface_hub import InferenceApi

# Client for the hosted bigscience/bloom model on the Hugging Face Inference API.
inference = InferenceApi("bigscience/bloom", token=API_TOKEN)

headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
url_decodemprompts = 'https://us-central1-createinsightsproject.cloudfunctions.net/getdecodemprompts'

# Fetch the market-sizing prompt template from the Decodem prompt service at startup.
data = {"prompt_type": 'market_size', "decodem_token": DECODEM_TOKEN}
try:
    # The timeout value is a defensive default; the app cannot run without the template.
    r = requests.post(url_decodemprompts, data=json.dumps(data), headers=headers, timeout=30)
    r.raise_for_status()
except requests.exceptions.RequestException as e:
    print(e)
    raise

prompt = str(r.content, 'UTF-8')
print(prompt)

def infer(prompt,
          max_length=250,
          top_k=0,
          num_beams=0,
          no_repeat_ngram_size=2,
          top_p=0.9,
          seed=42,
          temperature=0.7,
          greedy_decoding=False,
          return_full_text=False):
    """Call the BLOOM text-generation endpoint, reconciling sampling and beam-search options."""
    print(seed)
    # Normalise mutually exclusive options: beam search disables sampling and top-p,
    # and top_k == 0 means "no top-k filtering".
    top_k = None if top_k == 0 else top_k
    do_sample = False if num_beams > 0 else not greedy_decoding
    num_beams = None if (greedy_decoding or num_beams == 0) else num_beams
    no_repeat_ngram_size = None if num_beams is None else no_repeat_ngram_size
    top_p = None if num_beams else top_p
    early_stopping = None if num_beams is None else num_beams > 0

    # Generation parameters forwarded to the Inference API.
    params = {
        "max_new_tokens": max_length,
        "top_k": top_k,
        "top_p": top_p,
        "temperature": temperature,
        "do_sample": do_sample,
        "seed": seed,
        "early_stopping": early_stopping,
        "no_repeat_ngram_size": no_repeat_ngram_size,
        "num_beams": num_beams,
        "return_full_text": return_full_text
    }
    
    s = time.time()
    response = inference(prompt, params=params)
    #print(response)
    proc_time = time.time()-s
    #print(f"Processing time was {proc_time} seconds")
    return response
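
# Note (illustrative): for text generation, inference() returns the parsed JSON response,
# consumed below as a list of dicts, e.g. [{"generated_text": "..."}].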

def getideas(text_inp):
    """Build the full prompt for a business area and return the generated framework."""
    print(text_inp)
    print(datetime.today().strftime("%d-%m-%Y"))

    # Append the user's business area to the fetched prompt template.
    text = prompt + "\nInput:" + text_inp + "\nOutput:"
    resp = infer(text, seed=random.randint(0, 100))

    # Strip the echoed prompt and keep only the first generated block.
    generated_text = resp[0]['generated_text']
    result = generated_text.replace(text, '').strip()
    result = result.replace("Output:", "")
    parts = result.split("###")
    topic = parts[0].strip()
    topic = "\n".join(topic.split('\n')[:3])
    print(topic)
    return topic
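
# Assumption: the fetched prompt template delimits few-shot examples with "###" and
# multi-line outputs, so only the text before the first "###" (capped at three lines) is kept.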

with gr.Blocks() as demo:
    gr.Markdown("<h1><center>Market Sizing Framework for Your Business</center></h1>")
    gr.Markdown(
        """ChatGPT-based insights from <a href="https://www.decodem.ai">Decodem.ai</a> for businesses.\nWhile ChatGPT has many use cases, we have developed specific use-case templates for businesses.\n\nThis template provides ideas on how a business can size a market it is entering. Enter the business area to size and get the results; use the examples as a guide. Under the hood we use an equally powerful AI model, bigscience/bloom."""
        )
    textbox = gr.Textbox(placeholder="Enter market size focus for business here...", lines=1, label='Your business area')
    btn = gr.Button("Generate")
    output1 = gr.Textbox(lines=2, label='Market Sizing Framework')

    btn.click(getideas, inputs=[textbox], outputs=[output1])
    examples = gr.Examples(examples=['ice cream parlor in London', 'HR saas for fintech', 'book shops in NYC', 'Starbucks cafe in Bangalore', 'organic vegetables via ecommerce', 'grocery delivery'],
                           inputs=[textbox])
    

demo.launch()