File size: 2,508 Bytes
bf80129
 
 
 
4b227cd
a3e8af4
 
 
bf80129
e98ee39
bf80129
 
 
590eb29
 
 
 
 
 
 
 
 
 
 
 
 
9375f6b
9cd9daf
bf80129
 
 
 
 
a3e8af4
bf80129
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
404d16f
bf80129
5352cae
bf80129
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
import json
import requests
import gradio as gr
import random
import os
import datetime
from datetime import datetime


# --- External service configuration -------------------------------------
# Token for the Hugging Face Inference API (bigscience/bloom).
API_TOKEN = os.getenv("API_TOKEN")
# Authorization headers for the Hugging Face call made in query() below.
headers = {"Authorization": f"Bearer {API_TOKEN}"}
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"

DECODEM_TOKEN = os.getenv("DECODEM_TOKEN")

# NOTE(fix): the original rebound `headers` here, clobbering the
# Authorization header that the inference request in query() relies on.
# Use a separate name for the prompt-service request headers.
decodem_headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
url_decodemprompts = 'https://us-central1-createinsightsproject.cloudfunctions.net/getdecodemprompts'

data = {"prompt_type": 'future_in_words', "decodem_token": DECODEM_TOKEN}

# Fetch the base prompt template once at startup.
# NOTE(fix): the original read `r.content` unconditionally after the
# try/except, so a timeout led straight to a NameError on `r`; fall back
# to an empty prompt instead of crashing the whole app at import time.
prompt = ''
try:
    r = requests.post(url_decodemprompts, data=json.dumps(data),
                      headers=decodem_headers, timeout=30)
    prompt = str(r.content, 'UTF-8')
except requests.exceptions.RequestException as e:
    print(e)

def query(text):
    """Generate a "future of this business" idea for *text* via bloom.

    Parameters
    ----------
    text : str
        Business area typed by the user (e.g. "book shop").

    Returns
    -------
    str
        The model's generated idea, stripped of the echoed prompt and
        few-shot separators; on an API error, the error payload as text.
    """
    print(text)
    print(datetime.today().strftime("%d-%m-%Y"))
    # Wrap the user's input in the prompt template fetched at startup.
    full_prompt = prompt + "\nInput:" + text + "\nOutput:"
    payload = {
        "inputs": full_prompt,
        "parameters": {
            "max_length": 250,
            "temperature": 0.9,
            # Random seed so repeated clicks produce varied outputs.
            "seed": random.randint(0, 100),
        },
    }

    # NOTE(fix): added a timeout so a stalled inference call cannot hang
    # the Gradio worker indefinitely. Renamed the local from `data` to
    # avoid shadowing the module-level `data` dict.
    response = requests.post(API_URL, headers=headers,
                             data=json.dumps(payload), timeout=120)
    body = json.loads(response.content.decode("utf-8"))
    # NOTE(fix): on failure the inference API returns a dict such as
    # {"error": ...} rather than [{"generated_text": ...}]; the original
    # then crashed with a TypeError/KeyError. Surface the error instead.
    if not isinstance(body, list):
        return str(body)
    generated_text = body[0]['generated_text']
    # Remove the echoed prompt, the "Output:" marker, and anything after
    # a "###" separator or a follow-on "Input:" few-shot block.
    result = generated_text.replace(full_prompt, '').strip()
    result = result.replace("Output:", "")
    topic = result.split("###")[0].strip()
    topic = topic.split("Input:")[0]
    print(topic)
    return topic


# --- Gradio UI -----------------------------------------------------------
# Declarative Blocks layout: heading, description, one input textbox, a
# Generate button wired to query(), one output textbox, and clickable
# example inputs that pre-fill the textbox.
with gr.Blocks() as demo:
    gr.Markdown("<h1><center>Ideas of the Future</center></h1>")
    gr.Markdown(
        """ChatGPT based Insights from <a href="https://www.decodem.ai">Decodem.ai</a> for businesses.\nWhile ChatGPT has multiple use cases we have evolved specific use cases/ templates for businesses \n\n This template provides ideas on how a business would look like in the future. Enter a business area and get the results. We use a equally powerful AI model bigscience/bloom"""
        )
    # Single-line input for the business area the user wants explored.
    textbox = gr.Textbox(placeholder="Enter business type here...", lines=1,label='Your business area')
    btn = gr.Button("Generate")    
    output1 = gr.Textbox(lines=2,label='The future')

    # Clicking the button sends the textbox value to query() and shows
    # the returned string in the output textbox.
    btn.click(query,inputs=[textbox], outputs=[output1])
    examples = gr.Examples(examples=['icecream parlor','space travel','book shop','ecommerce','grocery delivery'],
                           inputs=[textbox])
    

demo.launch()