# testing_llm / app.py
import gradio as gr
import requests
import os
# BLOOM hosted on the Hugging Face Inference API
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
HF_TOKEN = os.environ["HF_TOKEN"]
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
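
# Sample few-shot prompts kept around for manual experiments; they are defined
# below but are not wired into the Gradio UI.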
prompt1 = """
word: risk
poem using word: And then the day came,
when the risk
to remain tight
in a bud
was more painful
than the risk
it took
to blossom.
word: """
prompt2 = """
Q: Joy has 5 balls. He buys 2 more cans of balls. Each can has 3 balls. How many balls does he have now?
A: Joy had 5 balls. 2 cans of 3 balls each is 6 balls. 5 + 6 = 11. The answer is 11.
Q: Jane has 16 balls. Half of the balls are golf balls, and half of the golf balls are red. How many red golf balls are there?
A: """
prompt3 = """Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?
A: Let’s think step by step.
"""
def text_generate(prompt, generated_txt):
    print(f"*****Inside text_generate - Prompt is: {prompt}")
json_ = {"inputs": prompt,
"parameters":
{
"top_p": 0.9,
"temperature": 1.1,
"max_new_tokens": 250,
"return_full_text": True,
"do_sample":True,
},
"options":
{"use_cache": True,
"wait_for_model": True,
},}
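    # The hosted inference API returns a list of dicts; element 0 carries
    # "generated_text", which includes the original prompt because
    # return_full_text is True.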
    response = requests.post(API_URL, headers=headers, json=json_)
    print(f"Response is: {response}")
    output = response.json()
    print(f"output is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"output_tmp is: {output_tmp}")
    solution = output_tmp.split("\nQ:")[0]
    print(f"Final response after splits is: {solution}")
    if '\nOutput:' in solution:
        final_solution = solution.split("\nOutput:")[0]
        print(f"Response after removing output is: {final_solution}")
    elif '\n\n' in solution:
        final_solution = solution.split("\n\n")[0]
        print(f"Response after removing new line entries is: {final_solution}")
    else:
        final_solution = solution
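    # Stitch the new completion onto what is already displayed (dropping the
    # previously repeated prompt tail) and keep only the freshly generated part
    # as the candidate prompt for the next cycle.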
    if len(generated_txt) == 0:
        display_output = final_solution
    else:
        display_output = generated_txt[:-len(prompt)] + final_solution
    new_prompt = final_solution[len(prompt):]
    print(f"new prompt for next cycle is: {new_prompt}")
    print(f"display_output for printing on screen is: {display_output}")
    if len(new_prompt) == 0:
        temp_text = display_output[::-1]
        print(f"What is the last character of the sentence? : {temp_text[0]}")
        if temp_text[1] == '.':
            first_period_loc = temp_text[2:].find('.') + 1
            print(f"Location of last period is: {first_period_loc}")
            new_prompt = display_output[-first_period_loc:-1]
            print(f"Not sending a blank prompt, so the new prompt for the next cycle is: {new_prompt}")
        else:
            print("HERE")
            first_period_loc = temp_text.find('.')
            print(f"Location of last period is: {first_period_loc}")
            new_prompt = display_output[-first_period_loc:-1]
            print(f"Not sending a blank prompt, so the new prompt for the next cycle is: {new_prompt}")
        display_output = display_output[:-1]
    return display_output, new_prompt
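
# A minimal sketch of driving the generator outside the UI (assumes HF_TOKEN is
# set and the model is loaded; the example strings are illustrative only):
#
#   first_pass, next_prompt = text_generate("Once upon a time", "")
#   second_pass, _ = text_generate(next_prompt, first_pass)

# Gradio UI: a prompt textbox and an output textbox wired to one button; each
# click appends new text to the output box and writes the generated tail back
# into the prompt box so generation can be continued.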
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Bloom</center></h1>")
    gr.Markdown("""Testing BLOOM for iterative text generation.""")
    with gr.Row():
        input_prompt = gr.Textbox(label="Write some text to get started...", lines=3)
    with gr.Row():
        generated_txt = gr.Textbox(lines=7, visible=True)
    b1 = gr.Button("Generate Text")
    b1.click(text_generate, inputs=[input_prompt, generated_txt], outputs=[generated_txt, input_prompt])
demo.launch(enable_queue=True, debug=True)