import concurrent.futures

import gradio as gr
from haystack.nodes import PromptNode

from utils import lemmatizer_func
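
# lemmatizer_func (defined in the local utils module) rewrites a prompt into its
# lemmatized form, so the app can compare model output and token usage between
# the plain and lemmatized versions of the same prompt.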


def run_prompt(prompt, api_key, model_name, max_length):
    # One PromptNode serves both the plain and the lemmatized prompt.
    prompt_node = PromptNode(model_name_or_path=model_name, api_key=api_key, max_length=max_length)
    lemmatized_prompt = lemmatizer_func(prompt)

    # Query the model with both prompt versions in parallel.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future_plain = executor.submit(prompt_node, prompt)
        future_lemmatized = executor.submit(prompt_node, lemmatized_prompt)
        response_plain = future_plain.result()
        response_lemmatized = future_lemmatized.result()

    # Each response carries the generated text plus prompt/completion token counts.
    return (
        lemmatized_prompt,
        response_plain[0][0], response_plain[1]["prompt_tokens"], response_plain[1]["completion_tokens"],
        response_lemmatized[0][0], response_lemmatized[1]["prompt_tokens"], response_lemmatized[1]["completion_tokens"],
    )
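
# Markdown shown at the top of the app.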
description = """
# Prompt Lemmatizer 🐢
## Lemmatize your prompts and compare the outputs of lemmatized and non-lemmatized versions.
Enter an OpenAI or Cohere API key, choose your model, and set the `max_length`.
Built by [Bilge Yucel](https://twitter.com/bilgeycl) and [Stefano Fiorucci](https://github.com/anakin87), with [Haystack](https://github.com/deepset-ai/haystack).
"""
with gr.Blocks(theme="default") as demo:
    gr.Markdown(value=description)

    with gr.Row():
        api_key = gr.Textbox(label="Enter your API key", type="password")
        model_name = gr.Dropdown(["text-davinci-003", "gpt-3.5-turbo", "gpt-4", "gpt-4-32k", "command", "command-light", "base", "base-light"], value="gpt-3.5-turbo", label="Choose your model!")
        max_length = gr.Slider(100, 500, value=100, step=10, label="Max Length", info="Max token length of the response. Choose a value between 100 and 500.")
    with gr.Row():
        prompt = gr.TextArea(label="Prompt", value="Rachel has 17 apples. She gives 9 to Sarah. How many apples does Rachel have now?")
        gr.Examples(
            [
                "I want you to act as a travel guide. I will write you my location and you will suggest a place to visit near my location. In some cases, I will also give you the type of places I will visit. You will also suggest me places of similar type that are close to my first location. My first suggestion request is \"I am in Italy and I want to visit only museums.\"",
                "Antibiotics are a type of medication used to treat bacterial infections. They work by either killing the bacteria or preventing them from reproducing, allowing the body’s immune system to fight off the infection. Antibiotics are usually taken orally in the form of pills, capsules, or liquid solutions, or sometimes administered intravenously. They are not effective against viral infections, and using them inappropriately can lead to antibiotic resistance. Explain the above in one sentence:",
                "Please give a sentiment for this context. Answer with positive, negative or neutral. Context: A flicker in the dark started of interesting and I was glued to the novel. It was just a little longer that I had anticipated to get to the bottom of the story. I felt sorry for the Chloe's mother but I had thought there was something odd about her brother. Well being a murderer was it any wonder he did not like this sister's boyfriend because I think he knew what happened. I love the cover of the book and the title is good. If only hardbacks had the cover printed onto them. Answer:",
            ],
            examples_per_page=1,
            inputs=prompt,
            label="Click on any example",
        )

    submit_btn = gr.Button("✂️ Let's lemmatize and see!")
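
    # Results: the left column shows the plain prompt's output and token counts,
    # the right column shows the same for the lemmatized prompt.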
    with gr.Row():
        with gr.Column():
            with gr.Row():
                token_count_plain = gr.Number(label="Prompt Token Count")
                token_count_plain_completion = gr.Number(label="Output Token Count")
            with gr.Row():
                prompt_response = gr.TextArea(label="Output", show_copy_button=True)
        with gr.Column():
            with gr.Row():
                token_count_lemmatized = gr.Number(label="Lemmatized Prompt Token Count")
                token_count_lemmatized_completion = gr.Number(label="Output Token Count (Lemmatized Prompt)")
            lemmatized_prompt_response = gr.TextArea(label="Output (Lemmatized Prompt)", show_copy_button=True)

    with gr.Accordion("See Lemmatized Prompt", open=False):
        lemmatized_prompt = gr.TextArea(show_copy_button=True, show_label=False, container=False)
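
    # run_prompt returns its values in the same order as this outputs list.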
    submit_btn.click(
        fn=run_prompt,
        inputs=[prompt, api_key, model_name, max_length],
        outputs=[lemmatized_prompt, prompt_response, token_count_plain, token_count_plain_completion,
                 lemmatized_prompt_response, token_count_lemmatized, token_count_lemmatized_completion],
    )


if __name__ == "__main__":
    demo.launch()