bilgeyucel committed on
Commit
09b81f2
1 Parent(s): cbca659

Add demo code

Browse files
Files changed (3) hide show
  1. app.py +24 -0
  2. requirements.txt +2 -0
  3. utils.py +6 -0
app.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from haystack.nodes import PromptNode
3
+
4
+ from utils import lemmatizer_func
5
+
6
def run_prompt(prompt, api_key):
    """Send the prompt to GPT-3.5-turbo twice — as-is and lemmatized.

    Builds a PromptNode with the caller-supplied OpenAI key, lemmatizes the
    prompt via ``lemmatizer_func``, and runs both variants so their answers
    and token usage can be compared side by side.

    Returns a 4-tuple:
        (plain answer, plain total tokens,
         lemmatized answer, lemmatized total tokens)
    """
    node = PromptNode(model_name_or_path="gpt-3.5-turbo", api_key=api_key)
    lemmatized = lemmatizer_func(prompt)

    plain_out = node(prompt)
    lemma_out = node(lemmatized)

    # NOTE(review): result shape (answers list + usage dict) comes from the
    # pinned haystack fork in requirements.txt — verify if that pin changes.
    return (
        plain_out[0][0],
        plain_out[1]["total_tokens"],
        lemma_out[0][0],
        lemma_out[1]["total_tokens"],
    )
12
+
13
# Gradio UI: compare the token cost of a plain prompt vs. its lemmatized form.
with gr.Blocks() as demo:
    api_key = gr.Textbox(label="Enter your api key")
    prompt = gr.Textbox(
        label="Prompt",
        value="Rachel has 17 apples. She gives 9 to Sarah. How many apples does Rachel have now?",
    )
    submit_btn = gr.Button("Submit")

    # Outputs: token counters plus the two model answers (copyable).
    token_count_plain = gr.Number(label="Plain Text Token Count")
    token_count_lemmatized = gr.Number(label="Lemmatized Text Token Count")
    prompt_response = gr.Textbox(label="Answer", show_copy_button=True)
    lemmatized_prompt_response = gr.Textbox(label="Lemm Answer", show_copy_button=True)

    # Output order must match run_prompt's 4-tuple return order.
    submit_btn.click(
        fn=run_prompt,
        inputs=[prompt, api_key],
        outputs=[
            prompt_response,
            token_count_plain,
            lemmatized_prompt_response,
            token_count_lemmatized,
        ],
    )

demo.launch()
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ simplemma
2
+ farm-haystack @ git+https://github.com/anakin87/haystack.git@hacky-tokens-exp
utils.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from simplemma import text_lemmatizer
2
+
3
def lemmatizer_func(plain_text):
    """Lemmatize English text and return it as one space-joined string.

    Uses simplemma's ``text_lemmatizer`` (lang="en") to reduce each word to
    its lemma, which typically shortens the prompt and lowers token usage.
    """
    lemmas = text_lemmatizer(plain_text, lang="en")
    return " ".join(lemmas)