FlipTip committed
Commit a216104
1 Parent(s): 76bf245

Upload app.py with huggingface_hub

Files changed (1)
  1. app.py +81 -0
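
The commit message above matches the default message huggingface_hub generates when a file is pushed programmatically. A minimal sketch of how such an upload is typically done with HfApi.upload_file; the token and repo id below are placeholders, not values taken from this page:

from huggingface_hub import HfApi

# Hypothetical sketch: push a local app.py to a Space with huggingface_hub.
# The token and repo_id are placeholders; the actual Space id is not shown on this page.
api = HfApi(token="hf_...")  # write-scoped access token
api.upload_file(
    path_or_fileobj="app.py",            # local file to upload
    path_in_repo="app.py",               # destination path inside the repo
    repo_id="your-username/your-space",  # placeholder repo id
    repo_type="space",                   # Gradio apps are hosted in Spaces
    commit_message="Upload app.py with huggingface_hub",
)
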
app.py ADDED
@@ -0,0 +1,81 @@
+ import json
+ import os
+
+ import gradio as gr
+ import requests
+
+ SYSTEM_PROMPT = "As an LLM, your primary function is to deliver witty and scathing critiques of technology products. Keep it entertaining and humorous, but also make sure your jokes aren't too mean-spirited or factually incorrect."
+ TITLE = "Tech Ripper"
+ EXAMPLE_INPUT = "iPhone 12"
+
+ zephyr_7b_beta = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta/"
+
+ HF_TOKEN = os.getenv("HF_TOKEN")
+ HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
+
+ def build_input_prompt(message, chatbot, system_prompt):
+     """
+     Constructs the input prompt string from the chatbot interactions and the current message.
+     """
+     input_prompt = "<|system|>\n" + system_prompt + "</s>\n<|user|>\n"
+     for interaction in chatbot:
+         input_prompt = input_prompt + str(interaction[0]) + "</s>\n<|assistant|>\n" + str(interaction[1]) + "\n</s>\n<|user|>\n"
+
+     input_prompt = input_prompt + str(message) + "</s>\n<|assistant|>"
+     return input_prompt
+
+
+ def post_request_beta(payload):
+     """
+     Sends a POST request to the predefined Zephyr-7b-Beta URL and returns the JSON response.
+     """
+     response = requests.post(zephyr_7b_beta, headers=HEADERS, json=payload)
+     response.raise_for_status()  # Raises an HTTPError if the request returned an unsuccessful status code
+     return response.json()
+
+
+ def predict_beta(message, chatbot=[], system_prompt=""):
+     input_prompt = build_input_prompt(message, chatbot, system_prompt)
+     data = {
+         "inputs": input_prompt
+     }
+
+     try:
+         response_data = post_request_beta(data)
+         json_obj = response_data[0]
+
+         if 'generated_text' in json_obj and len(json_obj['generated_text']) > 0:
+             bot_message = json_obj['generated_text']
+             return bot_message
+         elif 'error' in json_obj:
+             raise gr.Error(json_obj['error'] + ' Please refresh and try again with a smaller input prompt.')
+         else:
+             warning_msg = f"Unexpected response: {json_obj}"
+             raise gr.Error(warning_msg)
+     except requests.HTTPError as e:
+         error_msg = f"Request failed with status code {e.response.status_code}"
+         raise gr.Error(error_msg)
+     except json.JSONDecodeError as e:
+         error_msg = f"Failed to decode response as JSON: {str(e)}"
+         raise gr.Error(error_msg)
+
+ def test_preview_chatbot(message, history):
+     # The generated text echoes the prompt, so keep only what follows the last assistant tag.
+     response = predict_beta(message, history, SYSTEM_PROMPT)
+     text_start = response.rfind("<|assistant|>") + len("<|assistant|>")
+     response = response[text_start:]
+     return response
+
+
+ welcome_preview_message = f"""
+ Welcome to **{TITLE}**! Say something like:
+
+ "{EXAMPLE_INPUT}"
+ """
+
+ chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
+ textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)
+
+ demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)
+
+ demo.launch()
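
For reference, build_input_prompt assembles the request in the Zephyr chat format, wrapping the system prompt, the chat history, and the new message in <|system|>, <|user|> and <|assistant|> tags. A quick illustration of the string sent as "inputs", assuming the definitions above are in scope (e.g. in a REPL, before demo.launch() is called):

# Illustration: prompt built for the example input with an empty chat history.
prompt = build_input_prompt(EXAMPLE_INPUT, [], SYSTEM_PROMPT)
print(prompt)
# <|system|>
# As an LLM, your primary function is to deliver witty and scathing critiques [...]</s>
# <|user|>
# iPhone 12</s>
# <|assistant|>

Because the generated_text returned by the Inference API typically includes the prompt itself, test_preview_chatbot keeps only the text after the final <|assistant|> tag.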