aaaaa
app.py CHANGED
@@ -12,7 +12,7 @@ stop_inference = False
 
 def respond(
     message,
-    history,
+    history: list[tuple[str, str]],
     system_message="You are a friendly Chatbot.",
     max_tokens=512,
     temperature=0.7,
@@ -123,45 +123,30 @@ custom_css = """
 }
 """
 
-#
-
-
-
+# Define the interface
+with gr.Blocks(css=custom_css) as demo:
+    gr.Markdown("<h1 style='text-align: center;'>🌟 Fancy AI Chatbot 🌟</h1>")
+    gr.Markdown("Interact with the AI chatbot using customizable settings below.")
 
-
-
-
+    with gr.Row():
+        system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message", interactive=True)
+        use_local_model = gr.Checkbox(label="Use Local Model", value=False)
 
-
-
-
-
+    with gr.Row():
+        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
+        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
+        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
 
-
+    chat_history = gr.Chatbot(label="Chat")
 
-
+    user_input = gr.Textbox(show_label=False, placeholder="Type your message here...")
 
-
+    cancel_button = gr.Button("Cancel Inference", variant="danger")
 
-#
-
-
-
-# if __name__ == "__main__":
-#     demo.launch(share=False) # Remove share=True because it's not supported on HF Spaces
-
-demo = gr.ChatInterface(
-    fn=respond,
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
-        gr.Checkbox(label="Use Local Model", value=False)
-    ],
-    cancel=gr.Button("Cancel Inference", variant="danger"),
-    css=custom_css # Apply the custom CSS styling
-)
+    # Adjusted to ensure history is maintained and passed correctly
+    user_input.submit(respond, [user_input, chat_history, system_message, max_tokens, temperature, top_p, use_local_model], chat_history)
+    cancel_button.click(cancel_inference)
 
 if __name__ == "__main__":
-    demo.launch(share=True)
+    demo.launch(share=False) # Remove share=True because it's not supported on HF Spaces
+
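Note on the rewired callback: `user_input.submit(respond, [...], chat_history)` passes its inputs positionally, so `respond` has to accept them in that order and return or yield the updated list of `(user, bot)` tuples for the `gr.Chatbot` output. Only the signature appears in this diff; the body below is a placeholder sketch under that assumption, with an echoed reply standing in for the app's real (local or API) inference code.

# Sketch only: a generator compatible with the submit() wiring above.
# stop_inference is the module-level flag visible in the first hunk's context;
# the echoed reply is a stand-in for the actual model call, which is not shown here.
stop_inference = False

def respond(
    message,
    history: list[tuple[str, str]],
    system_message="You are a friendly Chatbot.",
    max_tokens=512,
    temperature=0.7,
    top_p=0.95,
    use_local_model=False,
):
    global stop_inference
    stop_inference = False  # reset the cancel flag for each new request
    history = history or []
    response = ""
    # Placeholder streaming: yield the growing reply word by word so the
    # gr.Chatbot output refreshes live and the Cancel button can interrupt it.
    for word in f"(echo) {message}".split():
        if stop_inference:
            response += " [cancelled]"
            yield history + [(message, response)]
            return
        response = f"{response} {word}".strip()
        yield history + [(message, response)]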
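`cancel_button.click(cancel_inference)` keeps the existing cancel hook, but `cancel_inference` itself is outside this diff. Assuming it only flips the shared `stop_inference` flag that `respond` polls between yields, a minimal version would be:

def cancel_inference():
    # Assumed implementation: set the shared flag; the streaming loop in
    # respond() sees it on its next iteration and stops yielding.
    global stop_inference
    stop_inference = True

Because the handler takes no inputs and produces no outputs, registering it with no extra arguments, as in the diff, is sufficient.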