william4416 committed
Commit 7617e4a • 1 Parent(s): 6620e7b
Update app.py
app.py CHANGED

@@ -38,21 +38,18 @@ def predict(input, history=[]):
     response = tokenizer.decode(history[0], skip_special_tokens=True)
 
     print("Response:", response)  # Debug statement
-    return response
+    return response
 
 def main():
     # You can add logic here to read known question-answer pairs, for example, from a JSON file
     pass
 
-textbox_output = gr.outputs.Textbox(label="Chatbot Response")
-state_output = "state"
-
 gr.Interface(
     fn=predict,
     title=title,
     description=description,
     examples=examples,
     inputs=["text", "state"],
-    outputs=
+    outputs="text",
     theme="finlaymacklon/boxy_violet",
 ).launch()
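For context, a minimal sketch of the Gradio pattern this commit lands on: the string shortcut outputs="text" replaces the removed gr.outputs.Textbox(...) component, and the dangling outputs= is completed. The predict function below is a hypothetical stand-in, not the Space's app.py; the real function decodes a tokenizer response and also takes a "state" history input, which is omitted here so the sketch runs on its own.

import gradio as gr

# Hypothetical stand-in for the Space's predict(); the real function decodes a
# model response with a tokenizer and keeps a conversation history.
def predict(message):
    response = f"Echo: {message}"
    print("Response:", response)  # Debug statement, as in the committed code
    return response

# Output components can be passed as string shortcuts ("text") instead of the
# deprecated gr.outputs.Textbox used before this commit.
gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
).launch()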