jonathanjordan21 committed: Update app.py

app.py CHANGED
@@ -47,7 +47,8 @@ codes_emb = model.encode(codes)
 def respond(
     message,
     history: list[tuple[str, str]],
-    threshold
+    threshold,
+    is_multiple
 ):
     global codes_emb
     global undetected
@@ -74,6 +75,24 @@ def respond(
     text_emb = model.encode(message)
     scores = cos_sim(codes_emb, text_emb)[:,0]
 
+    if is_multiple:
+        request_details = []
+        request_numbers = []
+        for i,score in enumerate(scores):
+            if score > threshold:
+                request_details.append(codes[i][6:])
+                request_numbers.append(codes[i][:3])
+
+        if not request_details:
+            request_details.append(undetected[6:])
+            request_numbers.append(undetected_code)
+
+        request_numbers = "\n".join(request_numbers)
+        request_details = "\n".join(request_details)
+
+        return "Request code number:\n" + request_numbers + "\nRequest detail:\n" + request_details + "\nPlate numbers: " + plate_numbers
+
+
     s_max = scores.argmax()
 
     if scores[s_max] < threshold:
@@ -82,7 +101,7 @@ def respond(
     else:
         request_code = codes[scores.argmax()]
 
-    return "Request code number: " + request_code[:3] + "\nRequest detail: " + request_code[6:] + "\
+    return "Request code number: " + request_code[:3] + "\nRequest detail: " + request_code[6:] + "\nPlate numbers: " + plate_numbers
 
     # for val in history:
     #     if val[0]:
@@ -126,7 +145,8 @@ with gr.Blocks() as demo:
     chat_interface = gr.ChatInterface(
         respond,
         additional_inputs=[
-            gr.Number(0.5, label="confidence threshold", show_label=True, minimum=0., maximum=1.0, step=0.1)
+            gr.Number(0.5, label="confidence threshold", show_label=True, minimum=0., maximum=1.0, step=0.1),
+            gr.Checkbox(label="multiple", info="Allow multiple request code numbers"),
         ]
     )
 
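For reference, the new is_multiple branch keeps every code whose cosine similarity clears the threshold instead of only the argmax. Below is a minimal, self-contained sketch of that logic; the model name and the example codes entries are placeholders (the Space defines its own model, codes, undetected, undetected_code, and plate_numbers elsewhere in app.py), and the "NNN - detail" layout is assumed from the [:3]/[6:] slices in the diff.

# Minimal sketch of the thresholded multi-match logic (placeholder model and codes).
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer("all-MiniLM-L6-v2")      # placeholder; the Space loads its own model
codes = ["101 - lost plate", "102 - damaged plate"]  # placeholder "NNN - detail" entries
codes_emb = model.encode(codes)

def match_codes(message, threshold=0.5):
    text_emb = model.encode(message)
    scores = cos_sim(codes_emb, text_emb)[:, 0]
    # collect every code whose similarity clears the threshold, like the is_multiple branch
    numbers = [codes[i][:3] for i, s in enumerate(scores) if s > threshold]
    details = [codes[i][6:] for i, s in enumerate(scores) if s > threshold]
    if not numbers:                                   # fallback mirrors the undetected case
        numbers, details = ["000"], ["undetected"]
    return "Request code number:\n" + "\n".join(numbers) + "\nRequest detail:\n" + "\n".join(details)

print(match_codes("my plate is damaged", threshold=0.5))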
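The UI side of the change relies on gr.ChatInterface passing the current values of additional_inputs to the chat function positionally, after message and history, so the Number maps to threshold and the new Checkbox to is_multiple. A stripped-down sketch of that wiring (the respond body here is a placeholder, not the Space's real function):

import gradio as gr

def respond(message, history, threshold, is_multiple):
    # placeholder body; the real respond does the embedding lookup shown in the diff
    return f"threshold={threshold}, multiple={is_multiple}"

with gr.Blocks() as demo:
    gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Number(0.5, label="confidence threshold", show_label=True, minimum=0.0, maximum=1.0, step=0.1),
            gr.Checkbox(label="multiple", info="Allow multiple request code numbers"),
        ],
    )

demo.launch()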