Update app.py
app.py CHANGED
@@ -9,9 +9,7 @@ from resultados_consulta import resultados_consulta, detalles_resultados
 import tensorflow as tf
 import tensorflow.python.ops.numpy_ops.np_config as np_config
 from math import ceil
-import torch
 
-np_config.enable_numpy_behavior(prefer_float32=False, dtype_conversion_mode='all')
 
 def split_frame(input_df, rows):
     df=[]
@@ -140,8 +138,8 @@ def main():
     answer=tokenizer.decode(predict_answer_tokens)
 
     if (len(answer)>0):
-        answer_start_scores =
-        answer_end_scores =
+        answer_start_scores = tf.nn.softmax(outputs.start_logits)
+        answer_end_scores = tf.nn.softmax(outputs.end_logits)
         score = answer_start_scores*answer_end_scores
         st.write(score)
         cantidad_respuestas = cantidad_respuestas + 1
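For context, the added lines turn the model's raw start/end logits into softmax probabilities and use their product as the answer score. A minimal standalone sketch of that pattern, assuming a transformers TF question-answering model; the checkpoint name, the example inputs, and the final scalar reduction are illustrative assumptions, not taken from app.py:

import tensorflow as tf
from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering

# Checkpoint chosen only for illustration; the model used in app.py may differ.
model_name = "distilbert-base-cased-distilled-squad"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = TFAutoModelForQuestionAnswering.from_pretrained(model_name)

question = "Quien escribio el informe?"
context = "El informe fue escrito por el equipo de datos en 2023."
inputs = tokenizer(question, context, return_tensors="tf")
outputs = model(**inputs)

# Same transformation as the commit: probabilities over start/end token positions.
answer_start_scores = tf.nn.softmax(outputs.start_logits)
answer_end_scores = tf.nn.softmax(outputs.end_logits)

# Best span according to those probabilities.
start_index = int(tf.math.argmax(answer_start_scores, axis=-1)[0])
end_index = int(tf.math.argmax(answer_end_scores, axis=-1)[0])
predict_answer_tokens = inputs["input_ids"][0, start_index : end_index + 1]
answer = tokenizer.decode(predict_answer_tokens)

# The diff multiplies the two probability tensors directly; reducing each to its
# per-position maximum (one way to get a single confidence value) is an assumption here.
score = float(tf.reduce_max(answer_start_scores) * tf.reduce_max(answer_end_scores))
print(answer, score)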