Update app.py
app.py
CHANGED
@@ -263,8 +263,8 @@ def generate_response():
 
 
     app.logger.info('Prompt:')
-
-    app.logger.info(
+    user_request = model.detokenize(tokens[:CONTEXT_SIZE]).decode("utf-8", errors="ignore")
+    app.logger.info(user_request)
 
     stop_generation = False
     app.logger.info('Generate started')
@@ -282,7 +282,7 @@ def generate_response():
 def generate_and_log_tokens(model, generator):
     for token in generate_tokens(model, generator):
         if token == model.token_eos():  # or (max_new_tokens is not None and i >= max_new_tokens):
-            log(
+            log(user_request, model.detokenize(response_tokens).decode("utf-8", errors="ignore"))
             break
         response_tokens.append(token)
         yield token
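In short, the commit captures the decoded prompt text before generation starts and, once the end-of-sequence token is reached, logs it together with the decoded response. Below is a minimal, self-contained sketch of that flow. It assumes the model object follows llama-cpp-python's Llama API (tokenize, detokenize, token_eos, generate); the CONTEXT_SIZE value and the log() helper here are hypothetical stand-ins for the ones defined elsewhere in app.py, which the diff does not show.

# Minimal sketch of the request/response logging flow this commit introduces.
# Assumptions: the model is llama-cpp-python's Llama; CONTEXT_SIZE and log()
# stand in for the real definitions elsewhere in app.py.
from llama_cpp import Llama

CONTEXT_SIZE = 2048  # assumed value; app.py defines its own constant


def log(request: str, response: str) -> None:
    # Stand-in for the app's log() helper referenced in the diff.
    print(f"request: {request!r}\nresponse: {response!r}")


def generate_and_log(model: Llama, prompt: str):
    tokens = model.tokenize(prompt.encode("utf-8"))
    # Capture the prompt text up front (truncated to the context window)
    # so it can be logged alongside the response later.
    user_request = model.detokenize(tokens[:CONTEXT_SIZE]).decode("utf-8", errors="ignore")

    response_tokens = []
    for token in model.generate(tokens):
        if token == model.token_eos():
            # Generation finished: log the prompt and the decoded response.
            log(user_request, model.detokenize(response_tokens).decode("utf-8", errors="ignore"))
            break
        response_tokens.append(token)
        yield model.detokenize([token]).decode("utf-8", errors="ignore")

One consequence of this placement: the log() call only runs when the EOS token is actually produced, so a generation that stops for another reason (client disconnect, token limit) is not logged. The commented-out max_new_tokens check on the same if-branch in the diff suggests the same branch could be extended to cover that case.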