Update blueprints/summarize.py
blueprints/summarize.py  +4 -2
@@ -71,8 +71,8 @@ def run_inference_logic(config):
         max_length=512,
         min_length=30,
         do_sample=True,
-        temperature=config['temp'],
-        top_k=config['topk']
+        temperature=float(config['temp']),
+        top_k=int(config['topk'])
     )
     return {"output": out[0]['summary_text']}
 
@@ -99,6 +99,8 @@ def api_summarize():
         "text": data['text'],
         "model_name": data.get('model_name', "facebook/bart-large-cnn"),
         "hf_token": data.get('hf_token', hf_token),
+        "temp": data.get('temp','0.7'),
+        "topk": data.get('topk','50'),
         # We force this for the specific summarization UI,
         # but the backend logic supports others.
         "task_type": "SEQ_2_SEQ_LM"
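Net effect: api_summarize now accepts optional temp and topk request fields (string defaults '0.7' and '50'), and run_inference_logic casts them to float/int before handing them to the generation call, so string values coming from JSON no longer break sampling. A minimal client sketch follows; the /summarize route, host, and port are assumptions not shown in this diff, only the request fields come from the change above.

import requests

# Hypothetical client call against the summarization endpoint.
# The URL is an assumption; the field names match the diff above.
payload = {
    "text": "Long article text to summarize ...",
    "model_name": "facebook/bart-large-cnn",
    "temp": "0.9",   # sent as a string; the backend now casts it with float()
    "topk": "40",    # sent as a string; the backend now casts it with int()
}
resp = requests.post("http://localhost:7860/summarize", json=payload, timeout=120)
print(resp.json()["output"])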