Ritesh Khanna committed on
Commit 82905a4
1 Parent(s): 5a35674

log the prompt

Files changed (1):
  app.py +1 -1
app.py CHANGED
@@ -69,7 +69,7 @@ def generate_text(prompt, extra=False, top_k=100, top_p=0.95, temperature=0.85,
     samples = []
     try:
         for i in range(1):
-            print(f"Generating sample {i+1}")
+            print(f"Generating sample for prompt: {prompt}")
             outputs = model.generate(**inputs, max_length=256, do_sample=True, top_k=top_k, top_p=top_p, temperature=temperature, num_return_sequences=4, pad_token_id=tokenizer.eos_token_id)
             print(f"Generated {len(outputs)} samples.")
             for output in outputs:
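For context, below is a minimal, self-contained sketch of the code path this one-line change touches. Only the generate call, its parameters, and the new print line come from the diff above; the checkpoint name, the input tokenization, the decoding of outputs, and the trimmed signature are assumptions added so the example runs on its own, not a copy of app.py.

# Sketch of the changed code path, assuming a standard Hugging Face setup.
# "gpt2" is a placeholder checkpoint; app.py's actual model may differ.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

def generate_text(prompt, top_k=100, top_p=0.95, temperature=0.85):
    # Assumed: app.py builds `inputs` from the prompt with the tokenizer.
    inputs = tokenizer(prompt, return_tensors="pt")
    samples = []
    for i in range(1):
        # New behaviour from this commit: log the prompt text instead of a sample counter.
        print(f"Generating sample for prompt: {prompt}")
        outputs = model.generate(
            **inputs,
            max_length=256,
            do_sample=True,
            top_k=top_k,
            top_p=top_p,
            temperature=temperature,
            num_return_sequences=4,
            pad_token_id=tokenizer.eos_token_id,
        )
        print(f"Generated {len(outputs)} samples.")
        for output in outputs:
            # Assumed: decoded samples are collected and returned to the caller.
            samples.append(tokenizer.decode(output, skip_special_tokens=True))
    return samples

With this change, the server log records the exact prompt that triggered each generation request, which makes it easier to trace a given output back to its input.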