theostos committed
Commit 19a41cb
1 Parent(s): 8a18adb
app.py CHANGED
@@ -36,7 +36,7 @@ def respond(
     model_inputs = generate_custom_mask(tokenizer, [prompt], device)
 
     outputs = model.generate(temperature=0.7, max_tokens=32, **model_inputs)
-    outputs = outputs[:, model_inputs['input_ids'].shape[1]:]
+    # outputs = outputs[:, model_inputs['input_ids'].shape[1]:]
     result = tokenizer.batch_decode(outputs, skip_special_tokens=True)
 
     return result
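The line toggled in this commit strips the echoed prompt tokens from `model.generate`'s output before decoding; with it commented out, `batch_decode` returns the prompt together with the completion. A minimal sketch of that behaviour using the standard `transformers` API (the `gpt2` checkpoint and the plain tokenizer call are stand-ins for this Space's `generate_custom_mask` helper and its custom `generate` signature):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder model/tokenizer; the Space uses its own checkpoint and
# generate_custom_mask(tokenizer, [prompt], device) to build model_inputs.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

prompt = "Hello, world"
model_inputs = tokenizer([prompt], return_tensors="pt")

outputs = model.generate(
    **model_inputs, max_new_tokens=32, do_sample=True, temperature=0.7
)

# For decoder-only models, generate() returns the prompt tokens followed by
# the newly generated ones. Slicing off the prompt length keeps only the
# completion; skipping the slice (as in this commit) decodes prompt + completion.
completion_only = outputs[:, model_inputs["input_ids"].shape[1]:]

print(tokenizer.batch_decode(outputs, skip_special_tokens=True))          # prompt + completion
print(tokenizer.batch_decode(completion_only, skip_special_tokens=True))  # completion only
```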
model/__pycache__/modeling_llamask.cpython-39.pyc ADDED
Binary file (2.84 kB)
 
model/__pycache__/tokenizer_utils.cpython-39.pyc ADDED
Binary file (2.45 kB)