Prompt format?

#1
by Hoioi - opened

What's the prompt format for using this model?

Owner — the model expects the Alpaca-style prompt template shown below:

def make_inference(instruction, context=None):
    """Generate and display a model response for an Alpaca-style prompt.

    Builds the instruction-tuning prompt template (with an optional
    ``### Input`` section when *context* is given), runs greedy generation
    on the quantized model, and renders the decoded output as Markdown.

    Args:
        instruction: The task description placed in the ``### Instruction`` section.
        context: Optional supporting text placed in the ``### Input`` section.
            When ``None`` (or empty), the context-free template is used.

    Note:
        Relies on notebook globals ``tokenizer``, ``quant_model``, and the
        IPython ``display``/``Markdown`` helpers; assumes a CUDA device at
        ``cuda:0``.
    """
    if context:
        prompt = f"Below is an instruction that describes a task, paired with an input that provides further context.\n\n### Instruction: \n{instruction}\n\n### Input: \n{context}\n\n### Response: \n"
    else:
        prompt = f"Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction: \n{instruction}\n\n### Response: \n"
    # token_type_ids are not accepted by causal-LM generate(), so drop them here.
    inputs = tokenizer(prompt, return_tensors="pt", return_token_type_ids=False).to("cuda:0")
    outputs = quant_model.generate(**inputs, max_new_tokens=500)
    # outputs[0] includes the prompt tokens; decode strips special tokens only.
    display(Markdown(tokenizer.decode(outputs[0], skip_special_tokens=True)))

# Example usage: summarization with an explicit context passage.
make_inference("Summarize the context.", "Context... ...")

Sign up or log in to comment