from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained("RootYuan/opt-1.3b-alpaca")
model = AutoModelForCausalLM.from_pretrained("RootYuan/opt-1.3b-alpaca")
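
For faster generation on a GPU, the checkpoint can also be loaded in half precision. A minimal sketch, assuming a CUDA-capable device is available; the loading options below are standard transformers/PyTorch arguments, not settings taken from this model card:

import torch

# Load in float16 to roughly halve memory use, then move the model to the GPU
model = AutoModelForCausalLM.from_pretrained(
    "RootYuan/opt-1.3b-alpaca", torch_dtype=torch.float16
).to("cuda")

When loading this way, the tokenized inputs must be moved to the same device, e.g. inputs = inputs.to("cuda"), before calling model.generate.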

Usage:

instruction = "Classify the following into animals, plants, and minerals"
input = "Oak tree, copper ore, elephant"

# Alpaca-style prompt templates, with and without the optional input field
prompts_no_input = f"### Instruction:\n{instruction}\n\n### Response:"
prompts_with_input = f"### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:"
prompts = prompts_no_input if input is None else prompts_with_input

# Tokenize the prompt and generate a completion
inputs = tokenizer(prompts, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
# Decode without special tokens, then drop the echoed prompt to keep only the response
ans = tokenizer.decode(outputs[0], skip_special_tokens=True)[len(prompts):].strip()
if input is None:
    print(f"Human: {instruction}")
else:
    print(f"Human: {instruction}\nInput: {input}")
print(f"Assistant: {ans}")

Output (from the greedy call above):

Human: Classify the following into animals, plants, and minerals
Input: Oak tree, copper ore, elephant
Assistant: Oak tree: Plant
           Copper ore: Mineral
           Elephant: Animal
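
The prompt construction and decoding steps can be wrapped in a small convenience function. A minimal sketch; generate_response is a hypothetical helper name, not part of this model card, and it reuses the tokenizer and model loaded above:

def generate_response(instruction, input_text=None, max_new_tokens=64):
    # Build the Alpaca-style prompt, with or without the optional input field
    if input_text is None:
        prompt = f"### Instruction:\n{instruction}\n\n### Response:"
    else:
        prompt = f"### Instruction:\n{instruction}\n\n### Input:\n{input_text}\n\n### Response:"
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Drop special tokens and the echoed prompt so only the response remains
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return text[len(prompt):].strip()

print(generate_response(instruction, input))

The final call reuses the instruction and input variables from the example above.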