from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "microsoft/phi-2"

# Load the causal LM and its tokenizer.
model = AutoModelForCausalLM.from_pretrained(model_id)
# NOTE: the original passed add_eos_token=True, which appends <|endoftext|>
# to the *prompt*. For generation that tells the model the text is already
# finished and degrades output, so it is dropped here. padding_side='left'
# is kept: decoder-only models must be left-padded if batching is added later.
tokenizer = AutoTokenizer.from_pretrained(model_id, padding_side='left')
# phi-2's tokenizer has no pad token; reuse EOS so padding is well-defined.
tokenizer.pad_token = tokenizer.eos_token

input_text = "Who are you?"
# Keep the whole encoding dict (input_ids AND attention_mask).
input_ids = tokenizer(input_text, return_tensors="pt")
print(input_ids)

# Pass attention_mask along with input_ids (via **) so generate() does not
# have to guess which tokens are padding, and give pad_token_id explicitly
# to silence the "Setting pad_token_id to eos_token_id" warning.
outputs = model.generate(
    **input_ids,
    max_new_tokens=100,
    pad_token_id=tokenizer.eos_token_id,
)

# skip_special_tokens=True strips <|endoftext|>/pad markers from the text.
decoded_outputs = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(decoded_outputs)
print(outputs)