Update README.md
README.md CHANGED
```diff
@@ -42,7 +42,4 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 model = AutoModelForCausalLM.from_pretrained("pAce576/llama3.2-1b-Instruct")
 tokenizer = AutoTokenizer.from_pretrained("pAce576/llama3.2-1b-Instruct")
 
-
-inputs = tokenizer(prompt, return_tensors="pt")
-outputs = model.generate(**inputs, max_new_tokens=100)
-print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+#Your own generation function
```
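The commit removes the fixed generation snippet from the README and leaves a placeholder comment in its place. As a minimal sketch of what that placeholder could be filled with, the lines the commit removes can be wrapped in a small helper; the `generate` function name, its defaults, and the example prompt below are illustrative assumptions, not part of the repository.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("pAce576/llama3.2-1b-Instruct")
tokenizer = AutoTokenizer.from_pretrained("pAce576/llama3.2-1b-Instruct")

# Hypothetical "your own generation function": a thin wrapper around the snippet the commit removed.
def generate(prompt: str, max_new_tokens: int = 100) -> str:
    inputs = tokenizer(prompt, return_tensors="pt")                     # tokenize the prompt
    outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)   # default (greedy) decoding
    return tokenizer.decode(outputs[0], skip_special_tokens=True)       # strip special tokens from the output

print(generate("Write a short poem about llamas."))
```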