File size: 467 Bytes
ce3c9da
 
 
dbd557b
ce3c9da
 
dbd557b
0a6aa83
 
 
 
 
 
 
dbd557b
ce3c9da
dbd557b
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
"""Download and initialize the Meta-Llama-3-8B-Instruct tokenizer and model."""
import os

# Hugging Face access token. The meta-llama repos are gated, so downloads
# below need a valid token; None (env var unset) falls back to any cached
# CLI login. BUG FIX: the original read this token but never used it.
HF_TOKEN = os.getenv('HF_TOKEN')
print("Token loaded")

import transformers
import torch
# BUG FIX: the original called AutoTokenizer / AutoModelForCausalLM without
# importing them (`import transformers` alone does not bind those names),
# which raises NameError at L45. Import them explicitly.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Single source of truth for the checkpoint id (was duplicated twice).
MODEL_ID = "meta-llama/Meta-Llama-3-8B-Instruct"

# Load the tokenizer and model
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    device_map="auto",  # shard across available devices; alternative: .to("cuda:0")
    token=HF_TOKEN,
)

# Generation stop ids for the Llama-3 chat format: the model's EOS token
# plus the end-of-turn marker "<|eot_id|>".
terminators = [
    tokenizer.eos_token_id,
    tokenizer.convert_tokens_to_ids("<|eot_id|>"),
]

print("llama download successfully")