File size: 358 Bytes
ce3c9da
 
 
dbd557b
ce3c9da
 
dbd557b
ddaaa7d
 
 
 
 
 
 
 
dbd557b
ce3c9da
dbd557b
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
"""Download and initialize a Meta-Llama-3-8B-Instruct text-generation pipeline.

Requires the HF_TOKEN environment variable to hold a Hugging Face access
token, since Meta-Llama-3 is a gated model, and a CUDA-capable GPU.
"""
import os

import torch
import transformers

# Meta-Llama-3 is gated: downloads are rejected without an authenticated token.
HF_TOKEN = os.getenv('HF_TOKEN')
if HF_TOKEN is None:
    # Fail fast with a clear message instead of a confusing 401 mid-download.
    raise RuntimeError("HF_TOKEN environment variable is not set; "
                       "it is required to download gated Llama models.")
print("Token loaded")

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"

pipeline = transformers.pipeline(
  "text-generation",
  model=model_id,  # reuse the constant instead of duplicating the literal
  model_kwargs={"torch_dtype": torch.bfloat16},  # bf16 halves memory vs fp32
  device="cuda",
  token=HF_TOKEN,  # fix: token was loaded but never passed, so gated download failed
)

print("llama download successfully")