import os

import torch
import transformers

# Read the Hugging Face access token from the environment; it is required to
# download gated models such as Meta-Llama-3.
HF_TOKEN = os.getenv("HF_TOKEN")
print("Token loaded")

# Use the GPU if one is available, otherwise fall back to the CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Repository ID of the model on the Hugging Face Hub (no "/tree/main" suffix).
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device=device,
    token=HF_TOKEN,
)

print("Llama 3 model loaded successfully")