ariG23498 (HF Staff) committed
Commit f7558b0 · verified · 1 parent: 83b1e55

Upload meta-llama_Llama-3.2-1B_1.py with huggingface_hub
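
The commit message states the script was uploaded with huggingface_hub. As a point of reference, below is a minimal sketch of how such an upload is typically done with huggingface_hub.upload_file; the repo_id and repo_type here are assumptions mirroring the script's own upload call, not values taken from this commit's metadata.

from huggingface_hub import upload_file

# Hypothetical sketch: push the generated script to a Hugging Face repo.
# repo_id and repo_type are assumptions; the actual commit may target a different repo.
upload_file(
    path_or_fileobj="meta-llama_Llama-3.2-1B_1.py",
    path_in_repo="meta-llama_Llama-3.2-1B_1.py",
    repo_id="model-metadata/code_execution_files",
    repo_type="dataset",
)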

Files changed (1)
  1. meta-llama_Llama-3.2-1B_1.py +51 -0
meta-llama_Llama-3.2-1B_1.py ADDED
@@ -0,0 +1,51 @@
+# /// script
+# requires-python = ">=3.12"
+# dependencies = [
+#     "torch",
+#     "torchvision",
+#     "transformers",
+#     "diffusers",
+#     "sentence-transformers",
+#     "accelerate",
+#     "peft",
+#     "slack-sdk",
+# ]
+# ///
+
+try:
+    # Use a pipeline as a high-level helper
+    from transformers import pipeline
+
+    pipe = pipeline("text-generation", model="meta-llama/Llama-3.2-1B")
+    with open('meta-llama_Llama-3.2-1B_1.txt', 'w', encoding='utf-8') as f:
+        f.write('Everything was good in meta-llama_Llama-3.2-1B_1.txt')
+except Exception as e:
+    import os
+    from slack_sdk import WebClient
+    client = WebClient(token=os.environ['SLACK_TOKEN'])
+    client.chat_postMessage(
+        channel='#exp-slack-alerts',
+        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/meta-llama_Llama-3.2-1B_1.txt|meta-llama_Llama-3.2-1B_1.txt>',
+    )
+
+    with open('meta-llama_Llama-3.2-1B_1.txt', 'a', encoding='utf-8') as f:
+        import traceback
+        f.write('''```CODE:
+# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+pipe = pipeline("text-generation", model="meta-llama/Llama-3.2-1B")
+```
+
+ERROR:
+''')
+        traceback.print_exc(file=f)
+
+finally:
+    from huggingface_hub import upload_file
+    upload_file(
+        path_or_fileobj='meta-llama_Llama-3.2-1B_1.txt',
+        repo_id='model-metadata/code_execution_files',
+        path_in_repo='meta-llama_Llama-3.2-1B_1.txt',
+        repo_type='dataset',
+    )