ariG23498 (HF Staff) committed
Commit 528169e · verified · 1 Parent(s): dde343f

Upload ArliAI_gpt-oss-120b-Derestricted_1.py with huggingface_hub
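The commit message notes the file was pushed with `huggingface_hub`. As a rough illustration, a push like this is usually a single `upload_file` call; the target repo below is an assumption inferred from the URLs inside the script itself, and authentication is left to a cached login or the `HF_TOKEN` environment variable:

```python
# Hypothetical sketch of how this .py file could have been uploaded.
# Assumes the target is the 'model-metadata/code_execution_files' dataset
# referenced inside the script; not confirmed by this commit page.
from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="ArliAI_gpt-oss-120b-Derestricted_1.py",  # local script to push
    path_in_repo="ArliAI_gpt-oss-120b-Derestricted_1.py",     # destination path in the repo
    repo_id="model-metadata/code_execution_files",            # assumed target repo
    repo_type="dataset",
)
```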

ArliAI_gpt-oss-120b-Derestricted_1.py ADDED
@@ -0,0 +1,86 @@
+ # /// script
+ # requires-python = ">=3.12"
+ # dependencies = [
+ #     "numpy",
+ #     "einops",
+ #     "pandas",
+ #     "matplotlib",
+ #     "protobuf",
+ #     "torch",
+ #     "sentencepiece",
+ #     "torchvision",
+ #     "transformers",
+ #     "timm",
+ #     "diffusers",
+ #     "sentence-transformers",
+ #     "accelerate",
+ #     "peft",
+ #     "slack-sdk",
+ # ]
+ # ///
+
+ try:
+     # Load model directly
+     from transformers import AutoTokenizer, AutoModelForCausalLM
+
+     tokenizer = AutoTokenizer.from_pretrained("ArliAI/gpt-oss-120b-Derestricted")
+     model = AutoModelForCausalLM.from_pretrained("ArliAI/gpt-oss-120b-Derestricted")
+     messages = [
+         {"role": "user", "content": "Who are you?"},
+     ]
+     inputs = tokenizer.apply_chat_template(
+         messages,
+         add_generation_prompt=True,
+         tokenize=True,
+         return_dict=True,
+         return_tensors="pt",
+     ).to(model.device)
+
+     outputs = model.generate(**inputs, max_new_tokens=40)
+     print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+     with open('ArliAI_gpt-oss-120b-Derestricted_1.txt', 'w', encoding='utf-8') as f:
+         f.write('Everything was good in ArliAI_gpt-oss-120b-Derestricted_1.txt')
+ except Exception as e:
+     import os
+     from slack_sdk import WebClient
+     client = WebClient(token=os.environ['SLACK_TOKEN'])
+     client.chat_postMessage(
+         channel='#hub-model-metadata-snippets-sprint',
+         text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/ArliAI_gpt-oss-120b-Derestricted_1.txt|ArliAI_gpt-oss-120b-Derestricted_1.txt>',
+     )
+
+     with open('ArliAI_gpt-oss-120b-Derestricted_1.txt', 'a', encoding='utf-8') as f:
+         import traceback
+         f.write('''```CODE:
+ # Load model directly
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+
+ tokenizer = AutoTokenizer.from_pretrained("ArliAI/gpt-oss-120b-Derestricted")
+ model = AutoModelForCausalLM.from_pretrained("ArliAI/gpt-oss-120b-Derestricted")
+ messages = [
+     {"role": "user", "content": "Who are you?"},
+ ]
+ inputs = tokenizer.apply_chat_template(
+     messages,
+     add_generation_prompt=True,
+     tokenize=True,
+     return_dict=True,
+     return_tensors="pt",
+ ).to(model.device)
+
+ outputs = model.generate(**inputs, max_new_tokens=40)
+ print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+ ```
+
+ ERROR:
+ ''')
+         traceback.print_exc(file=f)
+
+ finally:
+     from huggingface_hub import upload_file
+     upload_file(
+         path_or_fileobj='ArliAI_gpt-oss-120b-Derestricted_1.txt',
+         repo_id='model-metadata/code_execution_files',
+         path_in_repo='ArliAI_gpt-oss-120b-Derestricted_1.txt',
+         repo_type='dataset',
+     )