ariG23498 (HF Staff) committed
Commit 4c4559b · verified · 1 Parent(s): c877761

Upload ServiceNow-AI_Apriel-1.6-15b-Thinker_1.py with huggingface_hub

ServiceNow-AI_Apriel-1.6-15b-Thinker_1.py CHANGED
@@ -20,10 +20,11 @@
 # ///
 
 try:
-    # Use a pipeline as a high-level helper
-    from transformers import pipeline
+    # Load model directly
+    from transformers import AutoProcessor, AutoModelForVision2Seq
 
-    pipe = pipeline("image-text-to-text", model="ServiceNow-AI/Apriel-1.6-15b-Thinker")
+    processor = AutoProcessor.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")
+    model = AutoModelForVision2Seq.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")
     messages = [
         {
             "role": "user",
@@ -33,7 +34,16 @@ try:
             ]
         },
     ]
-    pipe(text=messages)
+    inputs = processor.apply_chat_template(
+        messages,
+        add_generation_prompt=True,
+        tokenize=True,
+        return_dict=True,
+        return_tensors="pt",
+    ).to(model.device)
+
+    outputs = model.generate(**inputs, max_new_tokens=40)
+    print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
     with open('ServiceNow-AI_Apriel-1.6-15b-Thinker_1.txt', 'w', encoding='utf-8') as f:
         f.write('Everything was good in ServiceNow-AI_Apriel-1.6-15b-Thinker_1.txt')
 except Exception as e:
@@ -48,10 +58,11 @@ except Exception as e:
     with open('ServiceNow-AI_Apriel-1.6-15b-Thinker_1.txt', 'a', encoding='utf-8') as f:
         import traceback
         f.write('''```CODE:
-# Use a pipeline as a high-level helper
-from transformers import pipeline
+# Load model directly
+from transformers import AutoProcessor, AutoModelForVision2Seq
 
-pipe = pipeline("image-text-to-text", model="ServiceNow-AI/Apriel-1.6-15b-Thinker")
+processor = AutoProcessor.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")
+model = AutoModelForVision2Seq.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")
 messages = [
     {
         "role": "user",
@@ -61,7 +72,16 @@ messages = [
         ]
     },
 ]
-pipe(text=messages)
+inputs = processor.apply_chat_template(
+    messages,
+    add_generation_prompt=True,
+    tokenize=True,
+    return_dict=True,
+    return_tensors="pt",
+).to(model.device)
+
+outputs = model.generate(**inputs, max_new_tokens=40)
+print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
 ```
 
 ERROR:
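
For reference, the change swaps the high-level `pipeline("image-text-to-text", ...)` helper for direct loading via `AutoProcessor` and `AutoModelForVision2Seq`. Below is a minimal standalone sketch of that path, stitched together from the added lines above; the image URL and prompt text inside `messages` are placeholders (the diff does not show the original content entries), and dtype/device placement is left at the library defaults.

```python
# Minimal sketch of the new direct-loading path, assuming placeholder message content.
from transformers import AutoProcessor, AutoModelForVision2Seq

processor = AutoProcessor.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")
model = AutoModelForVision2Seq.from_pretrained("ServiceNow-AI/Apriel-1.6-15b-Thinker")

messages = [
    {
        "role": "user",
        "content": [
            # Placeholder image and question -- not taken from the diff.
            {"type": "image", "url": "https://example.com/some_image.png"},
            {"type": "text", "text": "Describe this image."},
        ],
    },
]

# Tokenize the chat, run generation, and decode only the newly generated tokens.
inputs = processor.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=40)
print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
```

The slice `outputs[0][inputs["input_ids"].shape[-1]:]` drops the prompt tokens so only the newly generated text is decoded.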