jon-tow committed
Commit: 2aa2607
Parent: d49f6a2

bump(transformers): use upstream implementation

Files changed (3):
  1. README.md (+0, -2)
  2. config.json (+1, -5)
  3. generation_config.json (+1, -1)
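
In practice, this commit means a sufficiently recent transformers install resolves the stablelm model type to its built-in classes, so the repository's custom modeling files and trust_remote_code=True are no longer needed. A minimal sketch of that check, assuming a transformers release at least as new as the 4.40.0 recorded in the updated config:

# Sketch (assumption: transformers >= 4.40.0, per the updated config.json below);
# the "stablelm" model type resolves to built-in classes, not remote code.
from transformers import AutoConfig, StableLmForCausalLM

config = AutoConfig.from_pretrained("stabilityai/stablelm-2-12b")
print(type(config).__name__)           # StableLmConfig
print(StableLmForCausalLM.__module__)  # transformers.models.stablelm.modeling_stablelm
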
README.md CHANGED
@@ -34,7 +34,6 @@ tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-12b")
 model = AutoModelForCausalLM.from_pretrained(
   "stabilityai/stablelm-2-12b",
   torch_dtype="auto",
-  trust_remote_code=True
 )
 model.cuda()
 inputs = tokenizer("The weather is always wonderful", return_tensors="pt").to(model.device)
@@ -60,7 +59,6 @@ model = AutoModelForCausalLM.from_pretrained(
   "stabilityai/stablelm-2-12b",
   torch_dtype="auto",
   attn_implementation="flash_attention_2",
-  trust_remote_code=True
 )
 model.cuda()
 inputs = tokenizer("The weather is always wonderful", return_tensors="pt").to(model.device)
 
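Put together, the updated README snippet loads the checkpoint without trust_remote_code. A consolidated sketch of the two hunks above (the attn_implementation line is optional and additionally assumes the flash-attn package is installed):

from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-12b")
model = AutoModelForCausalLM.from_pretrained(
    "stabilityai/stablelm-2-12b",
    torch_dtype="auto",
    # attn_implementation="flash_attention_2",  # optional; requires flash-attn
)
model.cuda()
inputs = tokenizer("The weather is always wonderful", return_tensors="pt").to(model.device)
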
config.json CHANGED
@@ -3,10 +3,6 @@
     "StableLmForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_stablelm.StableLmConfig",
-    "AutoModelForCausalLM": "modeling_stablelm.StableLmForCausalLM"
-  },
   "bos_token_id": 100257,
   "eos_token_id": 100257,
   "hidden_act": "silu",
@@ -27,7 +23,7 @@
   "rotary_scaling_factor": 1.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.0.dev0",
+  "transformers_version": "4.40.0",
   "use_cache": true,
   "use_norm_bias": false,
   "use_parallel_residual": true,
 
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 100257,
   "eos_token_id": 100257,
   "pad_token_id": 100257,
-  "transformers_version": "4.39.0.dev0"
+  "transformers_version": "4.40.0"
 }
 
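For completeness, a generation sketch that passes the pad token id recorded above explicitly; the prompt mirrors the README example, while max_new_tokens is illustrative:

from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-12b")
model = AutoModelForCausalLM.from_pretrained("stabilityai/stablelm-2-12b", torch_dtype="auto")
model.cuda()

inputs = tokenizer("The weather is always wonderful", return_tensors="pt").to(model.device)
tokens = model.generate(
    **inputs,
    max_new_tokens=64,    # illustrative
    pad_token_id=100257,  # bos/eos/pad id from generation_config.json
)
print(tokenizer.decode(tokens[0], skip_special_tokens=True))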