0x0mom committed on
Commit
4e9202d
·
verified ·
1 Parent(s): c533df2

Upload StableLmForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +3 -3
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "_name_or_path": "../finetuning-s_r37",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 100257,
8
- "eos_token_id": 100257,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
 
1
  {
2
+ "_name_or_path": "../finetuning-s_r38",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 100289,
8
+ "eos_token_id": 100290,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d0aa63a950dbcc0a781ec23c5a6e892ffb855d2b7786e3ee8f6cade9f9289fd9
3
  size 3289069520
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:afcc9bdc0c6c30df174b06c6f5d9cc7db93eccce49879647389233545ab1d3d5
3
  size 3289069520