keylazy committed
Commit 3df7803
1 Parent(s): 6b918eb

Training in progress, epoch 1

Files changed (3)
  1. config.json +4 -4
  2. model.safetensors +2 -2
  3. training_args.bin +1 -1
config.json CHANGED
@@ -6,14 +6,14 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
-  "hidden_size": 64,
+  "hidden_size": 128,
   "initializer_range": 0.02,
   "intermediate_size": 256,
   "max_position_embeddings": 2048,
   "model_type": "llama",
-  "num_attention_heads": 4,
-  "num_hidden_layers": 6,
-  "num_key_value_heads": 4,
+  "num_attention_heads": 8,
+  "num_hidden_layers": 3,
+  "num_key_value_heads": 8,
   "pad_token_id": 2,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4d56edf82cc0c96633c9e2107682cc0741c6b39fb7eeaba1f3748c9d35d7b053
-size 4142176
+oid sha256:b496f88943e84c035c10a32e57a6e60844895677e6449d0545646e87e37855a6
+size 7092848
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:035c8d78a9ef44969e4c434c05c56975218fac6772ffa3df4d3365f29a251773
+oid sha256:280890a7f5bb8b444428bbcbf9b37681807e561673129c0d859f8582f9bc219b
 size 4600
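
model.safetensors and training_args.bin are stored via Git LFS, so the diffs above only change the pointer files (blob SHA-256 and byte size), not the binary contents themselves. Below is a minimal sketch, assuming the updated file has been downloaded locally under its repo name, of checking it against the pointer fields.

```python
# Minimal sketch: verify a local file against the Git LFS pointer shown above.
# Assumption: "model.safetensors" has already been downloaded to the working dir.
import hashlib
from pathlib import Path

def lfs_fingerprint(path: str) -> tuple[str, int]:
    """Return (sha256 hex digest, size in bytes), the two fields an LFS pointer stores."""
    data = Path(path).read_bytes()
    return hashlib.sha256(data).hexdigest(), len(data)

oid, size = lfs_fingerprint("model.safetensors")
print(oid == "b496f88943e84c035c10a32e57a6e60844895677e6449d0545646e87e37855a6")
print(size == 7092848)
```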