BaoLocTown committed on
Commit bef6cbd
1 Parent(s): 0a49871

Training in progress, step 500

adapter_config.json CHANGED
@@ -16,10 +16,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
+    "k_proj",
     "o_proj",
-    "q_proj",
-    "k_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
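The change above only reorders the LoRA target_modules; the same four projection layers (k_proj, o_proj, v_proj, q_proj) are targeted before and after. A minimal sketch of an equivalent PEFT configuration, assuming the peft library; fields not visible in this diff (r, lora_alpha, dropout, the base model) are intentionally omitted and would need to match the rest of adapter_config.json:

from peft import LoraConfig

# Sketch only: mirrors the updated target_modules and task_type from
# adapter_config.json; all other LoraConfig fields are left at their
# defaults because they do not appear in this diff.
lora_config = LoraConfig(
    target_modules=["k_proj", "o_proj", "v_proj", "q_proj"],
    task_type="CAUSAL_LM",
)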
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e6817db399fb1143ae8f74cd4323df83008e79d3952de51393225d504f53040
+oid sha256:30d67e21dccf855a196cedc60253c1e90a1933a71768cd9698e7c3649c7a6703
 size 872450448
tokenizer_config.json CHANGED
@@ -34,7 +34,7 @@
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<unk>",
-  "padding_side": "left",
+  "padding_side": "right",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f400d23a26c288eaf6970820e5178501992c86abab9c76e64c18c0a49563759f
+oid sha256:16fd9c1d2cb430017342d11f9594cc289b052b3d10c314e2969610f637d7523e
 size 4664