PhilSad committed
Commit 24ce9e5 (1 parent: 3d63ff0)

Training in progress, step 100

adapter_config.json CHANGED
@@ -19,9 +19,9 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "w1",
+    "w2",
     "w3",
-    "w2"
+    "w1"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
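The change above only reorders the entries in target_modules; the same three module names (w1, w2, w3) are targeted before and after. As a minimal sketch, assuming the adapter was produced with the peft library (the repo does not include the training script, so anything beyond the fields shown in the diff is illustrative):

from peft import LoraConfig

# Order of target_modules is not significant; peft matches module names as a set.
lora_config = LoraConfig(
    target_modules=["w1", "w2", "w3"],
    task_type="CAUSAL_LM",
    use_rslora=False,
)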
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:12c3ad3c68f246dc008c529677aebf4d9042934aeee0dae8689d89fc3c80eea0
+oid sha256:8c679080d7a87fddc7531fdc8bb0db691e1422cffffd422412c94a1dc8ee2709
 size 453213272
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d924f0dc6220ec179ba3a14613b77a4a9c2a40175c649c9bc5a3b978be1c554b
+oid sha256:56a594af23a772ac3a7811f4ee03c86556f6a3dd4293793fc016856150af38a9
 size 4664
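Both binary files above are Git LFS pointers: between the two revisions only the sha256 oid changes, while the recorded size stays the same (453213272 bytes for the adapter weights, 4664 bytes for the training args). A minimal sketch of checking a downloaded copy against the new pointer, assuming the file sits in the current directory (the path is illustrative):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so a large safetensors checkpoint does not need to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "8c679080d7a87fddc7531fdc8bb0db691e1422cffffd422412c94a1dc8ee2709"
print(sha256_of("adapter_model.safetensors") == expected)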