lole25 committed
Commit 709d55b
1 Parent(s): 55be290

Training in progress, step 1800

adapter_config.json CHANGED
@@ -19,10 +19,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "dense",
     "v_proj",
     "q_proj",
-    "k_proj",
-    "dense"
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
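The change to target_modules above reorders the same four entries ("dense" moves to the front and "k_proj" to the end); the set of adapted layers is unchanged. As a minimal sketch, assuming the peft library, this is roughly how such an adapter_config.json maps onto a LoraConfig; the rank, alpha, and dropout values below are illustrative placeholders, not values from this commit:

from peft import LoraConfig

# Sketch under assumptions: only target_modules and task_type come from the
# diff above; r, lora_alpha, and lora_dropout are hypothetical placeholders.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=["dense", "v_proj", "q_proj", "k_proj"],  # new order from this commit
    task_type="CAUSAL_LM",
)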
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f151e33d19265ee3395c7c03b8642db344c581e97e9385ebbb15089dabc9ab77
+oid sha256:d93b085024dfe4a5d4132a312cfb670b9c2fe3de8deddc3c70b525c43582bb6f
 size 167807296
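The new safetensors blob is the updated adapter weights (same size, 167807296 bytes, with a new content hash), presumably the step-1800 checkpoint named in the commit message. A hedged sketch of loading such an adapter with peft; the base-model and adapter identifiers are placeholders, since the commit does not name them:

import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

BASE_MODEL_ID = "base-model-id"            # hypothetical placeholder
ADAPTER_ID = "username/this-adapter-repo"  # hypothetical placeholder

# PeftModel.from_pretrained reads adapter_config.json and adapter_model.safetensors.
base = AutoModelForCausalLM.from_pretrained(BASE_MODEL_ID, torch_dtype=torch.float16)
model = PeftModel.from_pretrained(base, ADAPTER_ID)
model.eval()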
runs/May21_15-48-24_gpu4-119-5/events.out.tfevents.1716270669.gpu4-119-5.1755535.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:21ec9065fddc03647b8e968cb3be58df1c3816952edd7250f4d51cb32a485a94
-size 113184
+oid sha256:f81a981b16795f6b2dc6b7b83ea9d04a3dd393803ecf8e794283e31330bd8a26
+size 119524
runs/May21_19-10-01_gpu4-119-5/events.out.tfevents.1716282770.gpu4-119-5.1848158.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90ad1098c8efaad4a001f5813f3d2ea0a1bede3b01774625ee48c65014d3b0e2
+size 11253
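The file added under runs/ is a TensorBoard event log for the run started at May21_19-10-01. A small sketch, assuming the tensorboard package, of reading its scalars with EventAccumulator; the "train/loss" tag is an assumption and depends on what the trainer actually logged:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/May21_19-10-01_gpu4-119-5/events.out.tfevents.1716282770.gpu4-119-5.1848158.0"
acc = EventAccumulator(path)
acc.Reload()                             # parse the event file from disk
print(acc.Tags()["scalars"])             # list the scalar tags that were logged
for event in acc.Scalars("train/loss"):  # assumed tag name; pick one printed above
    print(event.step, event.value)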
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:683ee66b405c93cdef524a79d315785392848eca1bf93d025f11a185a676b965
+oid sha256:65ab343686c5e7dd5e87f1f57003341b4e12747cd84181779f228894f7c70c01
 size 5816
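training_args.bin is the serialized TrainingArguments object that transformers' Trainer writes alongside checkpoints; the new hash simply means its bytes changed between runs. A hedged sketch of inspecting it (transformers must be importable to unpickle the object, and recent PyTorch versions need weights_only=False):

import torch

# Unpickles a transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)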