tharunkrishna1611 committed
Commit c87a00b (1 parent: 63d346c)

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -10,13 +10,13 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 32,
+  "lora_alpha": 16,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 64,
+  "r": 12,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0aaea7efa426cd2dfb75c6049f934866e1705a5487a983b9e671637ad5e41f53
-size 125845344
+oid sha256:5419f902407694f01a204f943ec74b55cdcec934697dff2beb35eafba4a087ed
+size 23609048
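
The adapter checkpoint shrinks from about 126 MB to about 24 MB. Assuming the file is dominated by the LoRA A/B matrices, whose parameter count scales linearly with the rank, the new size can be sanity-checked in a line of Python; the small remainder is presumably safetensors metadata.

# Rough check: adapter size should scale ~linearly with r (64 -> 12).
old_size = 125_845_344
print(old_size * 12 / 64)  # ~23.6e6 bytes, close to the observed 23,609,048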
tokenizer.json CHANGED
@@ -2,7 +2,7 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length": 100,
+    "max_length": 512,
     "strategy": "LongestFirst",
     "stride": 0
   },
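
The tokenizer's truncation limit moves from 100 to 512 tokens (right-truncated, LongestFirst strategy, no stride). The same behaviour can be requested at encode time through the transformers API; a minimal sketch, with the checkpoint path as a placeholder:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")  # placeholder path
encoded = tokenizer(
    "an example training sequence",
    truncation=True,   # corresponds to "strategy": "LongestFirst", "direction": "Right"
    max_length=512,    # matches the new value in tokenizer.json
)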
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4112b9c88d7eddd4b54cb6b093ea04e2926344a2137b84ec8d007b482212774b
+oid sha256:7472e8171a48752db67710e4e603d4683e6b42c1bc188ac064f4ebcffdd98b1d
 size 5240
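
training_args.bin keeps the same size (5,240 bytes) because only hyperparameter values change, not the structure of the file; it is typically the pickled TrainingArguments object that the Hugging Face Trainer saves next to each checkpoint. A sketch for inspecting it (recent PyTorch defaults to weights_only=True, so the flag must be set explicitly for a pickled object like this):

import torch

# Load the serialized transformers.TrainingArguments and print a few fields.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)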