lapp0 committed
Commit 09d6849
1 Parent(s): f5f6512

Training in progress, step 61875

README.md CHANGED
@@ -44,42 +44,42 @@ More information needed
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | tinystoriesppl | zhwikippl |
  | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
  | **teacher eval** | | 43.25 | 61.25 | | | | | 11.6875 | 19.125 |
- | 0 | 0 | 2473901162496.0 | 170424302305280.0 | 30.7740 | 25.2349 | 99.069 | 12.403 | 4060086272.0 | 71468255805440.0 |
- | 2500 | 0.0404 | 1184.0 | 11776.0 | 9.8284 | 25.2487 | 99.015 | 12.397 | 784.0 | 12800.0 |
- | 5000 | 0.0808 | 412.0 | 2272.0 | 8.3993 | 25.2618 | 98.964 | 12.39 | 290.0 | 434.0 |
- | 7500 | 0.1212 | 245.0 | 916.0 | 7.6586 | 25.2887 | 98.858 | 12.377 | 218.0 | 195.0 |
- | 10000 | 0.1616 | 182.0 | 676.0 | 7.2415 | 25.2556 | 98.988 | 12.393 | 164.0 | 190.0 |
- | 12500 | 0.2020 | 131.0 | 504.0 | 6.6883 | 25.2962 | 98.829 | 12.373 | 115.5 | 158.0 |
- | 15000 | 0.2424 | 112.5 | 432.0 | 6.4127 | 25.2743 | 98.915 | 12.384 | 89.5 | 144.0 |
- | 17500 | 0.2828 | 93.5 | 344.0 | 6.1979 | 25.214 | 99.151 | 12.414 | 70.5 | 127.0 |
- | 20000 | 0.3232 | 75.0 | 270.0 | 5.9310 | 25.2265 | 99.102 | 12.408 | 63.75 | 128.0 |
- | 22500 | 0.3636 | 67.0 | 209.0 | 5.6634 | 25.2495 | 99.012 | 12.396 | 49.75 | 83.5 |
- | 25000 | 0.4040 | 63.5 | 192.0 | 5.5561 | 25.2476 | 99.019 | 12.397 | 44.25 | 86.0 |
- | 27500 | 0.4444 | 58.0 | 192.0 | 5.4855 | 25.2834 | 98.879 | 12.38 | 40.25 | 70.5 |
- | 30000 | 0.4848 | 58.75 | 195.0 | 5.4646 | 25.2547 | 98.992 | 12.394 | 41.75 | 65.0 |
- | 32500 | 0.5253 | 58.5 | 171.0 | 5.4511 | 25.2 | 99.206 | 12.421 | 40.0 | 60.0 |
- | 35000 | 0.5657 | 57.0 | 165.0 | 5.3711 | 25.2873 | 98.864 | 12.378 | 36.75 | 49.25 |
- | 37500 | 0.6061 | 57.75 | 155.0 | 5.3390 | 25.2952 | 98.833 | 12.374 | 37.75 | 54.0 |
- | 40000 | 0.6465 | 55.75 | 154.0 | 5.3225 | 25.2919 | 98.846 | 12.376 | 34.5 | 57.0 |
- | 42500 | 0.6869 | 54.75 | 146.0 | 5.2939 | 25.2713 | 98.926 | 12.386 | 35.5 | 49.0 |
- | 45000 | 0.7273 | 50.75 | 133.0 | 5.1563 | 25.2812 | 98.888 | 12.381 | 30.0 | 48.0 |
- | 47500 | 0.7677 | 50.75 | 124.5 | 5.1271 | 25.3128 | 98.764 | 12.365 | 29.375 | 35.25 |
- | 50000 | 0.8081 | 49.75 | 123.0 | 5.1093 | 25.2369 | 99.061 | 12.402 | 28.75 | 37.5 |
- | 52500 | 0.8485 | 48.5 | 119.5 | 5.0960 | 25.2934 | 98.84 | 12.375 | 28.875 | 34.5 |
- | 55000 | 0.8889 | 48.5 | 118.0 | 5.0747 | 25.23 | 99.088 | 12.406 | 28.0 | 33.25 |
- | 57500 | 0.9293 | 48.0 | 117.0 | 5.0698 | 25.2235 | 99.114 | 12.409 | 27.75 | 32.0 |
- | 60000 | 0.9697 | 48.0 | 117.0 | 5.0651 | 25.2107 | 99.164 | 12.415 | 27.75 | 31.875 |
- | 61875 | 1.0 | 48.0 | 117.5 | 5.0643 | 25.1856 | 99.263 | 12.428 | 27.625 | 32.0 |
+ | 0 | 0 | 2473901162496.0 | 170424302305280.0 | 30.7740 | 30.8629 | 81.003 | 10.142 | 4060086272.0 | 71468255805440.0 |
+ | 2500 | 0.0404 | 1176.0 | 11520.0 | 9.8202 | 30.363 | 82.337 | 10.309 | 776.0 | 15040.0 |
+ | 5000 | 0.0808 | 412.0 | 2240.0 | 8.3984 | 30.2576 | 82.624 | 10.344 | 290.0 | 438.0 |
+ | 7500 | 0.1212 | 245.0 | 908.0 | 7.6616 | 30.2286 | 82.703 | 10.354 | 218.0 | 198.0 |
+ | 10000 | 0.1616 | 183.0 | 672.0 | 7.2414 | 30.2767 | 82.572 | 10.338 | 165.0 | 205.0 |
+ | 12500 | 0.2020 | 132.0 | 504.0 | 6.6901 | 30.2951 | 82.522 | 10.332 | 114.5 | 155.0 |
+ | 15000 | 0.2424 | 113.0 | 434.0 | 6.4131 | 30.208 | 82.76 | 10.362 | 89.5 | 137.0 |
+ | 17500 | 0.2828 | 93.0 | 340.0 | 6.1935 | 30.2459 | 82.656 | 10.349 | 71.0 | 130.0 |
+ | 20000 | 0.3232 | 74.5 | 280.0 | 5.9346 | 30.3009 | 82.506 | 10.33 | 64.5 | 117.0 |
+ | 22500 | 0.3636 | 66.5 | 213.0 | 5.6624 | 30.1305 | 82.972 | 10.388 | 50.75 | 78.0 |
+ | 25000 | 0.4040 | 63.5 | 197.0 | 5.5619 | 30.139 | 82.949 | 10.385 | 45.25 | 79.0 |
+ | 27500 | 0.4444 | 57.75 | 209.0 | 5.4901 | 30.2173 | 82.734 | 10.358 | 40.5 | 79.0 |
+ | 30000 | 0.4848 | 60.25 | 202.0 | 5.4756 | 30.2106 | 82.752 | 10.361 | 42.5 | 67.5 |
+ | 32500 | 0.5253 | 58.5 | 171.0 | 5.4479 | 30.148 | 82.924 | 10.382 | 39.5 | 58.75 |
+ | 35000 | 0.5657 | 57.75 | 167.0 | 5.3742 | 30.1599 | 82.892 | 10.378 | 37.0 | 50.25 |
+ | 37500 | 0.6061 | 58.5 | 156.0 | 5.3426 | 30.1592 | 82.893 | 10.378 | 38.25 | 59.75 |
+ | 40000 | 0.6465 | 54.25 | 160.0 | 5.3271 | 30.1125 | 83.022 | 10.394 | 34.25 | 54.5 |
+ | 42500 | 0.6869 | 54.5 | 146.0 | 5.2933 | 30.1199 | 83.002 | 10.392 | 35.75 | 56.5 |
+ | 45000 | 0.7273 | 51.0 | 132.0 | 5.1589 | 30.1122 | 83.023 | 10.394 | 30.375 | 41.75 |
+ | 47500 | 0.7677 | 50.5 | 124.5 | 5.1270 | 30.1744 | 82.852 | 10.373 | 29.5 | 36.25 |
+ | 50000 | 0.8081 | 50.0 | 123.0 | 5.1102 | 30.1217 | 82.997 | 10.391 | 29.0 | 38.0 |
+ | 52500 | 0.8485 | 48.75 | 119.5 | 5.0968 | 30.2475 | 82.651 | 10.348 | 29.25 | 33.75 |
+ | 55000 | 0.8889 | 48.75 | 117.0 | 5.0761 | 30.1367 | 82.955 | 10.386 | 28.625 | 33.75 |
+ | 57500 | 0.9293 | 48.0 | 116.5 | 5.0695 | 30.1291 | 82.976 | 10.389 | 28.125 | 31.875 |
+ | 60000 | 0.9697 | 48.25 | 116.0 | 5.0657 | 30.2483 | 82.649 | 10.348 | 28.125 | 31.875 |
+ | 61875 | 1.0 | 48.0 | 116.5 | 5.0641 | 30.2256 | 82.711 | 10.355 | 28.125 | 32.0 |

  # Resource Usage Comparison

  - VRAM Use: 7.7843 GB

  # Distillation (Teacher -> Student) Architecture Difference:

  - **Architecture**: `GPT2LMHeadModel` -> `GPT2LMHeadModel`
  - **Total Parameters**: 124,439,808 -> 124,439,808
  - **Data Type (dtype)**: torch.bfloat16 -> torch.bfloat16
  - **Model Size**: 0.24 GB -> 0.24 GB

  <details>
@@ -122,7 +122,7 @@ The following hyperparameters were used during training:
  - num_epochs: `1.0`
  - distillation_objective: `DistillationObjective(logits_loss_component=LossComponent(label=logits, weight=1, loss_fn=kl), attn_loss_component=LossComponent(label=attn, weight=10.0, loss_fn=cos, layer_mapper=layer-2))`
  - train_embeddings: `True`
- - lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7f6f2143c700>`
+ - lr_scheduler: `<torch.optim.lr_scheduler.LambdaLR object at 0x7f02802ab190>`
  - student_model_name_or_path: `None`
  - student_config_name_or_path: `None`
  - student_model_config: `None`
@@ -154,6 +154,6 @@ The following hyperparameters were used during training:

  # Framework Versions
  - Distily 0.2.0
- - Transformers 4.44.1
- - Pytorch 2.5.0.dev20240821+cu121
+ - Transformers 4.44.0
+ - Pytorch 2.3.0
  - Datasets 2.21.0
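The `distillation_objective` recorded above pairs a KL-divergence loss on logits (weight 1) with a cosine loss on attention maps (weight 10.0, `layer-2` mapper). Below is a minimal sketch of such an objective, assuming both models are run with `output_attentions=True`; the function name and the one-to-one layer pairing are illustrative assumptions, not Distily's actual implementation.

```python
import torch.nn.functional as F

def distillation_loss(student_out, teacher_out):
    # KL divergence between teacher and student next-token distributions
    # (logits_loss_component: weight=1, loss_fn=kl).
    s_logp = F.log_softmax(student_out.logits, dim=-1)
    t_logp = F.log_softmax(teacher_out.logits, dim=-1)
    logits_loss = F.kl_div(s_logp, t_logp, log_target=True, reduction="batchmean")

    # Cosine distance between attention maps (attn_loss_component:
    # weight=10.0, loss_fn=cos). Pairing student layer i with teacher
    # layer i is an assumption; the `layer-2` mapper's exact semantics
    # are not visible in this diff.
    attn_loss = 0.0
    for s_attn, t_attn in zip(student_out.attentions, teacher_out.attentions):
        cos = F.cosine_similarity(s_attn.flatten(1), t_attn.flatten(1), dim=-1)
        attn_loss = attn_loss + (1.0 - cos).mean()
    attn_loss = attn_loss / len(student_out.attentions)

    return logits_loss + 10.0 * attn_loss
```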
config.json CHANGED
@@ -33,7 +33,7 @@
     }
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.1",
+  "transformers_version": "4.44.0",
   "use_cache": true,
   "vocab_size": 50257
 }
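Since `config.json` pins `torch_dtype` to bfloat16, the checkpoint can be loaded without upcasting by passing the dtype explicitly. A hedged sketch; the checkpoint path is a placeholder.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# torch_dtype is "bfloat16" in config.json; pass it explicitly so the
# weights stay in bf16 instead of being upcast to float32 on load.
model = AutoModelForCausalLM.from_pretrained(
    "path/to/this-checkpoint",   # placeholder for this repo id or a local path
    torch_dtype=torch.bfloat16,
)
tokenizer = AutoTokenizer.from_pretrained("gpt2")  # vocab_size 50257 matches GPT-2
```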
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 50256,
   "eos_token_id": 50256,
-  "transformers_version": "4.44.1"
+  "transformers_version": "4.44.0"
 }
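`generation_config.json` only pins the GPT-2 special-token ids (50256 for both `bos_token_id` and `eos_token_id`). A usage sketch, again with a placeholder checkpoint path:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("path/to/this-checkpoint")  # placeholder
tokenizer = AutoTokenizer.from_pretrained("gpt2")

inputs = tokenizer("Once upon a time", return_tensors="pt")
# 50256 is GPT-2's <|endoftext|>, per generation_config.json.
output = model.generate(**inputs, max_new_tokens=40, eos_token_id=50256)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```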
logs/attn_loss_fn=cos, attn_weight=10.0, projector=ensemble/completed.flag ADDED
File without changes
logs/attn_loss_fn=raw_mse, attn_weight=10.0, projector=mlp/events.out.tfevents.1724321123.f383272e719b ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4016ce1b427cdc89d42d3a9e8f3e821ca7793a0fff7c9e822fdc28cd74a56a35
+ size 29632520
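The added tfevents file is a TensorBoard event log, stored as a Git LFS pointer. Once the 29.6 MB blob itself is fetched, it can be inspected with TensorBoard's event accumulator; a sketch (which scalar tags it contains is not visible from this diff):

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the directory holding the tfevents file
# (directory name reproduced from the log path above).
acc = EventAccumulator("logs/attn_loss_fn=raw_mse, attn_weight=10.0, projector=mlp")
acc.Reload()

for tag in acc.Tags()["scalars"]:
    events = acc.Scalars(tag)
    print(tag, [(e.step, e.value) for e in events[:3]])  # first few points per tag
```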
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1b060c70d391d4d3ce37a1d05c4ce9af13f05d890d6431d90a4ff68ace5087c9
+ oid sha256:20d8621d6d94e804ca31a4e420d3c62c86caa7388f9a1c731d1f8ab62654fb38
  size 248894656
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:59f5bc3895d33642020ca8bbc75907081d958c6ed6122a4adfc5204ea2fd68fc
+ oid sha256:583096a2f5136fc2f40e01ef9082bd0bd038e85cf116331465ccd250dd8d23a8
  size 1017899144
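`model.safetensors` and `training_args.bin` above are Git LFS pointer files: a spec version line, a sha256 `oid`, and a byte `size`. A minimal sketch that verifies a downloaded blob against such a pointer; the helper name is illustrative:

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_text: str, blob_path: str) -> bool:
    """Check a downloaded blob against a git-lfs pointer file."""
    # Each pointer line is "key value"; parse into a dict.
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    expected_oid = fields["oid"].split(":", 1)[1]  # drop the "sha256:" prefix
    expected_size = int(fields["size"])

    blob = Path(blob_path)
    if blob.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with blob.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid
```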