robin committed
Commit c2d80ca
1 parent: b808ca4

gate_OutEffHop_opt-125m
all_results.json ADDED
@@ -0,0 +1 @@
+{"perplexity": 15.706686202168713, "model.decoder.final_layer_norm": 90.96276274729331, "model.decoder.layers.0": 0.4204075758269325, "model.decoder.layers.1": 0.4908598484700949, "model.decoder.layers.2": 0.5028251055120726, "model.decoder.layers.3": 0.691462532509645, "model.decoder.layers.4": 0.8262411430478096, "model.decoder.layers.5": 1.0756556153220116, "model.decoder.layers.6": 1.366795804172793, "model.decoder.layers.7": 1.5691746961852069, "model.decoder.layers.8": 2.1472622930385104, "model.decoder.layers.9": 3.6863469854770314, "model.decoder.layers.10": 6.168255855968336, "model.decoder.layers.11": 7.57078872866513, "model.decoder.layers.0.fc2": 0.1039871887996039, "model.decoder.layers.1.fc2": 0.05608035337503627, "model.decoder.layers.2.fc2": 0.03349900133207189, "model.decoder.layers.3.fc2": 0.06225394718614727, "model.decoder.layers.4.fc2": 0.06453566397655025, "model.decoder.layers.5.fc2": 0.07603821023602894, "model.decoder.layers.6.fc2": 0.11477861616001905, "model.decoder.layers.7.fc2": 0.16301063338992816, "model.decoder.layers.8.fc2": 0.2619496168986678, "model.decoder.layers.9.fc2": 0.414940554808086, "model.decoder.layers.10.fc2": 0.4336998329416942, "model.decoder.layers.11.fc2": 0.28299739546155805, "model.decoder.layers.0.final_layer_norm": 0.45984292787960546, "model.decoder.layers.1.final_layer_norm": 0.4209096071819691, "model.decoder.layers.2.final_layer_norm": 0.5994071354647184, "model.decoder.layers.3.final_layer_norm": 0.46284601323922103, "model.decoder.layers.4.final_layer_norm": 0.5087763514933797, "model.decoder.layers.5.final_layer_norm": 0.5348684589040256, "model.decoder.layers.6.final_layer_norm": 0.5873251914226254, "model.decoder.layers.7.final_layer_norm": 0.655958993094573, "model.decoder.layers.8.final_layer_norm": 0.814051132275788, "model.decoder.layers.9.final_layer_norm": 0.9984980142119612, "model.decoder.layers.10.final_layer_norm": 1.5374041555680895, "model.decoder.layers.11.final_layer_norm": 1.5156749826808207, "model.decoder.layers.0.self_attn.out_proj": 0.2508772022529335, "model.decoder.layers.1.self_attn.out_proj": 0.24341993913267063, "model.decoder.layers.2.self_attn.out_proj": 0.07271888871446504, "model.decoder.layers.3.self_attn.out_proj": 0.1664157649731667, "model.decoder.layers.4.self_attn.out_proj": 0.1435992467264257, "model.decoder.layers.5.self_attn.out_proj": 0.12288297165847165, "model.decoder.layers.6.self_attn.out_proj": 0.20423455739299612, "model.decoder.layers.7.self_attn.out_proj": 0.2692112508296348, "model.decoder.layers.8.self_attn.out_proj": 0.4702520902364016, "model.decoder.layers.9.self_attn.out_proj": 0.7821362368018402, "model.decoder.layers.10.self_attn.out_proj": 1.6762599524511592, "model.decoder.layers.11.self_attn.out_proj": 2.955877756794139, "model.decoder.layers.0.self_attn_layer_norm": 4.502585898500163, "model.decoder.layers.1.self_attn_layer_norm": 4.977919626135461, "model.decoder.layers.2.self_attn_layer_norm": 6.711245270361387, "model.decoder.layers.3.self_attn_layer_norm": 9.585455415352463, "model.decoder.layers.4.self_attn_layer_norm": 7.619464027974392, "model.decoder.layers.5.self_attn_layer_norm": 6.711193338558368, "model.decoder.layers.6.self_attn_layer_norm": 7.448162097836592, "model.decoder.layers.7.self_attn_layer_norm": 6.470245250931973, "model.decoder.layers.8.self_attn_layer_norm": 5.9982524145438045, "model.decoder.layers.9.self_attn_layer_norm": 5.239535203123835, "model.decoder.layers.10.self_attn_layer_norm": 5.41346654508519, "model.decoder.layers.11.self_attn_layer_norm": 6.829138810687183, "max_inf_norm": 90.96276274729331, "max_ffn_inf_norm": 0.4336998329416942, "max_layer_inf_norm": 7.57078872866513, "avg_kurtosis": 11.205696386600962, "max_kurtosis": 111.01246542132782, "max_kurtosis_layers": 31.2915544352952}
checkpoints/checkpoint_125000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:289ecb4897346feccbe75a39ed0b39742316a84bb45976e8cd0c98e415cf72cc
+size 496331808
checkpoints/checkpoint_125000/optimizer.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e08a15572caae4409868d713f8a73b206a090646d407c5adcb12a028b7400dbd
+size 992954750
checkpoints/checkpoint_125000/random_states_0.pkl ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a62622f9ba131fe899729c87384413488db7506aa92557f39afda370a66d40dd
+size 14540
checkpoints/checkpoint_125000/scaler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e6ed5313f128b91696fef11f6607d81abc7c1b8bb42c881f65d15ac0585ccc4f
+size 988
checkpoints/checkpoint_125000/scheduler.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d37a131294d0b27a11c8580fc6096736b28a4ddd0f827583794798cd35582761
+size 1064
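Each of the checkpoint entries above is a git-LFS pointer rather than the binary itself: the pointer records the spec version, the blob's sha256 object id, and its size in bytes. A hedged sketch for verifying a downloaded blob against its pointer fields (the path, oid, and size are taken from the model.safetensors entry above):

```python
import hashlib

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size of its git-LFS pointer."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

ok = verify_lfs_object(
    "checkpoints/checkpoint_125000/model.safetensors",
    "289ecb4897346feccbe75a39ed0b39742316a84bb45976e8cd0c98e415cf72cc",
    496331808,
)
print("model.safetensors verified" if ok else "hash/size mismatch")
```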
config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_remove_final_layer_norm": false,
+  "activation_function": "relu",
+  "architectures": [
+    "OPTForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 3072,
+  "hidden_size": 768,
+  "init_std": 0.006,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 512,
+  "model_type": "opt",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "torch_dtype": "float32",
+  "transformers_version": "4.31.0",
+  "use_cache": true,
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 768
+}
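config.json describes a standard OPT-125m shape: 12 decoder layers, hidden size 768, 12 attention heads, ReLU activations, and a 512-token context. A minimal loading sketch, assuming the repo files live in the current directory and the checkpoint remains compatible with the stock transformers OPTForCausalLM:

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Read the config.json shown above and build a randomly initialized model
# with the same shape (use from_pretrained instead to load trained weights).
config = AutoConfig.from_pretrained(".")
model = AutoModelForCausalLM.from_config(config)

assert config.num_hidden_layers == 12 and config.hidden_size == 768
n_params = sum(p.numel() for p in model.parameters())
print(f"{n_params / 1e6:.1f}M parameters")  # roughly 125M
```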
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.31.0"
+}
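The generation defaults were derived from the model config (_from_model_config: true) and only pin the special-token ids. They can be loaded and inspected directly:

```python
from transformers import GenerationConfig

# Reads generation_config.json from the given directory.
gen_config = GenerationConfig.from_pretrained(".")
print(gen_config.bos_token_id, gen_config.eos_token_id, gen_config.pad_token_id)
# expected: 2 2 1
```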
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cefdad9675d141f479cec20bebfaee1eb5e8d5a267cff30fa7bdc5455f12b8a
+size 496440090
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,40 @@
+{
+  "add_bos_token": true,
+  "add_prefix_space": false,
+  "bos_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "clean_up_tokenization_spaces": true,
+  "eos_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "errors": "replace",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": {
+    "__type": "AddedToken",
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
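tokenizer_config.json, together with vocab.json and merges.txt, defines a GPT2-style byte-level BPE tokenizer. Note the OPT convention of reusing `</s>` as bos/eos/unk and `<pad>` for padding, matching the token ids (bos/eos 2, pad 1) in config.json above. A short usage sketch, again assuming the repo files are local:

```python
from transformers import AutoTokenizer

# Loads tokenizer_config.json, vocab.json, and merges.txt from the directory.
tokenizer = AutoTokenizer.from_pretrained(".")

ids = tokenizer("Hello world").input_ids
print(ids)                    # add_bos_token is true, so ids[0] == 2 ("</s>")
print(tokenizer.decode(ids))  # round-trips the text, bos token included
```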
vocab.json ADDED
The diff for this file is too large to render. See raw diff