Yaning1001 committed
Commit 81dc001 (verified)
Parent: 571943b

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. .gitattributes +59 -35
  2. gpt2_no_positional_encoding_model.py +427 -0
  3. hop_surprisal/hop_surprisal.ipynb +223 -0
  4. impossible_llm.yaml +154 -0
  5. impossible_llm_update.yaml +162 -0
  6. requirements.txt +94 -0
  7. requirements_1.txt +51 -0
  8. requirements_2.txt +82 -0
  9. test.py +23 -0
  10. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/refs/main +1 -0
  11. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/snapshots/5cc0ffe09ee49f7be6ca7c794ee6bd7245e84e60/generation_config.json +9 -0
  12. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/snapshots/5cc0ffe09ee49f7be6ca7c794ee6bd7245e84e60/model.safetensors.index.json +261 -0
  13. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1050/tokenizer.json +3 -0
  14. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1050/tokenizer_config.json +2078 -0
  15. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/config.json +36 -0
  16. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_0.pth +3 -0
  17. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_1.pth +3 -0
  18. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_2.pth +3 -0
  19. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_3.pth +3 -0
  20. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_4.pth +3 -0
  21. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_5.pth +3 -0
  22. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_6.pth +3 -0
  23. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/scheduler.pt +3 -0
  24. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/training_args.bin +3 -0
  25. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/config.json +36 -0
  26. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/generation_config.json +9 -0
  27. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/latest +1 -0
  28. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/model.safetensors.index.json +262 -0
  29. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/special_tokens_map.json +23 -0
  30. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/trainer_state.json +96 -0
  31. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/config.json +36 -0
  32. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/generation_config.json +9 -0
  33. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/latest +1 -0
  34. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/model.safetensors.index.json +262 -0
  35. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_2.pth +3 -0
  36. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_6.pth +3 -0
  37. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/special_tokens_map.json +23 -0
  38. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/tokenizer_config.json +2078 -0
  39. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/trainer_state.json +103 -0
  40. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/zero_to_fp32.py +604 -0
  41. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/config.json +36 -0
  42. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/generation_config.json +9 -0
  43. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/latest +1 -0
  44. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_3.pth +3 -0
  45. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_6.pth +3 -0
  46. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/scheduler.pt +3 -0
  47. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/training_args.bin +3 -0
  48. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/zero_to_fp32.py +604 -0
  49. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/config.json +36 -0
  50. train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/generation_config.json +9 -0
.gitattributes CHANGED
@@ -1,35 +1,59 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ data/babylm_data/* filter=lfs diff=lfs merge=lfs -text
+ data/Perturbed_data/* filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_4.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_0.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/rng_state_4.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-2080/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_6.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1950/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/rng_state_0.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1050/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/rng_state_4.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-600/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-300/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_6.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_4.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_0.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_4.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_6.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_2.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_6.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/rng_state_5.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-900/scheduler.pt filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/training_args.bin filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/rng_state_1.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1950/rng_state_0.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-450/rng_state_3.pth filter=lfs diff=lfs merge=lfs -text
+ train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_6.pth filter=lfs diff=lfs merge=lfs -text
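The entries above route the BabyLM data directories and the per-checkpoint training artifacts (RNG states, schedulers, tokenizer files, training args) through Git LFS rather than the regular object store; the first two entries are directory globs, the rest pin individual files. As a rough illustration of how the two glob entries match paths, here is a hedged Python sketch using fnmatch as a stand-in for Git's own wildcard matching (the file names below are made up):

from fnmatch import fnmatch

paths = [
    "data/babylm_data/train_10M.txt",
    "data/Perturbed_data/babylm_reverse_full_10M.txt",
    "README.md",
]
lfs_globs = ["data/babylm_data/*", "data/Perturbed_data/*"]
for p in paths:
    # Only the first two paths fall under the LFS-tracked globs.
    tracked = any(fnmatch(p, g) for g in lfs_globs)
    print(p, "->", "LFS" if tracked else "regular git")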
gpt2_no_positional_encoding_model.py ADDED
@@ -0,0 +1,427 @@
+ # gpt2_no_positional_encoding_model.py
+ # Adapted from Huggingface's transformers library
+
+ import torch
+ from transformers.models.gpt2.modeling_gpt2 import GPT2Block, GPT2PreTrainedModel
+ from transformers.modeling_outputs import CausalLMOutputWithCrossAttentions, BaseModelOutputWithPastAndCrossAttentions
+ from transformers.utils.model_parallel_utils import assert_device_map, get_device_map
+ from torch import nn
+ from torch.nn import CrossEntropyLoss
+ from typing import Optional, Tuple, Union
+
+ class GPT2NoPositionalEncodingModel(GPT2PreTrainedModel):
+     def __init__(self, config):
+         super().__init__(config)
+
+         self.embed_dim = config.hidden_size
+
+         self.wte = nn.Embedding(config.vocab_size, self.embed_dim)
+
+         self.drop = nn.Dropout(config.embd_pdrop)
+         self.h = nn.ModuleList([GPT2Block(config, layer_idx=i) for i in range(config.num_hidden_layers)])
+         self.ln_f = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_epsilon)
+
+         # Model parallel
+         self.model_parallel = False
+         self.device_map = None
+         self.gradient_checkpointing = False
+
+         # Initialize weights and apply final processing
+         self.post_init()
+
+     def parallelize(self, device_map=None):
+         # Check validity of device_map
+         self.device_map = (
+             get_device_map(len(self.h), range(torch.cuda.device_count())) if device_map is None else device_map
+         )
+         assert_device_map(self.device_map, len(self.h))
+         self.model_parallel = True
+         self.first_device = "cpu" if "cpu" in self.device_map.keys() else "cuda:" + str(min(self.device_map.keys()))
+         self.last_device = "cuda:" + str(max(self.device_map.keys()))
+         self.wte = self.wte.to(self.first_device)
+         # Load onto devices
+         for k, v in self.device_map.items():
+             for block in v:
+                 cuda_device = "cuda:" + str(k)
+                 self.h[block] = self.h[block].to(cuda_device)
+         # ln_f to last
+         self.ln_f = self.ln_f.to(self.last_device)
+
+     def deparallelize(self):
+         self.model_parallel = False
+         self.device_map = None
+         self.first_device = "cpu"
+         self.last_device = "cpu"
+         self.wte = self.wte.to("cpu")
+         for index in range(len(self.h)):
+             self.h[index] = self.h[index].to("cpu")
+         self.ln_f = self.ln_f.to("cpu")
+         torch.cuda.empty_cache()
+
+     def get_input_embeddings(self):
+         return self.wte
+
+     def set_input_embeddings(self, new_embeddings):
+         self.wte = new_embeddings
+
+     def _prune_heads(self, heads_to_prune):
+         """
+         Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
+         """
+         for layer, heads in heads_to_prune.items():
+             self.h[layer].attn.prune_heads(heads)
+
+     def forward(
+         self,
+         input_ids: Optional[torch.LongTensor] = None,
+         past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
+         attention_mask: Optional[torch.FloatTensor] = None,
+         token_type_ids: Optional[torch.LongTensor] = None,
+         position_ids: Optional[torch.LongTensor] = None,
+         head_mask: Optional[torch.FloatTensor] = None,
+         inputs_embeds: Optional[torch.FloatTensor] = None,
+         encoder_hidden_states: Optional[torch.Tensor] = None,
+         encoder_attention_mask: Optional[torch.FloatTensor] = None,
+         use_cache: Optional[bool] = None,
+         output_attentions: Optional[bool] = None,
+         output_hidden_states: Optional[bool] = None,
+         return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, BaseModelOutputWithPastAndCrossAttentions]:
+         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
+         output_hidden_states = (
+             output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+         )
+         use_cache = use_cache if use_cache is not None else self.config.use_cache
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+         if input_ids is not None and inputs_embeds is not None:
+             raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
+         elif input_ids is not None:
+             input_shape = input_ids.size()
+             input_ids = input_ids.view(-1, input_shape[-1])
+             batch_size = input_ids.shape[0]
+         elif inputs_embeds is not None:
+             input_shape = inputs_embeds.size()[:-1]
+             batch_size = inputs_embeds.shape[0]
+         else:
+             raise ValueError("You have to specify either input_ids or inputs_embeds")
+
+         device = input_ids.device if input_ids is not None else inputs_embeds.device
+
+         if token_type_ids is not None:
+             token_type_ids = token_type_ids.view(-1, input_shape[-1])
+
+         if past_key_values is None:
+             past_length = 0
+             past_key_values = tuple([None] * len(self.h))
+         else:
+             past_length = past_key_values[0][0].size(-2)
+         if position_ids is None:
+             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
+             position_ids = position_ids.unsqueeze(0)
+
+         # GPT2Attention mask.
+         if attention_mask is not None:
+             if batch_size <= 0:
+                 raise ValueError("batch_size has to be defined and > 0")
+             attention_mask = attention_mask.view(batch_size, -1)
+             # We create a 3D attention mask from a 2D tensor mask.
+             # Sizes are [batch_size, 1, 1, to_seq_length]
+             # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
+             # this attention mask is simpler than the triangular masking of causal attention
+             # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
+             attention_mask = attention_mask[:, None, None, :]
+
+             # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
+             # masked positions, this operation will create a tensor which is 0.0 for
+             # positions we want to attend and the dtype's smallest value for masked positions.
+             # Since we are adding it to the raw scores before the softmax, this is
+             # effectively the same as removing these entirely.
+             attention_mask = attention_mask.to(dtype=self.dtype)  # fp16 compatibility
+             attention_mask = (1.0 - attention_mask) * torch.finfo(self.dtype).min
+
+         # If a 2D or 3D attention mask is provided for the cross-attention
+         # we need to make it broadcastable to [batch_size, num_heads, seq_length, seq_length]
+         if self.config.add_cross_attention and encoder_hidden_states is not None:
+             encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
+             encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
+             if encoder_attention_mask is None:
+                 encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
+             encoder_attention_mask = self.invert_attention_mask(encoder_attention_mask)
+         else:
+             encoder_attention_mask = None
+
+         # Prepare head mask if needed
+         # 1.0 in head_mask indicates we keep the head
+         # attention_probs has shape bsz x n_heads x N x N
+         # head_mask has shape n_layer x batch x n_heads x N x N
+         head_mask = self.get_head_mask(head_mask, self.config.n_layer)
+
+         if inputs_embeds is None:
+             inputs_embeds = self.wte(input_ids)
+         hidden_states = inputs_embeds
+
+         if token_type_ids is not None:
+             token_type_embeds = self.wte(token_type_ids)
+             hidden_states = hidden_states + token_type_embeds
+
+         hidden_states = self.drop(hidden_states)
+
+         output_shape = (-1,) + input_shape[1:] + (hidden_states.size(-1),)
+
+         if self.gradient_checkpointing and self.training:
+             if use_cache:
+                 use_cache = False
+
+         presents = () if use_cache else None
+         all_self_attentions = () if output_attentions else None
+         all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
+         all_hidden_states = () if output_hidden_states else None
+         for i, (block, layer_past) in enumerate(zip(self.h, past_key_values)):
+             # Model parallel
+             if self.model_parallel:
+                 torch.cuda.set_device(hidden_states.device)
+                 # Ensure layer_past is on same device as hidden_states (might not be correct)
+                 if layer_past is not None:
+                     layer_past = tuple(past_state.to(hidden_states.device) for past_state in layer_past)
+                 # Ensure that attention_mask is always on the same device as hidden_states
+                 if attention_mask is not None:
+                     attention_mask = attention_mask.to(hidden_states.device)
+                 if isinstance(head_mask, torch.Tensor):
+                     head_mask = head_mask.to(hidden_states.device)
+             if output_hidden_states:
+                 all_hidden_states = all_hidden_states + (hidden_states,)
+
+             if self.gradient_checkpointing and self.training:
+                 outputs = self._gradient_checkpointing_func(
+                     block.__call__,
+                     hidden_states,
+                     None,
+                     attention_mask,
+                     head_mask[i],
+                     encoder_hidden_states,
+                     encoder_attention_mask,
+                     use_cache,
+                     output_attentions,
+                 )
+             else:
+                 outputs = block(
+                     hidden_states,
+                     layer_past=layer_past,
+                     attention_mask=attention_mask,
+                     head_mask=head_mask[i],
+                     encoder_hidden_states=encoder_hidden_states,
+                     encoder_attention_mask=encoder_attention_mask,
+                     use_cache=use_cache,
+                     output_attentions=output_attentions,
+                 )
+
+             hidden_states = outputs[0]
+             if use_cache is True:
+                 presents = presents + (outputs[1],)
+
+             if output_attentions:
+                 all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],)
+                 if self.config.add_cross_attention:
+                     all_cross_attentions = all_cross_attentions + (outputs[3 if use_cache else 2],)
+
+             # Model Parallel: If it's the last layer for that device, put things on the next device
+             if self.model_parallel:
+                 for k, v in self.device_map.items():
+                     if i == v[-1] and "cuda:" + str(k) != self.last_device:
+                         hidden_states = hidden_states.to("cuda:" + str(k + 1))
+
+         hidden_states = self.ln_f(hidden_states)
+
+         hidden_states = hidden_states.view(output_shape)
+         # Add last hidden state
+         if output_hidden_states:
+             all_hidden_states = all_hidden_states + (hidden_states,)
+
+         if not return_dict:
+             return tuple(
+                 v
+                 for v in [hidden_states, presents, all_hidden_states, all_self_attentions, all_cross_attentions]
+                 if v is not None
+             )
+
+         return BaseModelOutputWithPastAndCrossAttentions(
+             last_hidden_state=hidden_states,
+             past_key_values=presents,
+             hidden_states=all_hidden_states,
+             attentions=all_self_attentions,
+             cross_attentions=all_cross_attentions,
+         )
+
+ class GPT2NoPositionalEncodingLMHeadModel(GPT2PreTrainedModel):
+     _tied_weights_keys = ["lm_head.weight"]
+
+     def __init__(self, config):
+         super().__init__(config)
+         self.transformer = GPT2NoPositionalEncodingModel(config)
+         self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
+
+         # Model parallel
+         self.model_parallel = False
+         self.device_map = None
+
+         # Initialize weights and apply final processing
+         self.post_init()
+
+     def parallelize(self, device_map=None):
+         self.device_map = (
+             get_device_map(len(self.transformer.h), range(torch.cuda.device_count()))
+             if device_map is None
+             else device_map
+         )
+         assert_device_map(self.device_map, len(self.transformer.h))
+         self.transformer.parallelize(self.device_map)
+         self.lm_head = self.lm_head.to(self.transformer.first_device)
+         self.model_parallel = True
+
+     def deparallelize(self):
+         self.transformer.deparallelize()
+         self.transformer = self.transformer.to("cpu")
+         self.lm_head = self.lm_head.to("cpu")
+         self.model_parallel = False
+         torch.cuda.empty_cache()
+
+     def get_output_embeddings(self):
+         return self.lm_head
+
+     def set_output_embeddings(self, new_embeddings):
+         self.lm_head = new_embeddings
+
+     def prepare_inputs_for_generation(self, input_ids, past_key_values=None, inputs_embeds=None, **kwargs):
+         token_type_ids = kwargs.get("token_type_ids", None)
+         # Omit tokens covered by past_key_values
+         if past_key_values:
+             past_length = past_key_values[0][0].shape[2]
+
+             # Some generation methods already pass only the last input ID
+             if input_ids.shape[1] > past_length:
+                 remove_prefix_length = past_length
+             else:
+                 # Default to old behavior: keep only final ID
+                 remove_prefix_length = input_ids.shape[1] - 1
+
+             input_ids = input_ids[:, remove_prefix_length:]
+             if token_type_ids is not None:
+                 token_type_ids = token_type_ids[:, -input_ids.shape[1] :]
+
+         attention_mask = kwargs.get("attention_mask", None)
+         position_ids = kwargs.get("position_ids", None)
+
+         if attention_mask is not None and position_ids is None:
+             # create position_ids on the fly for batch generation
+             position_ids = attention_mask.long().cumsum(-1) - 1
+             position_ids.masked_fill_(attention_mask == 0, 1)
+             if past_key_values:
+                 position_ids = position_ids[:, -input_ids.shape[1] :]
+         else:
+             position_ids = None
+
+         # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+         if inputs_embeds is not None and past_key_values is None:
+             model_inputs = {"inputs_embeds": inputs_embeds}
+         else:
+             model_inputs = {"input_ids": input_ids}
+
+         model_inputs.update(
+             {
+                 "past_key_values": past_key_values,
+                 "use_cache": kwargs.get("use_cache"),
+                 "position_ids": position_ids,
+                 "attention_mask": attention_mask,
+                 "token_type_ids": token_type_ids,
+             }
+         )
+
+         return model_inputs
+
+     def forward(
+         self,
+         input_ids: Optional[torch.LongTensor] = None,
+         past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
+         attention_mask: Optional[torch.FloatTensor] = None,
+         token_type_ids: Optional[torch.LongTensor] = None,
+         position_ids: Optional[torch.LongTensor] = None,
+         head_mask: Optional[torch.FloatTensor] = None,
+         inputs_embeds: Optional[torch.FloatTensor] = None,
+         encoder_hidden_states: Optional[torch.Tensor] = None,
+         encoder_attention_mask: Optional[torch.FloatTensor] = None,
+         labels: Optional[torch.LongTensor] = None,
+         use_cache: Optional[bool] = None,
+         output_attentions: Optional[bool] = None,
+         output_hidden_states: Optional[bool] = None,
+         return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, CausalLMOutputWithCrossAttentions]:
+         r"""
+         labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+             Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
+             `labels = input_ids`. Indices are selected in `[-100, 0, ..., config.vocab_size]`. All labels set to
+             `-100` are ignored (masked); the loss is only computed for labels in `[0, ..., config.vocab_size]`.
+         """
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+         transformer_outputs = self.transformer(
+             input_ids,
+             past_key_values=past_key_values,
+             attention_mask=attention_mask,
+             token_type_ids=token_type_ids,
+             position_ids=position_ids,
+             head_mask=head_mask,
+             inputs_embeds=inputs_embeds,
+             encoder_hidden_states=encoder_hidden_states,
+             encoder_attention_mask=encoder_attention_mask,
+             use_cache=use_cache,
+             output_attentions=output_attentions,
+             output_hidden_states=output_hidden_states,
+             return_dict=return_dict,
+         )
+         hidden_states = transformer_outputs[0]
+
+         # Set device for model parallelism
+         if self.model_parallel:
+             torch.cuda.set_device(self.transformer.first_device)
+             hidden_states = hidden_states.to(self.lm_head.weight.device)
+
+         lm_logits = self.lm_head(hidden_states)
+
+         loss = None
+         if labels is not None:
+             # move labels to correct device to enable model parallelism
+             labels = labels.to(lm_logits.device)
+             # Shift so that tokens < n predict n
+             shift_logits = lm_logits[..., :-1, :].contiguous()
+             shift_labels = labels[..., 1:].contiguous()
+             # Flatten the tokens
+             loss_fct = CrossEntropyLoss()
+             loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
+
+         if not return_dict:
+             output = (lm_logits,) + transformer_outputs[1:]
+             return ((loss,) + output) if loss is not None else output
+
+         return CausalLMOutputWithCrossAttentions(
+             loss=loss,
+             logits=lm_logits,
+             past_key_values=transformer_outputs.past_key_values,
+             hidden_states=transformer_outputs.hidden_states,
+             attentions=transformer_outputs.attentions,
+             cross_attentions=transformer_outputs.cross_attentions,
+         )
+
+     @staticmethod
+     def _reorder_cache(
+         past_key_values: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor
+     ) -> Tuple[Tuple[torch.Tensor]]:
+         """
+         This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or
+         [`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct
+         beam_idx at every generation step.
+         """
+         return tuple(
+             tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past)
+             for layer_past in past_key_values
+         )
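The file above mirrors Hugging Face's GPT2Model/GPT2LMHeadModel but drops the learned position-embedding table (wpe), so word order reaches the network only through the causal attention pattern. A minimal usage sketch (not part of the commit; it assumes torch and transformers are installed, the file above is on the import path, and uses a deliberately tiny illustrative config):

import torch
from transformers import GPT2Config
from gpt2_no_positional_encoding_model import GPT2NoPositionalEncodingLMHeadModel

config = GPT2Config(vocab_size=50257, n_embd=128, n_layer=2, n_head=2)
model = GPT2NoPositionalEncodingLMHeadModel(config)

input_ids = torch.randint(0, config.vocab_size, (1, 16))
# Passing labels=input_ids makes forward() shift the labels internally and
# return a next-token cross-entropy loss alongside the logits.
out = model(input_ids, labels=input_ids)
print(out.loss.item(), out.logits.shape)  # scalar loss, torch.Size([1, 16, 50257])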
hop_surprisal/hop_surprisal.ipynb ADDED
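In the notebook whose diff follows, get_summary_stats collapses the per-seed average surprisal differences into a mean and a 95% confidence half-width via the t-distribution; that half-width is what the plotting code draws as error bars (yerr). A standalone sketch of the same calculation, with made-up per-seed values:

import numpy as np
from scipy import stats

diffs = [0.42, 0.55, 0.48]  # hypothetical per-seed surprisal differences

mean = np.mean(diffs)
sem = stats.sem(diffs)  # standard error of the mean
# 95% interval with len(diffs) - 1 degrees of freedom, as in the notebook.
ci_lower, ci_upper = stats.t.interval(0.95, df=len(diffs) - 1, loc=mean, scale=sem)
half_width = (ci_upper - ci_lower) / 2
print(round(mean, 3), round(half_width, 3))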
@@ -0,0 +1,223 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 1,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "# For importing utils\n",
+     "import sys\n",
+     "sys.path.append(\"..\")"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 2,
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stderr",
+      "output_type": "stream",
+      "text": [
+       "/nlp/scr/kallini/miniconda3/envs/llmenv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+       "  from .autonotebook import tqdm as notebook_tqdm\n"
+      ]
+     }
+    ],
+    "source": [
+     "import numpy as np\n",
+     "import pandas as pd\n",
+     "import matplotlib.pyplot as plt\n",
+     "from matplotlib.patches import Patch\n",
+     "from scipy import stats\n",
+     "from utils import PERTURBATIONS"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 3,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "def get_surprisal_differences(perturbation, seed, ckpt, pos_encodings=True):\n",
+     "\n",
+     "    # Load surprisal DataFrame\n",
+     "    surprisals_path = \"hop_surprisal_results/{}_100M{}/randinit_seed{}.csv\"\n",
+     "    nps = \"\" if pos_encodings else \"_no_positional_encodings\"\n",
+     "    surprisal_df = pd.read_csv(surprisals_path.format(perturbation, nps, seed))\n",
+     "\n",
+     "    # Get summary stats for surprisal differences\n",
+     "    marker_token_surprisals = surprisal_df[f\"Marker Token Surprisals (ckpt {ckpt})\"]\n",
+     "    nomarker_token_surprisals = surprisal_df[f\"No Marker Token Surprisals (ckpt {ckpt})\"]\n",
+     "    differences = nomarker_token_surprisals - marker_token_surprisals\n",
+     "    avg_differences = differences.mean()\n",
+     "\n",
+     "    return avg_differences\n",
+     "\n",
+     "def get_summary_stats(l):\n",
+     "    # Calculate confidence interval using t-distribution\n",
+     "    mean = np.mean(l)\n",
+     "    sem = stats.sem(l)\n",
+     "    ci_lower, ci_upper = stats.t.interval(0.95, df=len(l)-1, loc=mean, scale=sem)\n",
+     "    return mean, (ci_upper - ci_lower) / 2\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 4,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "def plot_surprisal_differences(ax, seeds, ckpt, colors, hatches, pos_encodings):\n",
+     "\n",
+     "    hop_control_seeds = []\n",
+     "    hop_tokens4_seeds = []\n",
+     "    hop_words4_seeds = []\n",
+     "    for seed in seeds:\n",
+     "        # Get summary stats for each hop model\n",
+     "        avg_hop_control = get_surprisal_differences(\"hop_control\", seed, ckpt, pos_encodings)\n",
+     "        avg_hop_tokens4 = get_surprisal_differences(\"hop_tokens4\", seed, ckpt, pos_encodings)\n",
+     "        avg_hop_words4 = get_surprisal_differences(\"hop_words4\", seed, ckpt, pos_encodings)\n",
+     "\n",
+     "        # Append results\n",
+     "        hop_control_seeds.append(avg_hop_control)\n",
+     "        hop_tokens4_seeds.append(avg_hop_tokens4)\n",
+     "        hop_words4_seeds.append(avg_hop_words4)\n",
+     "\n",
+     "    if len(seeds) > 1:\n",
+     "        # Prepare data to plot\n",
+     "        summary_stats = [\n",
+     "            get_summary_stats(hop_control_seeds),\n",
+     "            get_summary_stats(hop_tokens4_seeds),\n",
+     "            get_summary_stats(hop_words4_seeds)\n",
+     "        ]\n",
+     "    else:\n",
+     "        summary_stats = [\n",
+     "            (hop_control_seeds[0], 0),\n",
+     "            (hop_tokens4_seeds[0], 0),\n",
+     "            (hop_words4_seeds[0], 0),\n",
+     "        ]\n",
+     "\n",
+     "    x = np.arange(3)  # label locations\n",
+     "    width = 0.8  # width of the bars\n",
+     "\n",
+     "    # Iterate over models and plot bars\n",
+     "    for i, (avg, err) in enumerate(summary_stats):\n",
+     "        color = colors[i]\n",
+     "        hatch = hatches[i]\n",
+     "        ax.bar(x[i], avg, width, yerr=err, label=None,\n",
+     "               color=color, hatch=hatch, edgecolor=\"w\", zorder=2)\n",
+     "\n",
+     "    ax.set_xticks([])\n",
+     "    ax.grid(zorder=0, color=\"lightgray\")\n",
+     "    ax.set_title(f\"{ckpt} Steps\")\n",
+     "\n",
+     "\n",
+     "def plot_surprisal_differences_checkpoints(seeds, checkpoints, pos_encodings=True):\n",
+     "\n",
+     "    # Colors and patterns for bars\n",
+     "    color1 = PERTURBATIONS[\"hop_control\"][\"color\"]\n",
+     "    color2 = PERTURBATIONS[\"hop_tokens4\"][\"color\"]\n",
+     "    color3 = PERTURBATIONS[\"hop_words4\"][\"color\"]\n",
+     "    colors = [color1, color2, color3]\n",
+     "\n",
+     "    hatch1 = ''\n",
+     "    hatch2 = '///'\n",
+     "    hatch3 = '..'\n",
+     "    hatches = [hatch1, hatch2, hatch3]\n",
+     "\n",
+     "    # Create a figure with multiple subplots\n",
+     "    fig, axs = plt.subplots(2, 3, figsize=(6, 4), sharey=True)\n",
+     "    axes_flat = axs.flatten()\n",
+     "\n",
+     "    # Call individual plot function with different parameters for each subplot\n",
+     "    for i, checkpoint in enumerate(checkpoints):\n",
+     "        plot_surprisal_differences(\n",
+     "            axes_flat[i], seeds, checkpoint, colors, hatches, pos_encodings)\n",
+     "\n",
+     "    legend_elements = [Patch(facecolor=color1, hatch=hatch1,\n",
+     "                             edgecolor=\"w\", label='NoHop'),\n",
+     "                       Patch(facecolor=color2, hatch=hatch2,\n",
+     "                             edgecolor=\"w\", label='TokenHop'),\n",
+     "                       Patch(facecolor=color3, hatch=hatch3,\n",
+     "                             edgecolor=\"w\", label='WordHop')]\n",
+     "    fig.legend(handles=legend_elements, ncol=3, loc=\"center\",\n",
+     "               bbox_to_anchor=(0.55, 0), frameon=False)\n",
+     "\n",
+     "    fig.supylabel(\"Surprisal Difference\", fontsize=12, x=0.04)\n",
+     "\n",
+     "    # Adjust layout and show plot\n",
+     "    plt.tight_layout()"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 5,
+    "metadata": {},
+    "outputs": [
+     {
+      "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkUAAAGhCAYAAABvQ8DIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB/DUlEQVR4nO3dd1hT59sH8O9JgLCXbAVliaMOpG4FcSHuXfeordZaq/VVq60DV13Vqq2L1p9oa62r1Vr33ntWRRTFjVu2MpLn/SMlGjNITgKcJPfnunJJznlynufInfDNmRxjjIEQQgghxMKJSnsAhBBCCCFCQKGIEEIIIQQUigghhBBCAFAoIoQQQggBQKGIEEIIIQQAhSJCCCGEEAAUigghhBBCAFAoIoQQQggBQKGIEEIIIQQAhSJCCCGEEAAUigTl6tWr6NatG4KCgmBvbw8PDw9ERkZi69atatsnJiaiVatWcHR0hLu7O/r27Ytnz56ptJPJZJgzZw4CAwNha2uL6tWrY+3atTqP6+jRo4iNjUXZsmVha2uLgIAAtGvXDr///ruiTU5ODuLi4nDw4EG915uYn/Pnz6N9+/Zwd3eHvb09PvjgAyxatEil3fHjx9GoUSPY29vDx8cHX375JbKyslTa5ebm4uuvv4afnx/s7OxQt25d7NmzR+fxbN26FVFRUfDy8oK9vT2CgoLQvXt37Ny5U9Hm0aNHiIuLw8WLF3mtMzEP586dQ6tWreDs7AwnJye0bNlSY01Q/ZohRgRj27ZtLCYmhsXFxbH4+Hi2YMEC1rhxYwaALV++XKnt/fv3mYeHBwsODmYLFy5kM2bMYG5ubqxGjRosNzdXqe24ceMYAPbpp5+y+Ph41qZNGwaArV27tsgxrV+/nnEcx8LDw9ns2bNZfHw8Gz9+PGvYsCFr0qSJot2zZ88YADZ58mSj/F8Q07Vr1y5mY2PD6taty+bPn8/i4+PZ119/zcaMGaPU7sKFC8zW1paFh4ezpUuXsm+//ZZJJBLWqlUrlWX26NGDWVlZsdGjR7Ply5ez+vXrMysrK3bkyJEixzN37lwGgEVFRbH58+ezZcuWsdGjR7OaNWuy/v37K9qdOXOGAWArV6409L+AmKhz584xW1tbFhoayr7//ns2Z84cVqFCBebs7MyuX7+u1Jbq1zxRKBK4goICVqNGDRYWFqY0fejQoczOzo7dvXtXMW3Pnj0qAerBgwfM2tqaDRs2TDFNJpOxxo0bs3LlyrGCggKt/VepUoVVrVpVJWgxxtiTJ08UP1MoIowxlp6ezry9vVmnTp2YVCrV2jY2Npb5+vqy9PR0xbSff/6ZAWC7du1STDt16hQDwObOnauY9vr1axYcHMzq16+vtY/8/Hzm7OzMWrRooXb+uzVMf1RI69atmZubG3v+/Lli2qNHj5ijoyPr3LmzUluqX/NEocgEtG3blnl7eytN8/LyYt26dVNpW7FiRdasWTPF88WLFzMA7OrVq0rtfv/9dwagyG8qEomEDRgwQGublJQUBkDl8W5ASkxMZF26dGFubm5MIpGwiIgItmXLFqXlrFy5kgFghw4dYoMHD2bu7u7MycmJ9e3bl718+VKp7ZkzZ1jLli1ZmTJlmK2tLatQoQIbOHCg1nGS4rd06VIGgF27do0xxlhWVpbacJSens6srKxUth7l5uYyR0dHNmjQIMW0MWPGMLFYrPTHhzHGvvvuOwaA3bt3T+N4UlNTGQAWFxenddwHDhxQW8Pv/oE5efIki4mJYc7OzszOzo5FRkayo0ePKi1n8uTJDABLTExk3bp1Y05OTszd3Z19+eWX7PXr10ptd+/ezRo2bMhcXFyYg4MDq1ixIhs/frzWcZLi5eTkpPZztU2bNszGxoZlZmYyxqh+GTPf+qVjigQoOzsbz58/x61bt/DDDz9gx44daNasmWL+w4cP8fTpU3z44Ycqr61Tpw4uXLigeH7hwgU4ODigcuXKKu0K52tTvnx57Nu3Dw8ePNDYxtPTE0uXLgUAdOrUCb/++it+/fVXdO7cGYD8WKl69eohMTER48aNw7x58+Dg4ICOHTvir7/+UlneF198gcTERMTFxaFfv35Ys2YNOnbsCMYYAODp06do2bIl7ty5g3HjxuHHH39E7969cfLkSa3rQorf3r174ezsjIcPHyIsLAyOjo5wdnbG0KFD8ebNG0W7f//9FwUFBSo1bGNjg5o1a6rUcMWKFeHs7KzUtrCGtR1D4eXlBTs7O2zduhUvX77U2K5y5cqYOnUqAGDw4MGKGo6MjAQA7N+/H5GRkcjIyMDkyZPx3XffIS0tDU2bNsXp06dVlte9e3e8efMGM2fOROvWrbFo0SIMHjxYMf/q1ato27YtcnNzMXXqVMybNw/t27fHsWPHNI6RFL/c3FzY2dmpTLe3t0deXh6uXLkCgOrXrOu3tFMZUTVkyBBF0heJRKxr165KW0oKN5OuXr1a5bVjxoxhANibN28YY/JvOEFBQSrtsrOzGQA2btw4rWNZsWIFA8BsbGxYdHQ0mzhxIjty5IjKt39tu8+aNWvGqlWrphgTY/JdeA0aNGChoaGKaYVbiiIiIlheXp5i+pw5cxgAxZalv/76iwFgZ86c0Tp2UvKqV6/O7O3tmb29PRs+fDjbtGkTGz58OAPAevTooWi3YcMGBoAdPnxYZRndunVjPj4+iudVq1ZlTZs2VWl39epVBoAtW7ZM65gmTZrEADAHBwcWGxvLZsyYwc6dO6fSTtPuB5lMxkJDQ1lMTAyTyWSK6Tk5OSwwMFBp10bhN+327dsrLePzzz9nANilS5cYY4z98MMPDAB79uyZ1rGTklWtWjVWsWJFpcMKcnNzWUBAAAPANm7cyBij+jXn+qUtRQI0cuRI7NmzB6tWrUJsbCykUiny8vIU81+/fg0AkEgkKq+1tbVVavP69Wud2mny8ccfY+fOnWjSpAmOHj2KadOmoXHjxggNDcXx48eLXJeXL19i//796N69OzIzM/H8+XM8f/4cL168QExMDG7evImHDx8qvWbw4MGwtrZWPB86dCisrKywfft2AICrqysA4J9//kF+fn6RYyAlJysrCzk5OejXrx8WLVqEzp07Y9GiRRgyZAj++OMP3Lx5E0DRNfxuXRpaw1OmTMHvv/+O8PBw7Nq1C99++y0iIiJQq1YtJCYmFrlOFy9exM2bN9GrVy+8ePFCUcPZ2dlo1qwZDh8+DJlMpvSaYcOGKT0fPnw4AKjU8JYtW1ReS0rP559/jhs3bmDQoEG4du0arly5gn79+iE1NRWA8ucqQPVrjvVLoUiAKlWqhObNm6Nfv374559/kJWVhXbt2il2HxVu3s3NzVV5beEuisI2dnZ2OrXTJiYmBrt27UJaWhoOHz6MYcOG4e7du2jbti2ePn2q9bXJyclgjGHixInw9PRUekyePBkAVJYRGhqq9NzR0RG+vr64c+cOACAqKgpdunTBlClT4OHhgQ4dOmDlypVq15OUrMJ66tmzp9L0Xr16AQBOnDih1E5Tbb5bl8ao4Z4
9e+LIkSN49eoVdu/ejV69euHChQto166d0m49dQqDXP/+/VVq+JdffkFubi7S09OVXvN+DQcHB0MkEilq+KOPPkLDhg3xySefwNvbGz169MD69evN7g+Mqfnss8/wzTff4Pfff0fVqlVRrVo13Lp1C2PHjgUg/ywCqH7NuX6tSnsApGhdu3bFkCFDcOPGDYSFhcHX1xcAFN9e3pWamgp3d3fFNxNfX18cOHAAjDFwHKfUDgD8/Px0Hoe9vT0aN26Mxo0bw8PDA1OmTMGOHTvQv39/ja8pfJOMHj0aMTExatuEhIToPAYA4DgOGzduxMmTJ7F161bs2rULH3/8MebNm4eTJ08qPrhIyfPz88PVq1fh7e2tNN3LywsA8OrVKwAosobfrUtfX1+VrYnvvlafGnZ2dkaLFi3QokULWFtbY9WqVTh16hSioqI0vqawhufOnYuaNWuqbVNUzb373gPkfwgPHz6MAwcOYNu2bdi5cyfWrVuHpk2bYvfu3RCLxTqvEzGuGTNmYPTo0bh69SpcXFxQrVo1fPPNNwCAihUrAqD6Nef6pS1FJqBw82phmi9btiw8PT1x9uxZlbanT59WKvyaNWsiJydHZTPrqVOnFPP5KDzAsPCN/f6bplBQUBAAwNraGs2bN1f7cHJyUnpN4TebQllZWUhNTUWFChWUpterVw8zZszA2bNnsWbNGly9ehV//PEHr/UhxhEREQEAKn8EHj16BEB+UD4AfPDBB7CyslKp4by8PFy8eFGlhm/cuIGMjAyltiVVw8HBwQDkf5A01fC7u3sB1RpOTk6GTCZTqmGRSIRmzZph/vz5uHbtGmbMmIH9+/fjwIEDvNaHGI+bmxsaNWqEatWqAZCfQFCuXDlUqlQJANUvYL71S6FIQNTtisrPz8fq1athZ2eHKlWqKKZ36dIF//zzD+7fv6+Ytm/fPty4cQPdunVTTOvQoQOsra2xZMkSxTTGGJYtW4ayZcuiQYMGWse0b98+tdML9y2HhYUBkG9FAoC0tDSldl5eXmjSpAmWL1+u9luVuitwx8fHKx0rtHTpUhQUFCA2NhaAfGtD4a7EQoUfLLQLrXR1794dALBixQql6b/88gusrKzQpEkTAICLiwuaN2+O3377DZmZmYp2v/76K7KyspRquGvXrpBKpYiPj1dMy83NxcqVK1G3bl34+/trHE9OTo5il937duzYAeBtDTs4OABQreGIiAgEBwfj+++/V3u1YnU1vHjxYqXnP/74IwAoaljdmURUw8K0bt06nDlzBiNHjoRIJP+TSfVrvvVLu88EZMiQIcjIyEBkZCTKli2Lx48fY82aNbh+/TrmzZuntInzm2++wYYNGxAdHY0RI0YgKysLc+fORbVq1TBw4EBFu3LlymHkyJGYO3cu8vPzUbt2bWzevBlHjhzBmjVritzM2aFDBwQGBqJdu3YIDg5GdnY29u7di61bt6J27dpo164dAChC27p161CxYkW4u7vjgw8+wAcffIDFixcrvnV9+umnCAoKwpMnT3DixAk8ePAAly5dUuozLy8PzZo1Q/fu3ZGUlIQlS5agUaNGaN++PQBg1apVWLJkCTp16oTg4GBkZmbi559/hrOzM1q3bm2sXwfhITw8HB9//DH+97//oaCgAFFRUTh48CA2bNiA8ePHK+0qmDFjBho0aICoqCgMHjwYDx48wLx589CyZUu0atVK0a5u3bro1q0bxo8fj6dPnyIkJASrVq3CnTt3VMLX+3JyctCgQQPUq1cPrVq1gr+/P9LS0hTvgY4dOyI8PByA/Bu1q6srli1bBicnJzg4OKBu3boIDAzEL7/8gtjYWFStWhUDBw5E2bJl8fDhQxw4cADOzs4qt+JJSUlB+/bt0apVK5w4cQK//fYbevXqhRo1agAApk6disOHD6NNmzYoX748nj59iiVLlqBcuXJo1KiRsX4dRE+HDx/G1KlT0bJlS5QpUwYnT57EypUr0apVK4wYMUKpLdWvmdZvqZ77RpSsXbuWNW/enHl7ezMrKyvm5ubGmjdvrnKRw0JXrlxhLVu2ZPb29szV1ZX17t2bPX78WKWdVCpl3333HStfvjyzsbFhVatWZb/99pvOY+rRowcLDg5mdnZ2zNbWllWpUoV9++23LCMjQ6nt8ePHWUREBLOxsVE5Pf/WrVusX79+zMfHh1lbW7OyZcuytm3bKk5xZUz14o1ubm7M0dGR9e7dm7148ULR7vz586xnz54sICCASSQS5uXlxdq2bcvOnj2r0zqR4pWXl8fi4uJY+fLlmbW1NQsJCWE//PCD2rZHjhxhDRo0YLa2tszT05MNGzZMpa4Yk18BePTo0czHx4dJJBJWu3ZttnPnziLHkp+fz37++WfWsWNHVr58eSaRSJi9vT0LDw9nc+fOVblS+5YtW1iVKlWYlZWVyunNFy5cYJ07d2ZlypRhEomElS9fnnXv3p3t27dP0abwlOZr166xrl27MicnJ+bm5sa++OILpYvf7du3j3Xo0IH5+fkxGxsb5ufnx3r27Mlu3LhR5DqR4pOcnMxatmzJPDw8mEQiYZUqVWIzZ85Ue0V/xqh+zbF+Ocbe2w9BSClJSEjAwIEDcebMGbUXpiRE6OLi4jBlyhQ8e/YMHh4epT0cQvRC9UvHFBFCCCGEAKBQRAghhBACgEIRIYQQQggAgI4pIoQQQggBbSkihBBCCAFAoYgQQgghBIAFXrxRJpPh0aNHcHJy0nhZdEKMiTGGzMxM+Pn5Ka6IawiqYVKSqH6JqdOnhi0uFD169EjrZdUJKS73799HuXLlDF4O1TApDVS/xNTpUsMWF4oKbz56//59ODs7l/JoiCXIyMiAv7+/yo1v+aIaJiWJ6peYOn1q2OJCUeHmWmdnZ3pDkhJlrF0FVMOkNFD9ElOnSw3TgdaEEEIIIaBQRAghhBACgEIRIYQQQggACkWEEEIIIQAoFBFCCCGEAKBQRAghhBACgEIRIYQQQggACkWEEEIIIQAoFBFCCCGEAKBQRAghhBACgEIRIYQQQggACkWEEEIIIQAEGIoOHz6Mdu3awc/PDxzHYfPmzUrzBwwYAI7jlB6tWrUqncESQgghxGwILhRlZ2ejRo0aWLx4scY2rVq1QmpqquKxdu3aEhwhIYQQQsyRVWkP4H2xsbGIjY3V2kYikcDHx6eERkQIIcQUZWdnw9HREQCQlZUFBweHUh4RETrBhSJdHDx4EF5eXnBzc0PTpk0xffp0lClTRm3b3Nxc5ObmKp5nZGQAAKRSKaRSaYmMl1g2Q+uMapiUJlOu33eXT+8Xy6XP793kQlGrVq3QuXNnBAYG4tatW/jmm28QGxuLEydOQCwWq7SfOXMmpkyZojI9KSlJ8Q2CkOKUlZVl0OuphklpKun6tba2VvtZzkdOTo7i5zt37sDe3t4oy5VKpcjPzzfKskjx06eGOcYYK8axGITjOPz111/o2LGjxja3b99GcHAw9u7di2bNmqnMV/ctxd/fHy9fvoSzs3NxDJsQJRkZGXB3d0d6ejqvmqMaJqWppOuX4ziIRMY53L
W4dp/JZDII+E8neY8+NWxyW4reFxQUBA8PDyQnJ6sNRRKJBBKJRGW6WCw22rcRQrQxtM6ohklpKo36Xb58OVJTUw3qFwDy8vIUP8+YMQM2NjYGL9PX1xdDhgwxeDmk5OhTwyYfih48eIAXL17A19e3tIdCiFmhg1RJaUlNTcXdu3cNXs67u7ju3bsHa2trg5dJzJvgQlFWVhaSk5MVz1NSUnDx4kW4u7vD3d0dU6ZMQZcuXeDj44Nbt25h7NixCAkJQUxMTCmOmhBCiNBYW1tjwIABpT0MYkIEF4rOnj2L6OhoxfNRo0YBAPr374+lS5fi8uXLWLVqFdLS0uDn54eWLVti2rRpajfPEkIIIYToSnChqEmTJloPYNu1a1cJjoYQ0yOTyYx2oGpxMYUxEkIsj+BCESHEMCKRyCgHqhbHQaoAHahKCBEuCkWEmCFjHahaeDyGMc4EIoQQoaPt14QQQgghoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEUIIIYQAoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEUIIIYQAoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEUIIIYQAoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEUIsQHZ2NjiOA8dxyM7OLu3hEEIEikIRIYQQQggoFBFCCCGEAKBQRAghhBACgEIRIYQQQggACkWEEEIIIQAoFBFCCCGEAKBQRAghhBACgEIRIYQQQggACkWEEEKI4NEFSEsGhSJCCCGEEFAoIoQQQggBQKGIECJgTCYt7SEUyRTGSAjRjVVpD4AQQjThRGIU/DkWXKVmEFVuAenhpWBJ+7W+RhTeBeLavSA98ztkFzYBAPJzCxTz8//XC/kS+UcfF9YU4sihkCXugexovPaxBNSCuPlYsPvnId03H5AVgPMIglXnuQauJSFEKAQXig4fPoy5c+fi3LlzSE1NxV9//YWOHTsq5jPGMHnyZPz8889IS0tDw4YNsXTpUoSGhpbeoAkhxUYRiLZOBLv4p9a2osafyQPRgYWQHVn2dkae7O3PT64DNiJwNTvLA9G59ZBtnwqAaR5DaBTEzceAJR+CdOMoQJYPaH0FIcQUCW73WXZ2NmrUqIHFixernT9nzhwsWrQIy5Ytw6lTp+Dg4ICYmBi8efOmhEdKCCkJegWi6BGqgUgNrmZniNtN0z0QdVsElnxYKRARQsyP4LYUxcbGIjY2Vu08xhgWLFiACRMmoEOHDgCA1atXw9vbG5s3b0aPHj1UXpObm4vc3FzF84yMDACAVCqFVErHApDiZ2id6VvDYrHYoP5Kii7/L2KxWL7LzJiBqHp7owcic/4sofpVr6R/5+/2R3+/9KPP/5XgQpE2KSkpePz4MZo3b66Y5uLigrp16+LEiRNqQ9HMmTMxZcoUlelJSUlwdHQs1vESAgBZWVkGvV6fGra1tUVISIhB/ZWUlJQUrVt4C9elyGOI9AhEACBuPcnoW4iKWhdTRvWrXkn/znNychQ/X79+Hfb29iXWt6nTp4ZNKhQ9fvwYAODt7a003dvbWzHvfePHj8eoUaMUzzMyMuDv74+wsDA4OzsX32Ah3xXo4uICAEhPT4eDg0Ox9keEqfCbMV+lWcPFKTAw0OBl6BuIAEB2YRNkB+bAmLvMjLEuQkX1q15J/87fvWBjpUqV6O+JHvSpYZMKRXxIJBJIJBKV6WKxuNg30767/JLojwiTob/30qzh4mTo2PXdZQZ8DwCQ7ZoF2HCa2+oRiDhP+VYNU/49FIXqVz1dxs5kUnAi46xjcf09MeYYhUqf/yuTCkU+Pj4AgCdPnsDX11cx/cmTJ6hZs2YpjYoQUtL0Pqi66TgUhiL5FiL1oUivQORXDeLWk/itALEI8ktKjAF7flt1psgK4majwPnXgnTvHLB757UuS1qrn+Lndy8roY66y1KoHR9dUkKFSYWiwMBA+Pj4YN++fYoQlJGRgVOnTmHo0KGlOzhCSIngdZbZ0dVFLlfvQNRnBdire+B8Kuu9DsRysOe3gcfXlCeKrCHuOh+cfzikG4aD3TykZQkcRK0nQVSp6dtJ/11WQh2Nl6VQJ6yp9vkWSHChKCsrC8nJyYrnKSkpuHjxItzd3REQEICRI0di+vTpCA0NRWBgICZOnAg/Pz+laxkRQswT79Pud83S3pZPIHp2E9K98yAauIbXuhALVRiIQiIh3fClboEoojukG74uetH6vj8iaWPC+wQXis6ePYvo6GjF88ID9Pr374+EhASMHTsW2dnZGDx4MNLS0tCoUSPs3LkTtra2pTVkQkgJEMJ1iJQC0ZpPAfcKPNeGWCS+gWjrRLDLf2tfNJ/3R+IeiKvE8FgR8yW4UNSkSRMwpuUDjOMwdepUTJ06tQRHRQgpTYYGIgcbEfKmVFJta0ggysvR2JYQFYYEouK6cOn5DRSK3iO4K1qXNplMVnSjUmYKYyTEWEThXYS3hYgCEdGHEAPRdtqwoI7gthSVNpFIhOXLlyM1NdXgZeXl5Sl+njFjBmxsbAxepq+vL4YMGWLwcggxFboeNEqBiAiSyEqggYju3KcOhSI1UlNTcffuXYOXk5//9oP23r17sLa2NniZhFga6ZnfKRARkyU/7T6cApGJoFBECBE0bddZASgQEWHj/Gvpftq9lkD07nFxFIiKDx1TRAgxWRSIiNBJ986hLUQmhEIRIcQkCSIQufhqnkcIUMSVqikQCQ2FIkKIyRFEIHKvAKs2qnd/J0Q3FIiEiEIRIcSkCCYQ9UsA8mm3GuFDGIGIC6jFZ/BmjQ60LkbW1tYYMGBAaQ+DELMhqECUm4mCnTNh3XcF/xUiFkgggSg0CuLmY/msgFmjLUWEEJMguEC0eiDwOo33+hBLJKBA1G0R2H1txztZJgpFhBDBE2Qgyn7Of4WIBRJYIEo+DOm++XxWxKxRKCKECBoX1pQCETFxAgxEG0cBsgI+K2PWKBQRQgRNHDmUAhExacIMRJrfH5aMdyjKyMjArFmzEBMTg/DwcJw+fRoA8PLlS8yfPx/JyclGGyQhxHLJEvdQICImS9RoMAUiE8Lr7LMHDx4gKioK9+/fR2hoKK5fv46srCwAgLu7O5YvX467d+9i4cKFRh0sIcTyyI7GgwIRMVWiyi0oEJkQXqFozJgxyMzMxMWLF+Hl5QUvLy+l+R07dsQ///xjlAESQogmFIiI0EkPL6VAZEJ47T7bvXs3vvzyS1SpUgUcx6nMDwoKwv379w0eHCGEaCKIQGTnynv8xDKwpP1a51MgEhZeoej169fw9PTUOD8zM5P3gAghpCiCCEQOHrBqG8d7HQihQCQ8vEJRlSpVcPjwYY3zN2/ejPDwcN6DIoQQTQQTiPqtBKzt+a8IsWhCCEScZwjf4ZstXqFo5MiR+OOPPzB79mykp6cDAGQyGZKTk9G3b1+cOHECX331lVEHSgghggpEEicUbJvMf2WIxRJEIPKrBnHrSXxXwWzxOtC6T58+uHv3LiZMmIBvv/0WANCqVSswxiASifDdd9+hY8eOxhwnIcTCC
S4QrR4A2NCWIqIfwQSiPivAXt0D51OZ76qYJd43hP3222/Rt29fbNq0CcnJyZDJZAgODkbnzp0RFBRkzDESQiycIAPRyzuATxW+q0QskKAC0bObkO6dB9HANXxXxyzxDkUAEBAQQLvJCCHFiguoBXHzMcILRKUkOzsbjo6OAICsrCw4ODiU2liI7gQXiNZ8CrhX4Lk25ovXMUXnz5/HkiVLNM5fsmQJLl68yHdMhBCiIG4+lgIRMWmCDETa3h8WjFco+vbbb7F3716N8/fv348JEybwHhQhhBRi989TICImSxTehQKRCeEVis6dO4fGjRtrnN+4cWOcPXuW96AIIaSQdN98CkTEZIlr96JA9J7s7GxwHAeO45CdnV3aw1HCKxRlZmbCykrz4UgikUhxqj4hhBhEVqBxFgUiInTSM79TIDIhvEJRaGgodu/erXH+zp076Qw0QkixEkQgsrblOXpiKWQXNmmdT4FIWHiFokGDBmHbtm0YNWoU0tLSFNPT0tLw1VdfYefOnRg0aJCxxkgIIUoEEYhs7CGOpWMnCX8UiISH1yn5X375JS5evIgFCxZg0aJF8PPzAwA8evQIMpkMffv2pVP1CSHFQjCBqPfP4NwCdB63VAaIeX0NLRlCH5+5EUQgcvE1YA3ME69QxHEcVq5ciX79+mHTpk24ffs2AKBDhw7o0qULmjRpYswxEkIIAIEFIs9QSLdPhVWn2TqNXSwC+h4GXubKnz99o76dvRjwtgNSstTPl77zusbbAbGaPXhetvr1UckF+DWy6HUgxiGIQOReAVZtphiwFubJoIs3RkdHIzo62lhj0UlcXBymTFH+RYaFheH69eslOg5CSMkSXCD6bRCYTKrXOjhZASsbyX9uvQfY/1h5vqMVcLEDUMER+DER+Oq06jJmfQBc+O/n5Awg573/hqY+wJZmhvVBio9gAlG/BCA/B0AZncYtZYCY06lpqTHG1k6DQlFpqVq1qtJ1krSdCUcIMX2CDESP/tX7Nh81ywBW/31oN/RWDSz+DvKwAgAt/NQvo5mvcvukXOX5jbwN74MUD0EFotxMFOycCeu+K3Qa+7uBaHkS8MsN5fmBjsD6/7aRpGQC3Q+qLmNDE6CCk/K0d7d2floRGBzGrw9jbe3klSYYY4iPj8eKFStw+/ZtvHr1SqUNx3EoKNB8Kq0hrKys4OPjUyzLJoQIC+cZAnHsBOEFIh623AOquMp/XnFTdX5iunzrTQs/YPw59cuYdOHtz9czAE6iPH/FTaCpr2F9EOMTXCBaPRBw8tJrHS68ALILgBmXgUfvdXPhpXJdXXip+vqvzgAzI4B/kt9Ou/TqbQ3PuPz2/cG3D0PxCkVjx47F/PnzUbNmTfTp0wdubm7GHpdWN2/ehJ+fH2xtbVG/fn3MnDkTAQHqD3jMzc1Fbu7br1IZGRkAAKlUCqlUddO3WCwunkEbmbqxE2Ey9HdlyTUsFoshbj0J7GmSoAORruvy9A3QZKf2dkXtztr+UPv8hzn8+1C3HlS/6un6OwcEGoiynytCka7rMvi49iBSVO1uvS9/sFz18w2p3UKG1jCvULRq1Sp06dIF69ev5/Nyg9StWxcJCQkICwtDamoqpkyZgsaNG+PKlStwcnJSaT9z5kyVY5AAICkpSXFTxUK2trYICQkptrEbU0pKCt680XAUJRGUrCwNR8zqyFJruHBd2Kt7gg5E+qyL0KlbD6pf9XT9nXNhTSGOHCq8QMRjXUyBoTXMKxS9fv0azZs35/NSg8XGxip+rl69OurWrYvy5ctj/fr1aq+NNH78eIwaNUrxPCMjA/7+/ggLC4Ozs3OJjLk4BAYGlvYQiI4KvxnzZek1LN0xXdCBCCjZ9yMncYA4QfMfVkOoWw+qX/V0/Z0LPRDpsy6mwNAa5hWKmjVrhjNnzmDw4MF8Xm5Urq6uqFixIpKTk9XOl0gkkEgkKtPFYrHJbKZVx5THbmkM/V1ZfA3na9kiKoBABJjP+1HdelD9qqfr2GWJewQdiPRZF2MpzmBvaA3zOnltyZIlOHnyJL777ju8ePGCzyKMJisrC7du3YKvL12EihCLIoRAJKIzX4l2sqPxEHIgIsp4haKwsDDcvn0bEydOhJeXFxwcHODs7Kz0cHFxMfZYAQCjR4/GoUOHcOfOHRw/fhydOnWCWCxGz549i6U/IifkuxoTCySIQGQNcbNRmufrwE4MbIwGzrcDwt3VtxlRBbjWUf6vOrXc5a/fGC1fHp8+etKtKkuFKQciodRuYR/GqmFeX3O6dOkCjiudqzg9ePAAPXv2xIsXL+Dp6YlGjRrh5MmT8PT0LJXxEEJKmFACUdf54Pxr8VwJudblgI7/nTg7thrQ89B73XDAnAj5BenmRMhPR5a9t9FhTDWgurv8EVsO+POu/n2MqGzQahAeBBGI7Fx5j18otVvYxwjV86x44RWKEhISjNM7D3/88Uep9U0IKWVCCkQhkZDunQOrVt/yXBn56c2Z+YCTNXAgVXW+jAFHnwJRPvJ/3/+jAshf162CfDkX1ZwurUsfF18CER68V4PoSRCByMEDVm3jeK+DUGq3sA9j1TDtECeEmAahBaINX4JlPuO5MnK3M4HgTYCbDXArU32bVnuAUGfgpoYTaOJvAPtSgVd5b++rpm8fX5wCTrThtw5EP4IJRP1WAtb2vNdDKLVb2IeDlXFqmPddQu7du4fPPvsMYWFhcHNzw+HDhwEAz58/x5dffokLFy4UsQRCCNGREAPRzUOa2+rhZa7mD3wAyJcB19Lk/2pyK1P9HxVd+yjQsmxiPIIKRBInFGybzH9lIIzaLezDWDXMa0vRtWvX0LhxY8hkMtStWxfJycmKW3p4eHjg6NGjyM7OxooVut1ThRBCNHLxhVXsRLMMRMRyCC4QrR4A2PDfUmSueN/mw9XVFSdPngTHcfDyUr5/Sps2bbBu3TqjDJAQYtms2kwBXr+iQERMliAD0cs7et/Q2BLw2n12+PBhDB06FJ6enmrPQgsICMDDh0XcoIcQQnSRn0OBiJgsLqCWMAMRUYtXKJLJZLC317zZ7dmzZ2qvYEoIIfoq+CfObAKRiAOGhskfYg1XNWnqA0yqAZTV8BFb1l4+v6mP+vliI/RBjEfcfKzZBKKu5c2/dnntPqtVqxa2bduGzz//XGVeQUEB/vjjD9SrV8/gwRFCCF6naZ5nQoEIADoHAF9Xf/t8aZLy/LL2wPYWgJUIaOqr/o7hayKBRt7yA0uDN8nvLP6uwRWBH9/5+OXTBzEedv+8WQQiwDJql9eWovHjx2Pnzp0YOnQorly5AgB48uQJ9u7di5YtWyIxMRHjxo0z6kAJIUSJiQUiYpmk++abRSCyFLy2FMXGxiIhIQEjRoxAfHw8AKBPnz5gjMHZ2RmrV69GZGSkUQdKCCEKgghEHESN9Lsp9p/3gAf//c2Lv6E6/2EO0HoP0NAbWHFT/TJ6HQYGhQLHnqh+035/uXz7IEYkK9A4y9QC0ezL8vo159rlffHGvn37onPnztizZw9u3rwJmUyG4OBgxMTEwMnJSNfbJoSQ
9wklELWeBFHlFnoNXcZUdwm8b/9j+UOTRznAtEua50uN0AcpfoIIRNa2eo154135VaY1MYfa1TsU5eTkwN/fH+PGjcOYMWPQsWPHYhgWMQYmk4ITqbnLnoCYwhiJgAgpEEV0h/TwUlhFDeO5MsRSCSIQ2dhDHDuB5xqYL71Dkb29PaysrODg4FAc4yFGxInEKPhzDNjLexA3GwXOvxake+eA3Tuv9XWiRoMhqtwC0sNLwZL2AwDyc99uAs7/Xy/kS+SlIwrvAnHtXpCe+R2yC5u0jyesKcSRQyFL3APZ0XhwHkGw6jzXwLUkFkNogWjrRLDH1wEKRUQPgglEvX8G5xbAcy3MF6/dZ126dMHGjRsxdOhQtdcpIsLBXt6DuNGn4PzDId0wXOddAtKtE8Eu/vl2Vt4711B/ch2wEUHU+DN5IDqwELIjy7SOg6vZWR6Izq2HbPtUAAxq7g9IiHpCDEQX/6SL3xG9CCoQeYZCun0qrDrN5rk25olXKOrRowc+//xzREdH49NPP0WFChVgZ2en0q5WrVoGD5AYRr6FKJzfB74WosafQRw9QvdA1G6aUiAiRGd2rvIPbqEFIiMpIwFctdzw0lr09qaamu4hFewEpOUBLzTcQ6qoPqx43wWT6Epwgei3QWAyKc+1kRNC7Rb2Yawa5hWKmjRpovj5yJEjKvMZY+A4DlKpYf/hxHCcfy3dtxDpGogaDDLpQJSdnQ1HR0cAQFZWFu0KFjirtnGA2MYsA1GQE3CuHeBkDXx+Qv0ZNztbAFE+wKHHQLNdqvMHVwSW1Acy84GIrfI7i+vbx091jbM+RD1BBqJH/xq0pVMotVvYxzkt/0364BWKVq5caZzeSbGT7p1j9A98cdQwkw1ExARZ26MgoY/ZBSIACHeXf+ADQLSv6oe+iAMa/XdryUZe8uey995G0b7yf52sgZruqn9YdOmjprvh60LU4zxDII6dILxAZCCh1G5hH8aqYV6hqH///sbpnRQ77QdV8/vAlx5aDNlZ7cGYAhExloJtk80yEAHA9gfA5ntAkCMwR82wZQwYe07+jTr+huofFUD+ujBn4HYWsOMBvz4WJgKjqhq+PkSVuPUksKdJZhWIAOHUbmEf2x4Yp4Z5X6eoUGpqKp4+fYqQkBDaDWFS9N9lBnwNAJAdXwHYaN6BS4GIGFV6quZ5JhyIAOC1FOh6QHubhdfkD00uvARqbTWsj7W3KRQVF/bqntkFIkA4tVvYR7i7cWqY96FJW7ZsQaVKlVCuXDnUqlULp06dAgA8f/4c4eHh+OuvvwwfHSkmPA6q1vG0Y30CEReg+4H4Ug0H6QmJKYzRrJh4ICKWQbpjutkFInPGa0vR1q1b0blzZ9SvXx+9evVCXFycYp6HhwfKli2LhIQEdOrUyVjjJEbD8yyz7XOKXrI+gSg0CuLmY3UetVgE9D0MXE/X+SUaSd+8/bnxdkCs30Vd1arkAvxKd7YpOQIJRKLwLvzGTyxH/hvN8ygQCQ6vUDR16lRERkbiwIEDePHihVIoAoD69etj+fLlxhgfMSoDTrs/vkL7kvUNRN0Wgd0/Dy5Q99Nerqdrv8S8rtg7p35eegVwEsOXSUqQUALRf9fpIoQXIQQikcFH0JgdXrvPrly5gu7du2uc7+3tjadPn/IeFCkOwrgOkSIQJR+W3z2aEH0IKRBFj4D0zO/81oNYNkEEImuIm43iuQLmi1cosre3R3Z2tsb5t2/fRpkyZXgPihibAAPRxlFa7x6tTVl74GAr+cPPXn2bH+oAVzsC7fzVjEPigI77GK6nMSxorP7kAEP7IMVAaIHowMIib23zPi9bw+uqnb98/g911M+n2hU4oQSirvPB+et3geX4BuZfu7xCUXR0NFatWoWCAtU/ao8fP8bPP/+Mli1bGjw4YhzGCEQONiLkTamEvCmV4PDfmWeGBaJ83uszKBRo5C1/DApVnV/ZBRheGQhzAWZGqF/GrAj5/OGV5e2Low9iREIMREV8YVCnQwDVrkUTUiAKiYR0b9HHir4rvIz51y6vUDRjxgw8ePAAtWvXxvLly8FxHHbt2oUJEyagWrVqYIxh8uTJxh4r4UHUaLAAtxDxD0QAcPQJUCCTP449UZ1/Lxu4kyX/ec8j9cvY/d/0O1ny9sXRBzESa1uzCEQAcPEF1a7FElog2vBlkTcHf58l1C6vo6zCwsJw9OhRjBgxAhMnTgRjDHPnyu923qRJEyxevBgVKlQw5jgJT2pv7qqunYkEIgDY/xgI/m+vxUM1Z7pmFwA1tgABDkCihrPVvjoNxCfJ32TZavbi6dtHOF0RuNiIYyeAcy1n8oEIAM68EF7tkhIgxEB085Det/nosA+4kmbetatTKLp8+TLKly8PF5e327uqVq2KvXv34tWrV0hOToZMJkNQUBA8PT2LZ6SEF+nhpWYViAqpe8O8K7ug6DdNUfON0QcxHOcWAOmvA00+EBWi2rUwLr6wip0ovEDEw9M32mvLHGpXp91n4eHh2LZtm+J506ZNsW/fPgCAm5sbateujbp161IgEiCWtF/rfFMMRMSySLdPNZtARCyPVZspZhGILIVOocjOzg45OW/j28GDB/HkiZodfsSkUCAipoA9S9Y8kwIREbr8HApEJkSn3Wc1atTA/PnzIRaLFbvQzpw5A1tb7ZcC7ty5s+EjJMVCCIGI8wzhO3xCKBARk1DwTxwFIhOiUyhasGABunXrhkGDBgEAOI7DwoULsXDhQo2v4TgOUqnUOKMkRiWIQORXDeLWk/Qadws/zVe07l4BCHQEFl8HstQcwFfdDegUAPx1D7j8SnW+oxUwrBKQkgWsv8OvD1KCBBKIuLCmPAavTAi1G+pswAoQ7V6naZ5n4oFICLVb2IexjjPSKRTVrl0bycnJuHXrFp48eYImTZrgm2++QYsWLYwzClJiBBOI+qwAe3UPnE9lncf+XQRw6hlw6L09t018gN+j5D+XcwCGn3qvPwB7WgJlbIHPKwE+61TXZGYEMLSS/Ocnr/n1QUqIUAJRzc4QRw7lsQJvCaV2l9QzaDUIHyYeiIRSu4V9pOXCKHQKRX///Tc+/PBDhIWFISwsDP3790e7du1Qt67u960ytsWLF2Pu3Ll4/PgxatSogR9//BF16mi4TCYBILBA9OwmpHvnQTRwDd/VUWCah2Y0JdEH0YGQAlG7aZAl7oG4SgyPFZGj2rVQgghEHESNBvNcAfOtXZ1CUadOnfDrr7+iVy/5zQ8PHTqETp06FevAtFm3bh1GjRqFZcuWoW7duliwYAFiYmKQlJQELy+vUhuXkAkuEK35FHCvoNc6fHNO9ZsEIJ/W69DbTazvYwBa7AY6BgCb76lfm/HngAfZ8s24fPsgxUxogejcesjObzAoFAmldj8/+fYbOSlmQglErSdBVJn/3h6h1G5hH9fTjVPDOoUiJycnpKWlKZ7fuXMHWVlZhvfO0/z58/Hpp59i4MCBAIBly5Zh27Zt+N///odx48Yptc3NzUVu7tvtaunp8h2Pr169UnvMk1gshqurq9JrhMTV1RUZGRk6Ha8lFot
RYF8WXONYiCO6Q3rgF8gu7wNc1Fw//T9caCTEDT+B7MxmyE78DrhoPhiaK1cD4uiRYNeOQnpoCeBUQXNbjyCIW34Ndv86pHvmAnZlwdmXhZUe63LrKVBdw7H91x8D1wEEWUF9Vb8GNifJf9S0jB3J2udr6yPICsjIgNp1ycjIAAAwnl97qIbLgrkxiKO+AOdVA9It08CePtJax6L6AyAKbQrpjvlgKf9qb1u9Pe/3B1emgl41HGQFSN+rr9KuXQCQ5lD96opX/br892XRzgVWMeOBfCsU/DMRkFprrjdrW4hbjAFn6wvppolg2W80txVZ8Xp/yA6vhji8M+/6FULtFvYR6mycGuaYDq1atmyJq1ev4pNPPoGLiwtGjx6NXr16oVYtzTeT4zgOX331VZED0FdeXh7s7e2xceNGdOzYUTG9f//+SEtLw5YtW5Tax8XFYcqUKUYfByH6un//PsqVK6f366iGiRBQ/RJTp0sN6xSKkpOT0a9fP5w8eVL+Io4rMnEV19lnjx49QtmyZXH8+HHUr19fMX3s2LE4dOgQTp1SPhLr/W8pMpkML1++RJkyZcBxdHgsKX6MMWRmZsLPzw8ikf63G6QaJqWJ6peYOn1qWKfdZyEhITh+/DjevHmDp0+fokKFCliwYAE6dOhglAEXJ4lEAolEojTN1dW1dAZDLNa7t8jRF9UwKW1Uv8TU6VrDet0Q1tbWFgEBAZg8eTKaNm2K8uXL8xqcITw8PCAWi1WuqP3kyRP4+PiU+HgIIYQQYh703xYKYPLkyfjggw+MPRad2NjYICIiQnHvNUC+OXbfvn1Ku9MIIYQQQvSh05aijz/+GBzHIT4+HmKxGB9//HGRr+E4DitWrDB4gOqMGjUK/fv3x4cffog6depgwYIFyM7OVpyNRgghhBCiL51C0f79+yESiSCTySAWi7F///4iD5ArzgPoPvroIzx79gyTJk3C48ePUbNmTezcuRPe3t7F1ichhBBCzJtOZ58RQgghhJg7XscUEUIIIYSYG73OPiuUmZmJo0eP4tatW8jMzISTkxNCQkLQqFEjODo6GnuMhBBCCCHFTq9QJJVKMWHCBPz000/IyclRuoAjx3Gwt7fHiBEjMHXqVF4X+SKEEEIIKS16HVPUo0cPrF+/HlWqVEHPnj3xwQcfwNHREVlZWfj333/x+++/IykpCT179sRvv/1WnOMmhBBCCDEqnUPR3r170bJlSwwbNgwLFy5UuyVIJpNh+PDhWLZsGfbs2YOmTZsafcCEEEIIIcVB51DUr18/nDhxAjdu3NB6ur1MJkNYWBgaNGiAVatWGW2ghBBCCCHFSecDf06fPo3OnTsXef0hkUiEzp07q9yYlRBCCCFEyHQORampqQgJCdGpbUhICFJTU3kPihBCCCGkpOkcirKysuDg4KBTW3t7e2RlZfEeFCGEEEJISdM5FDHGivXWHYQQQgghpUnnA61FIhH8/f3h4uJSZNv09HQ8ePAAUqnU4AESQgghhJQEnS/eGBkZqfOWojJlyiAoKIj3oAghhBBCShrdEJYQQgghBHRDWEIIIYQQABSKCCGEEEIAUCgihBBCCAFAoYgQQgghBACFolKTlZWFyZMno1WrVnB3dwfHcUhISFBpJ5PJkJCQgPbt28Pf3x8ODg744IMPMH36dLx580btslesWIHKlSvD1tYWoaGh+PHHH9W2e/jwIbp37w5XV1c4OzujQ4cOuH37tk7jz8vLw8KFCxEeHg5nZ2e4urqiatWqGDx4MK5fv65od/z4ccTFxSEtLU2n5RLToGv9AsCAAQPAcZzKo1KlSiptZTIZ5syZg8DAQNja2qJ69epYu3at2uUmJiaiVatWcHR0hLu7O/r27Ytnz57pNf4PPvgADg4OKFOmDGrWrIkRI0bg0aNHinbbt29HXFycTsskpuXq1avo1q0bgoKCYG9vDw8PD0RGRmLr1q1q2+tab1TDJo6RUpGSksIAsICAANakSRMGgK1cuVKlXWZmJgPA6tWrx6ZPn87i4+PZwIEDmUgkYk2aNGEymUyp/bJlyxgA1qVLFxYfH8/69u3LALBZs2apLDc0NJR5eXmx2bNns/nz5zN/f39Wrlw59vz58yLH37ZtWyYWi1mfPn3Y4sWL2YIFC9hnn33GypUrp7Qec+fOZQBYSkoKn/8mIlC61i9jjPXv359JJBL266+/Kj3+/vtvlbbjxo1jANinn37K4uPjWZs2bRgAtnbtWqV29+/fZx4eHiw4OJgtXLiQzZgxg7m5ubEaNWqw3NxcrWPPy8tj4eHhzM7Ojn322Wds2bJl7Pvvv2cDBw5kHh4e7MCBA4q2w4YNY/QxaZ62bdvGYmJiWFxcHIuPj2cLFixgjRs3ZgDY8uXLldrqU29Uw6aN/qdKyZs3b1hqaipjjLEzZ85o/KOSm5vLjh07pjJ9ypQpDADbs2ePYlpOTg4rU6YMa9OmjVLb3r17MwcHB/by5UvFtNmzZzMA7PTp04ppiYmJTCwWs/Hjx2sd++nTpxkANmPGDJV5BQUFSqGKQpF50rV+GZOHIgcHhyKX+eDBA2Ztbc2GDRummCaTyVjjxo1ZuXLlWEFBgWL60KFDmZ2dHbt7965i2p49e9T+QXvf+vXrGQC2Zs0alXmvX79m6enpiuf0B8WyFBQUsBo1arCwsDCl6brWG9Ww6dPpf6pChQosMDBQr0dQUFBxj91sFPVHRZ3Lly8zAGzRokWKadu2bWMA2LZt25TaHj9+nAFgv/76q2Ja7dq1We3atVWW27JlSxYcHKy177Vr1zIA7ODBg1rbTZ48mQFQebwbkH799VdWq1YtZmtry9zc3NhHH33E7t27p7ScqKgoVrVqVXb27FlWv359ZmtryypUqMCWLl2q0ueiRYtYlSpVmJ2dHXN1dWURERFqPziI8egaigoKCpQ+rN+3ePFiBoBdvXpVafrvv//OALAjR44opnl5ebFu3bqpLKNixYqsWbNmWsc7c+ZMBoDduXNHa7v+/furrd9CUqmU/fDDD6xKlSpMIpEwLy8vNnjwYKUvH4wxVr58edamTRu2a9cuVqNGDSaRSFjlypXZpk2blNrl5eWxuLg4FhISwiQSCXN3d2cNGzZku3fv1jpOYlxt27Zl3t7eStN0rTeqYdOvYZ2uaB0VFUX3PROYx48fAwA8PDwU0y5cuAAA+PDDD5XaRkREQCQS4cKFC+jTpw9kMhkuX76Mjz/+WGW5derUwe7du5GZmQknJye1fZcvXx4AsGbNGjRs2BBWVurLqHPnzrhx4wbWrl2LH374QTFWT09PAMCMGTMwceJEdO/eHZ988gmePXuGH3/8EZGRkbhw4QJcXV0Vy3r16hVat26N7t27o2fPnli/fj2GDh0KGxsbxXr8/PPP+PLLL9G1a1eMGDECb968weXLl3Hq1Cn06tWryP9TUnxycnLg7OyMnJwcuLm5oWfPnpg9ezYcHR0VbS5cuAAHBwdUrlxZ6bV16tRRzG/UqBEePnyIp0+fqtR5Ydvt27drHUth/a5evRoTJkzQ+Nk2ZMgQPHr0CH
v27MGvv/6qdn5CQgIGDhyIL7/8EikpKfjpp59w4cIFHDt2DNbW1oq2N2/exEcffYTPPvsM/fv3x8qVK9GtWzfs3LkTLVq0AADExcVh5syZ+OSTT1CnTh1kZGTg7NmzOH/+vKINMb7s7Gy8fv0a6enp+Pvvv7Fjxw589NFHivn61BvVsBnUcGmnMsJvS1Hz5s2Zs7Mze/XqlWLasGHDmFgsVtve09OT9ejRgzHG2LNnzxgANnXqVJV2hd90rl+/rrFvmUzGoqKiGADm7e3NevbsyRYvXqy0GbiQpt1nd+7cYWKxWGUX3L///susrKyUphf2NW/ePMW03NxcVrNmTebl5cXy8vIYY4x16NCBVa1aVeO4SfEoqn7HjRvHvv76a7Zu3Tq2du1axbfXhg0bsvz8fEW7Nm3aqN3CnJ2dzQCwcePGKfW3evVqlbZjxoxhANibN280jjcnJ4eFhYUxAKx8+fJswIABbMWKFezJkycqbTXtejhy5Ija3Rc7d+5UmV6+fHkGQOlbdXp6OvP19WXh4eGKaTVq1FDZ9U2K35AhQxRbUEQiEevatavSlhJ96o1q2PRrmM4+M0Hfffcd9u7di1mzZiltTXn9+jVsbGzUvsbW1havX79WtAMAiUSitt27bdThOA67du3C9OnT4ebmhrVr12LYsGEoX748PvroI53ONPvzzz8hk8nQvXt3PH/+XPHw8fFBaGgoDhw4oNTeysoKQ4YMUTy3sbHBkCFD8PTpU5w7dw4A4OrqigcPHuDMmTNF9k9KzsyZMzFr1ix0794dPXr0QEJCAmbMmIFjx45h48aNinavX7/WqSYNrV87OzucOnUKY8aMAQAkJCRg0KBB8PX1xfDhw5Gbm1vkOm3YsAEuLi5o0aKFUv1GRETA0dFRpX79/PzQqVMnxXNnZ2f069cPFy5cUGz1dXV1xdWrV3Hz5s0i+yfGM3LkSOzZswerVq1CbGwspFIp8vLyFPP1qTeqYdOvYYNCUX5+Pv79918cPXoUhw8fVnkQ41u3bh0mTJiAQYMGYejQoUrz7OzslN7M73rz5g3s7OwU7QCofeMUnuZf2EYTiUSCb7/9FomJiXj06BHWrl2LevXqYf369fjiiy+KXI+bN2+CMYbQ0FB4enoqPRITE/H06VOl9n5+fnBwcFCaVrFiRQDAnTt3AABff/01HB0dUadOHYSGhmLYsGE4duxYkWMhJe+rr76CSCTC3r17FdPs7Ox0qklj1K+LiwvmzJmDO3fu4M6dO1ixYgXCwsLw008/Ydq0aUWO/+bNm0hPT4eXl5dK/WZlZanUb0hIiMoujvfrd+rUqUhLS0PFihVRrVo1jBkzBpcvXy5yLMQwlSpVQvPmzdGvXz/8888/yMrKQrt27cD+uy2oPvVGNWz6NazTMUXvk8lkGD9+PJYsWYKcnByN7aRSKe+BEVV79uxBv3790KZNGyxbtkxlvq+vL6RSKZ4+fQovLy/F9Ly8PLx48QJ+fn4AAHd3d0gkEqSmpqoso3BaYVtd+Pr6okePHujSpQuqVq2K9evXIyEhQeOxRoC8hjiOw44dOyAWi1Xmv3usia4qV66MpKQk/PPPP9i5cyc2bdqEJUuWYNKkSZgyZYreyyPFx87ODmXKlMHLly8V03x9fXHgwAEwxpQ+fN+vSV9fX6Xp70pNTVXUt67Kly+Pjz/+GJ06dUJQUBDWrFmD6dOna32NTCaDl5cX1qxZo3Z+4XFz+oiMjMStW7ewZcsW7N69G7/88gt++OEHLFu2DJ988oneyyP8dO3aFUOGDMGNGzcQFhamV71RDZt+DfMKRd999x3mzp2LIUOGoFGjRujbty9mz54NV1dXLFmyBBzHYc6cOcYeq0U7deoUOnXqhA8//BDr169XGzhq1qwJADh79ixat26tmH727FnIZDLFfJFIhGrVquHs2bNq+wkKCtJ4kLU21tbWqF69Om7evKnYFabpAMDg4GAwxhAYGKj4tqHNo0ePkJ2drbS16MaNGwCAChUqKKY5ODjgo48+wkcffYS8vDx07twZM2bMwPjx4xWbpUnpy8zMxPPnz5U+eGvWrIlffvkFiYmJqFKlimL6qVOnFPMBoGzZsvD09FRbv6dPn1a005ebmxuCg4Nx5coVxTRt9bt37140bNiwyG/0AJCcnKzyh1Jd/bq7u2PgwIEYOHAgsrKyEBkZibi4OJP5g2IOCndbpaenA9Cv3qiGzaCG+RyIFBwczD766CPGGGPPnz9nHMexffv2McbkB8DWqlWryGvdkLeKOlD12rVrrEyZMqxq1aoqp0q+Kycnh7m7u7O2bdsqTe/Tpw+zt7dnL168UEybNWsWA8DOnDmjmHb9+nUmFovZ119/rXW8N27cUHtQ9atXr5ifnx9zc3NTXI9j6dKlDAC7cOGCUtvk5GQmFotZr169VC5AKZPJlK51pO1Aa09PT8WB1uouOjlmzBgmEolYRkaG1nUi/Gmr39evX6v9vy88mPTPP/9UTLt//77Ga7yULVtW6Rovn332GbOzs1O6fMPevXsZALWXanjXxYsX2bNnz1Sm37lzh9nZ2bHq1asrpn399dcMgNIJDYwxdvDgQQZA7edcfn6+UnttB6nWrFlTMU1d/Xbr1o15eHhoXR/Cj7qDkvPy8litWrWYnZ0dy8zMVEzXtd6ohk2/hnltKXrw4AHGjh0L4O2BYoX7QW1sbNCnTx/Mnz8f3333Hd+sZhF++uknpKWlKS7JvnXrVjx48AAAMHz4cLi4uCAzMxMxMTF49eoVxowZg23btiktIzg4GPXr1wcg3yUxbdo0DBs2DN26dUNMTAyOHDmC3377DTNmzIC7u7vidZ9//jl+/vlntGnTBqNHj4a1tTXmz58Pb29v/N///Z/WcV+6dAm9evVCbGwsGjduDHd3dzx8+BCrVq3Co0ePsGDBAsUusYiICADAt99+ix49esDa2hrt2rVDcHAwpk+fjvHjx+POnTvo2LEjnJyckJKSgr/++guDBw/G6NGjFX36+flh9uzZuHPnDipWrIh169bh4sWLiI+PV5w22rJlS/j4+KBhw4bw9vZGYmIifvrpJ7Rp04bXli+inS71+/jxY4SHh6Nnz56K23rs2rUL27dvR6tWrdChQwfF8sqVK4eRI0di7ty5yM/PR+3atbF582YcOXIEa9asUdrN+s0332DDhg2Ijo7GiBEjkJWVhblz56JatWoYOHCg1nHv2bMHkydPRvv27VGvXj04Ojri9u3b+N///ofc3FylWyIU1u+XX36JmJgYiMVi9OjRA1FRURgyZAhmzpyJixcvomXLlrC2tsbNmzexYcMGLFy4EF27dlUsp2LFihg0aBDOnDkDb29v/O9//8OTJ0+wcuVKRZsqVaqgSZMmiIiIgLu7O86ePYuNGzfqdIwe0d+QIUOQkZGByMhIlC1bFo8fP8aaNWtw/fp1zJs3T2kXvq71RjVsBjXMJ0n5+fkpfWt3cXFhP/74o+L5/PnzdbqCraUrTN/qHoWnsBfeTkHTo3///irLjY+PZ2FhYczGxoYFBwezH374QWVrD
GPybzVdu3Zlzs7OzNHRkbVt25bdvHmzyHE/efKEzZo1i0VFRTFfX19mZWXF3NzcWNOmTdnGjRtV2k+bNo2VLVuWiUQildPzN23axBo1asQcHByYg4MDq1SpEhs2bBhLSkpStFF38cby5cuzn376Samf5cuXs8jISFamTBkmkUhYcHAwGzNmjNYLBhL+dKnfV69esT59+rCQkBBmb2/PJBIJq1q1Kvvuu+8UW/jeJZVK2XfffcfKly/PbGxsWNWqVdlvv/2mtv8rV66wli1bMnt7e+bq6sp69+7NHj9+XOS4b9++zSZNmsTq1avHvLy8mJWVFfP09GRt2rRh+/fvV2pbUFDAhg8fzjw9PRnHcSqnNsfHx7OIiAhmZ2fHnJycWLVq1djYsWPZo0ePlP6fCi98V716dSaRSFilSpXYhg0blJY1ffp0VqdOHebq6srs7OxYpUqV2IwZM9T+PxHDrV27ljVv3px5e3srPsOaN2/OtmzZora9rvVGNWzaNcwx9t8h9npo27YtnJycFDe569SpE/7991+sWrUKMpkM/fr1g5+fH535Q4yiSZMmeP78udJ+ckJMRYUKFfDBBx/gn3/+Ke2hEMKLJdUwr1PyBw8ejNzcXMXphDNmzEBaWhoiIyMRFRWFjIwMzJs3z6gDJYQQQggpTryOKWrfvj3at2+veF6lShXcunULBw4cgJWVFRo0aKB0/AohhBBCiNDxCkXquLi4oGPHjsZaHCGEEEJIieJ1TNG9e/dw7949NGrUSDHt0qVLmDdvHnJzc9GzZ08KSIQQQggxKbxCUceOHZGVlaW4RP+TJ09QuXJl5OXlwcnJCU+fPsWGDRvQuXNnow+YEEIIIaQ48DrQ+vTp02jRooXi+erVq/H69WtcunQJDx8+RLNmzfD9998bbZCEEEIIIcWN1zFFL1++VLq31j///IOoqCgEBwcDADp37oxvvvnGOCM0MplMhkePHsHJyUnj5c8JMSbGGDIzM+Hn5weRyKB7MAOgGiYli+qXmDp9aphXKPL09MTdu3cBAGlpaTh58iRmzZqlmF9QUICCggI+iy52jx49gr+/f2kPg1ig+/fvo1y5cgYvh2qYlAaqX2LqdKlhXqGoefPmWLRoEZydnXHw4EHIZDKlA6uvXbsm2KIvvN3D/fv34ezsXMqjIZYgIyMD/v7+RrvVCNUwKUlUv8TU6VPDvELRrFmzcOPGDYwePRo2Njb4/vvvERgYCADIzc3F+vXr0atXLz6LLnaFm2udnZ3pDUlKlLF2FVANk9JA9UtMnS41zCsUeXt749ixY0hPT4ednR1sbGwU82QyGfbt2yfYLUWEEEIIIeoYdPFGFxcXlWl2dnaoUaOGIYslhBBCCClxOoWi1atXAwD69u0LjuMUz4vSr18//iMjhBBCCClBOl28USQSgeM4vH79GjY2NjqdlslxHKRSqVEGaUwZGRlwcXFBeno67c8mJcLYNUc1TEoS1S8xdfrUnE5bilJSUgBAcexQ4XNCCCFEqLKzs+Ho6AgAyMrKgoODQymPiAidTqGofPnyip/z8/ORnp4Od3d3o1yzghBCCCFECPS+PKlIJEJERAT+/PPP4hgPIYQQCyaTyUp7CEUyhTESfvQ++0wsFqN8+fLIzc0tjvEQQgixYCKRCMuXL0dqaqrBy8rLy1P8PGPGDKXLx/Dl6+uLIUOGGLwcIky8TskfPnw4fvrpJwwaNAju7u7GHhMhRADoeAxSWlJTUxW3kjLUgAEDFMskpCi8QpFUKoVEIkFwcDC6du2KChUqwM7OTqkNx3H46quvjDJIQgghhJDixisUjR49WvHzihUr1LahUEQIIYQQU8IrFNEp+YQQQggxN7xC0bun6BNCCCGEmAO9T8l/18uXL7F+/XrMmTMHc+bMwfr16/HixQuDBnT48GG0a9cOfn5+4DgOmzdvVpo/YMAAcByn9GjVqpVBfRaX7OxsxRizs7NLeziEEEII0YL3DWHj4uIwe/ZslVPzbWxsMHbsWEydOpXXcrOzs1GjRg18/PHH6Ny5s9o2rVq1wsqVKxXPJRIJr74IMUcymUynW/GUJlMYIyHE8vAKRdOmTcPUqVPRpk0bfPHFF6hYsSIAICkpCT/99BNmzJgBa2trTJw4Ue9lx8bGIjY2VmsbiUQCHx8fnZaXm5urFNwyMjIAyM+gK+57s727/JLojwiTob93fWtYLBYb5TovxXGNF+DtdV7o/WAaSqN+TQHVr+nQ53fFKxQtW7YM7dq1w5YtW5SmBwYGolWrVmjXrh2WLl3KKxTp4uDBg/Dy8oKbmxuaNm2K6dOno0yZMmrbzpw5E1OmTFGZnpSUpLgGSyFra2sEBwfDyor3BjQl7765xWKx0d7sBQUFuHXrFvLz842yPFK8srKyDHq9PjVsa2uLkJAQo1zn5d36unfvHqytrQ1a3vtSUlLw5s0boy6TGF9p1K8poPo1HfrUMK+//unp6VqP42ndujUOHjzIZ9FFatWqFTp37ozAwEDcunUL33zzDWJjY3HixAm1oWP8+PEYNWqU4nlGRgb8/f0RFham9m65xvqWDRTv1VRN5YODvP1mzJe+NWwqAgMDS3sIRAdUv+pR/ZoOfWqYVyhq2LAhTp06haFDh6qdf+rUKTRs2JDPoovUo0cPxc/VqlVD9erVERwcjIMHD6JZs2Yq7SUSidpjjrRtuTHW1VSL85u2qWxiJob/rvjUsCkw5bFbEqpf9Ux57JZGn98VryMdly1bhhMnTuCrr75CcnIyZDIZZDIZkpOTMXLkSJw8eRLLli3js2i9BQUFwcPDA8nJySXSHyGWwtraGgMGDMCAAQOMvuuMEEKEiNeWourVq0Mmk2HRokVYtGiR4iySwjsHSyQSVK9eXek1HMchPT3dwOGqevDgAV68eAFfX1+jL9tQhX9UCCGEECJ8vEJRly5dwHGcsccCQH5A1LtbfVJSUnDx4kW4u7vD3d0dU6ZMQZcuXeDj44Nbt25h7NixCAkJQUxMTLGMhxBCCCGWgVcoSkhIMPIw3jp79iyio6MVzwsP0Ovfvz+WLl2Ky5cvY9WqVUhLS4Ofnx9atmyJadOm0bWKCCGEEGIQvUNRTk4OGjdujE8//RSfffaZ0QfUpEkTMMY0zt+1a5fR+ySEEEII0ftAa3t7e6SkpBTb7jNCCCGEkNLA6+yzVq1a0RYbQgghhJgVXqFo4sSJuHHjBvr27YujR4/i4cOHePnypcqDEEIIIcRU8DrQumrVqgCAa9eu4ffff9fYju4NQwghhBBTwSsUTZo0iY4pIoQQQohZ4RWK4uLijDwMQgghhJDSxeuYIkIIIYQQc8NrS9HUqVOLbMNxHCZOnMhn8YQQQgghJc7ou884jgNjjEIRIYQQQkwKr91nMplM5VFQUIBbt27hq6++wocffoinT58ae6yEEEIIIcXGaMcUiUQiBAYG4vvvv0doaCiGDx9urEUTQgghhBS7YjnQOjIyEtu3by+ORRNCCCGEFItiCUVnz56FSEQnthFCCCHEdPA60Hr16tVqp6elpeHw4cP4888/8ckn
nxg0MEIIIYSQksQrFA0YMEDjPA8PD4wbNw6TJk3iOyZCCCGEkBLHKxSlpKSoTOM4Dm5ubnBycjJ4UIQQQgghJY1XKCpfvryxx0EIIYQQUqp4haL3Xb9+HRs2bEBqairCwsIwcOBAODs7G2PRhBBCCCElQudQ9NNPP2HRokU4fvw4PDw8FNO3bt2Kbt26IS8vTzHtxx9/xMmTJ5XaEUIIIYSf7OxsODo6AgCysrLg4OBQyiMyTzqfN//3338jODhYKegUFBTgk08+gVgsxsqVK/Hvv/9i1qxZuHv3LmbMmFEsAyaEEEIIKQ46h6Jr166hXr16StMOHDiAZ8+e4auvvkL//v1RtWpVjB07Ft27d6eLNxJCCCHEpOgcil68eAF/f3+lafv27QPHcejUqZPS9IYNG+LevXvGGSEhhBBCSAnQORR5e3vj8ePHStOOHDkCe3t71KhRQ2m6jY0NbGxsjDNCQgghhJASoHMo+vDDD7Fq1SpkZmYCAK5evYrTp08jJiYGVlbKx2tfv34d5cqVM+5ICSGEEEKKkc6haPLkybh79y5CQ0PRrFkzNGzYEBzHYfz48Spt//rrLzRo0MCoAyWEEEJMCZNJS3sIRWIyWWkPQVB0PiW/WrVq2L9/P2bMmIHbt2+jXr16GD16NCIiIpTaHTx4EPb29ujWrZvRB0sIIYSYCk4kRsGfY8Ce33470c4VVm3jAGt7FGybDKSnal6AtS3EsRPAuQWg4O8Jisn5/+uFfMl7f75FVhA3GwXOvxake+eA3TuvdWyiRoMhqtwCHN28XYleF29s0KABtm3bprVNkyZN8O+//xo0KEIIMSa6xgspLez5beDxNfkTBw9YdZoNiG1QkNAHeHlH8wtt7CHu/TM413KQ/joQ7M6lt/OeXAds3gkzImuIu84H5x8O6YbhYDcPaRkRB1HrSRBVbgHp4aWwihpmyOqZHYqIhBBCSHFz8IBVv5WAxAkFqwfoFog8QyH9bRDYIy0bGgoDUUgkpBu+1C0QRXSHdOtEsKT9PFfGfFEoIjrJzs4Gx3HgOA7Z2dmlPRxCCDEdQgxEF//kuTLmzSj3PiOEEEKIGnau8l1mBgYiBxsR8qZUetuWAlGxoFBECCGEFBOrtnHyY4hoC5FJENzus8OHD6Ndu3bw8/MDx3HYvHmz0nzGGCZNmgRfX1/Y2dmhefPmuHnzZukMlhBCCNHG2p4CkQkRXCjKzs5GjRo1sHjxYrXz58yZg0WLFmHZsmU4deoUHBwcEBMTgzdv3pTwSAkhhBDtCrZNpkBkQgS3+yw2NhaxsbFq5zHGsGDBAkyYMAEdOnQAAKxevRre3t7YvHkzevToUZJDJYQUMyaTghOJS3sYWjGZjK71QjTTdh0iCkSCI7hQpE1KSgoeP36M5s2bK6a5uLigbt26OHHihNpQlJubi9zcXMXzjIwMAIBUKoVUqnq1UbFY2B/AhdSNvaT60/R/R9Qz9P/KkmtYLFZz8Tu8vfCc9PDSIk8rFoV3Aap0UDxXe+G7/3BhTSGOHApZ4h7IjsZrXS4XUAviFl+DE1uZ9fuB6lc9XetXIwEFInOuX0C/9TOpUFR4Q1pvb2+l6epuVlto5syZmDJlisr0pKQkxcXcCtna2iIkJMRIoy1eKSkpJbrLMCcnR/Hz9evXYW9vX2J9m7qsrCyDXm+pNVy4LkoXv3v3wnM6fOCLGn8Gce1ekG6f83bi+xe+K1xyzc7yQHRuPWTbpwJgGpfLhUZB3HwM2L1z4ALrlvj7sSRR/aqna/2qJZBAJArvotO6mDp9atikQhEf48ePx6hRoxTPMzIy4O/vj7CwMDg7O5fiyAwTGBhYov29e22iSpUq0RWB9VD4zZgvquFCen7gN/4M4ugRkB5YCNnxFdqXXLMzxO2m6R6Iui0CSz4M6dGfIfpkXYm/H0sS1a96vH/nQglE/31hAEr+70lJ06eGTSoU+fj4AACePHkCX19fxfQnT56gZs2aal8jkUggkUhUpovFYpPZTKtOSY/93f5M/f+upBn6f0U1DBgUiI4s075kvoFo4yjAK5THupgWql/1eI1dSIEoegSkZ36HuHavEv89lPRtd/RZP5M6OjAwMBA+Pj7Yt2+fYlpGRgZOnTqF+vXrl+LIhMk07tAs/DGS0ifIQCTL57MqxFIJLRAdWAjZhU381sWMCW5LUVZWFpKTkxXPU1JScPHiRbi7uyMgIAAjR47E9OnTERoaisDAQEycOBF+fn7o2LFj6Q1aoDiRGIzJdD9otPlYsPvnId03H5AVKM3Pz337vODvCWCdp4O9ugfpjulAvpZ90S6+sGozBcjPQcE/ccDrtLd9egTBqvNcPqtGLIjioGoKRMRUCTEQHVkG+FThtz5mTHCh6OzZs4iOjlY8L9wX3b9/fyQkJGDs2LHIzs7G4MGDkZaWhkaNGmHnzp2wtbUtrSELmixxD2Qbv4JOB40mH9L8gZ8nU/wobvk12NMkSNd8CuTlqLYt5F4BVrETgdevULB6IJD9XGm25hER8hYFImLSrG2FGYiIWoILRU2aNAFjWj6gOA5Tp07F1KlTS3BUpku+hci4H/js+S1I//qy6EDULwHIzVQbiAjRlfTwUoM/8N+9bxQFIlKSxLETwLmWo0BkIkzqmCJiXPp84HO+VRU/S9cNo0BESkyR1yGiLUREwDi3AApEJoRCkYXSKxD5VYO4x5K3E/Jea16wPoHIzlXvcRPyLgpEROik26dSIDIhFIoskN6BqM8KsOe3il6wPoHIwUN+92hCeBJCIOI8TeNCg6T0sGfJmmdSIBIcCkUWhlcgenZTvstMG30DUb+VgDVdFZvwI4hA5FcN4taT+K4CsXQUiASJQpEF4R2I1nxqvF1mhYFI4iS/ezQhehJMIOqzAuzVPb6rQSyZQAIRF9aUx+DNG4UiC2FYIDLSQdXvBqLVA7TfPZoQNQQViJ7dlF+nixB9CCUQ/XevP11JZUW3KW3GGKPgTsknxmeMQPTuKc0KhgSil3fowmFEL4ILRGs+Bdwr8Fwb/kr6FgnEiIQUiNpNgyxxD8RVYnQauvidTSizLwMb7yrP97IFtjQDrETAhRfA4OOqy4hvAISXAQreCS+NtwPi/y4z2LU88HV1fn1UcgF+jdRpVbSiUGTm5FeqHiOsLUQv7/BdHWKhBBmItL0/CHmf0ALRufWQnd+gcyh614Mc4MJL5Wll3zlENLtAdX7h9PddegVw/90ar56n4X0YikKRmRM3H6v9StX/oUBEhEoU3gXi2r0oEBHTJcRAtH0q4FNZr9WYfVkeVuJvqM57mAO03gM09AZW3FT/+l6HgUGhwOG76ue/u1y+fRiKQpGZY/fPUyAiJo0CETFpIithBiIeN1raeFf71pn9j+UPTR7lANMuASxX/XwpA5YmaR9DUX0YikKRmZPf3JUCETFd0jO/UyAiJkvcbBQ4/3CTD0SWgkKRuZOp2Yn7HwpExBTILmzSOp8CEREyzr8WpBuGUyAyEXRKvoUSRCCytuU5esNkZ2eD4zhwHIfs7OxSGQMxDgpEROike+dQIDIhFIo
skCACkY09xLETeK4BIQIJRC6+BqwBsQTs3nktc007EJWRAMFOmudbi4AqrvJ/NQlylC/H0D6sjJRmKBRZGMEEot4/g3ML4LkWxNIJIhC5V4BVmykGrAWxbKYdiIKcgOQuQFJnYHBF9W12tgAud5D/q8nFDvLlBKkJPvr08VNd/ddBHQpFFkRQgcgzFNLtU3muCbFkgglE/RKAfNqtRvgQRiDiAmrxGTwAINwdcLKW/xytZoOpiAMaecl/buQlf67oV+IAcQLD+hQGBwcHOFkDNd0N60Pd6/mgUGQhBBeIfhuk/e7RhKghqECUm4mCf+J0HrvQb5MgpcNNSohAAlFoFMTNx/JZAQDA9gfA5nvA5ZfAnH9V58sYMPYckJQu/1emZihz/pW/fvM9YMcDw/pYmMh7VZTQ2WcWQJCB6NG/dJsPohfBBaLVAwEnL53HLxYBfQ8D19N1fola0jdvf373FgmGMNYtEkhRBBSIui0Cu38eXCC//U6vpUDXA9rbLLwmf2hy4SVQa6tx+gh3B0ZV1d5WFxSKzBznGQJx7AThBSJC9CDIQJT9XK9QBMgDkaG3Jnj3wnfv3iKBCJ3AAlHyYUiP/gzRJ+v4rIzZot1nZk7cehIFImLSuLCmwgxEhOhMgIFo4yit17GzVBSKzBx7dY8CETFp4sihFIiISRNmINL8/rBktPvMzEl3TKdAREyaLHEPBSJiskSNBkNUuQUFIhNBW4rMXf4bzfMoEBETIDsaD3MJRF62wMFW8oefvfo2P9QBrnYE2vmrn9+6rPY+ytob3gcxHnMKRPENDKurdv7y+T/UUT9fCLVLochSCSEQiXTfUCn005kB0xijuTGlQAQAHQKARt7yx6BQ1fmVXYDhlYEwF2BmhPplTA1/+3MlZ9X5g0IN74MYj/TwUrMIRAAQXsawupoVIZ8/vLK8/fuEULu0+8wSCSIQWUPcbJTOQ373dObaZYBF9eTTvzwJnHmh3NZODPzRRP5N448UYN4V5fnvntKcmgN8fEh+6ue79O1j2306pbmkCSIQ2bnqNeaLL4CC/8LzsSeq8+9lA3eygAqOwJ5H6pexL/Xtz/ezofIpfvSJ4X0Q42FJ+7XON5VABBheV7sfyQPNnSx5+/cJoXYpFFkaoQSirvPB+et3NdXC05kvvAS2PZRPe6jhb1vlv4AAByBRzTVh3j2ludbfQI6ad4G+fYQb6WqqRDeCCEQOHrBqG6fXuM+8AII3yX9WV1fZBUCNLZprFwC+PvdOeynAvVe/+x8b3gcpGaYUiACgwz7gShr/uvrqNBCfJA832WpOfBNC7VIosiRCCkQhkZDunQOrVt/yWhVNQaVQdoFubxp1f1SM3QcxLsEEon4rAWsNBz5oYWhdFd4ioTj7IMVPCIGI8wzRa8xP32ivLV3qqqj5pV27dEyRpRBaINrwZRF3jyZElaACkcQJBdsm818ZYrEEEYj8qkHcehLfVTBbFIosgRAD0c1DPFeGWCrBBaLVA4D0VM1tCVFDMIGozwqwV/f4robZolBk7lx8KRARkyfIQKTt/UGIGoIKRM9uyq9jR5RQKDJzVm2mmE0gauGneV73CsDXHwCOGo4Pqu4GTK4h/1cdRyv567tX4N8HKR5cQC2zDkRCqN1QNaf2E+MSXCBa86n269jpQAi1W9iHsWqYPt7NXX6OWQQiAPguAjj1DDj03qmaTXyA36PkP5dzAIafUp7PAdjTEihjC3xeCfBerbrsmRHA0Eryn5+85tcHKR7i5mPBkg+ZZSDSt3Z91qn+eTRG7S6pZ4SVIRoJMhBpe3/oQCi1W9hHWi6MwuS2FMXFxYHjOKVHpUqVSntYglXwT5xZBCJtmPYTcUymD6Ieu3/eLAMRQLVrCUThXcwuEAHmW7smuaWoatWq2Lt3r+K5lZVJrkbJeJ2meZ6JBaJvzql+kwDk03odAgIdgcXXVeczAC12Ax0DgM331H/MjD8HPMgGUrL490GKh3TffLMMRIBwavfzk2+/kRPjEtfuZXaBCBBO7Rb2cT3dODVskmnCysoKPj4+OrXNzc1Fbu7b7WoZGRkAAKlUCqlUqtJeLBYbZ5DFTN3Y36d1XQQUiHRdF21XMF1/R/vrL7+SPwD113nJKgBmX1HzQj36ANSviy7rp43F17BMzVXe/iOUQGTI+1Gf2lXHGLV7U15SVL960PV3Lj3zu+ADEd/6FULtFvZReAFdQ2vYJEPRzZs34efnB1tbW9SvXx8zZ85EQECA2rYzZ87ElClTVKYnJSXB0dFRaZqtrS1CQvS7mFVpSUlJwZs3mg+S07ougghEHESNBhu+LgKjbl2ysrIMWibVsHqCCETWtkZZF6Gg+tWdrr9z2YVNWpdT2oFIn3UxBYbWsMmForp16yIhIQFhYWFITU3FlClT0LhxY1y5cgVOTk4q7cePH49Ro97eYysjIwP+/v4ICwuDs7PpnnIRGBjI74VCCUStJ0FUuQUAA9ZFgNStS+E3Y76ohlUJIhDZ2EMcOwGA+dQw1a/ujPE7F0IgAsynfgHDa9jkQlFsbKzi5+rVq6Nu3booX7481q9fj0GDBqm0l0gkkEgkKtPFYrHJbKZVh9fYhRSIIrpDengprKKGmfTv4X3q1sXQ9aMaViaYQNT7Z3Bu8i3Upvx7eBfVr+4MHbsgApGLLwDzqV/A8Bo2ubPP3ufq6oqKFSsiOTm5tIcibEILRFsnFnn3aG3K2ssfmjhaAZVdtC+jsgvgoOVrgTH6IMYlqEDkGQrp9ql6rwPVLhFEIHKvIL+OnR68bM2/dk0+FGVlZeHWrVvw9fUt7aEIlxAD0cU/ea4M0NQHuNVF/miq5nh7RyvgYgfg347AD3XUL+OHOvL5lzqof4Maow9iXIILRL8NAnum35ex2mWodi2dYAJRvwQgX78z0bY0M//aNblQNHr0aBw6dAh37tzB8ePH0alTJ4jFYvTs2bO0hyZMdq5mFYgAoJE3YCWSPxp6q873dwAq/Hf8pqarYLf8b3oFRyDAoXj6IMYjyECk7f2hQc0yVLuWTFCBKDdTfh07PVhC7ZrcMUUPHjxAz5498eLFC3h6eqJRo0Y4efIkPD09S3togmTVNg4Q25hNIAKAFTeBpr5vf35fYjrwY6L8TTP+nPpljDsnv5rqnkfy9sXRBzEOzjME4tgJJh+IAGDLPaCKq/xnql3LIrhAtHog4OSl1zpceAFkF5h37ZpcKPrjjz9KewimxdoeBQl9zCYQAcDDHKDJTu1tvjqtff7W+/KHsfoovEYGMT5x60lgT5NMPhABwNM3wqtdUvwEGYiyn+sdigYfBy681DzfHGrX5EIR0U/BtslmFYiI5WGv7plFICKWiQtrCnHkUOEFIqKWyR1TRPSUnqp5HgUiYgKkO6ZTICImiwKRaaFQZKkoEBFTka/5SrsUiIjQyRL3UCAyIRSKLJFAApEovAu/8RMCCCMQiegIBKKd7Gg8KBCZDgpFlkYogajxZxDX7qXX0Od8CNipuTCpnRjYGA2cb6f5gOcRVYBrHeX/qlPLXf76jdH8+yAlSBCByBriZqM0z9
eBUGq3ZxCv4RMDmXIgEkrtFvZhrBqmUGRJhBSIokdAeuZ3vYYf7QvEllOd3roc0DEAqO4OjK2mpj8OmBMBVHSR/yviVNuMqSZ/fccAfn1wapZJiolQAlHX+eD8a/FcCTkh1K6IA0ZUNmg1CA+CCER2rrzHL5TaLezDWDVMochSCC0QHVhY5N2j35edD1xUczrohZdA5n+fJwfUHFcuY8DRp/Kfjz6VP39f4esyefbBNG8dJ8YkpEAUEgnp3jk8V0ROCLUrY+pfR4qPIAKRg4f8OnY8CaV2C/swVg3TDnFLIMRAdGQZ4KNhm6oG7fcDtzNVp9/OBII3AW42wC018wGg1R4g1Bm4qeFmyfE3gH2pwKs84GUuvz5IMRNaINrwJVjmM54rIyeU2v3iFHCiDb91IPoRTCDqtxKw1nKTsSIIpXYL+3CwMk4N05Yic2dtK8xAxENGnuZ5L3O1h5V8GXAtTf6vJrcy1b8xde2DFCMhBiKt7w/dCaF2C7QsmxiPoAKRxEl+HTsDCKF2C/swVg3TliIzJ46dAM61nMkHImLBXHxhFTvRLAMRsRyCC0SrBwA2/LcUmSvaUmTmOLcACkTEpFm1mUKBiJg0QQYibe8PC0ZbisycdPtUCkTEtOXnUCAiJosLqAVx8zEUiEwEbSkyc+xZsuaZJhqIxBwwNEz+EGs4Fb6pDzCpBlBWw9bhsvby+U19iq8PYhwF/8SZTSASUe1aHHHzsWYTiLqWN//apS1FlspEAxEADK4I/Fjv7fOlScrzy9oD21sAViKgqa/6uy6viQQaecsPzgveJL87syF90F3Hi9HrNM3zTCgQAUDnAODr6m+fC6F2i7orOTEMu3/eLAIRYBm1S1uKLJFAAhEX1pTH4An5j4kFImKZpPvmm0UgshS0pcjSCCUQ1ewMceRQHisgv7aFup8LPcwBWu8BGnoDK26qX0avw8CgUODYE9VvK3z68LbVbx2IgQQRiDiIGg3Wa9h/3gMe/FdvQqldUsxkBRpnmVogmn1ZXr/mXLsUiiyJkAJRu2mQJe6BuEqM3qshZaqbVd+3/7H8ocmjHGDaJeP1QaGoBAklELWeBFHlFnoNXSbA2iWlQxCByFq/D66Nd+VXmdbEHGqXdp9ZCqEFonPr/7t7NCF6EFIgiugO6eGlPFeEWDJBBCIbe4hjJ/BcA/NFocgSCDEQbZ/KY0WIRRNaINo6ESxpP8+VIZZKMIGo98/g3AJ4roX5olBk7kRWAg1EdAdVogchBqIi3h+EvE9QgcgzFFL6cqqCQpGZEzcbZTaByMVG87wyEiDYSfN8axFQxVX+rybBTvLl8O2DFBM7V7MOREKoXSv6S1DsBBeIfhuk/Tp2OhBC7Rb2YawapreCmeP8a5lFIAKALU2BIDVvjiAnILkLkNRZfp0LdXa2AC53kP+rzuCK8tcnd+HfBykeVm3jzDYQCaV2f6rLb/xEN4IMRNreHzoQSu0W9mGsGqZQZOake+eYRSACAAdroKa76vRwd8DJWv5ztK+aMXNAIy/5z4285M/fV/g6J559cBquvkqMwNreLAMRIIzaFXHqX0eMg/MMMbtABAindgv7MFYNUygyc+zeeS1zTScQAcCBVGDHA9Xp2x8Am+8Bl18Cc9S812UMGHsOSEqX/ytTM5Q5/8pfv/kevz4YHSJVbAq2TTbLQAQIo3ZlDFiYaPi6EPXErSeZXSAChFO7hX0Yq4bpOkUWy7QCEQCMPQu8lqpOfy0Fuh7Q/tqF1+QPTS68BGpt1Txflz5IMUlP1TzPhAMRIJzaXXsbGFVVexvCD3t1z+wCESCc2i3sI9zdODVMW4oskjACERdQi8/gCZEz8UBELIN0x3SzC0TmjEKRxRFIIAqNgrj5WD4rQIhgApEovAu/8RPLkf9G8zwKRIJDociiCCgQdVsEdl/b8U6EaCCUQNT4M4hr9+K3DoQIIRCJ6Aia91EoshgCC0TJh+V3jyZEH0IKRNEjID3zO7/1IJZNEIHIGuJmo3iugPmiUGQRBBiINo7SevdobcraAwdbyR9+9urb/FAHuNoRaOevfn47f/n8H+oUXx/EyIQWiA4shOzCJr1WwcuWatfiCSUQdZ0Pzl+/4zrjG5h/7VIosgDCDET5fFYFADAoFGjkLX8MClWdX9kFGF4ZCHMBZkaoX8asCPn84ZXl7YujD2JEQgxERbw/1OkQQLVr0YQUiEIiId07R6/hh5cx/9o12VC0ePFiVKhQAba2tqhbty5Onz5d2kMSJFGjwWYViADg6BOgQCZ/HHuiOv9eNnAnS/7znkfql7H7v+l3suTti6MPYiTWtmYRiADg4guqXYsltEC04csirmOnyhJq1ySPslq3bh1GjRqFZcuWoW7duliwYAFiYmKQlJQELy+v0h6eoIgqtzCrQAQA+x8Dwf/ttXio5kzX7AKgxhYgwAFITFe/jK9OA/FJ8jdZtpq9ePr2EU5XBC424tgJ4FzLmXwgAoAzL4RXu6QECDEQ3TwE+FTRazU67AOupJl37ZrklqL58+fj008/xcCBA1GlShUsW7YM9vb2+N///lfaQxMc6eGlZhWICj3MUf+mKZRdUPSbJjFd/RvTmH0Qw3FuAWYRiApR7VoYF19hBiIenr4x/9o1uS1FeXl5OHfuHMaPH6+YJhKJ0Lx5c5w4cUKlfW5uLnJzcxXP09Pl/5uvXr2CVKp6eWSxWAxXV1el1wiJq6srMjIy1I79fWKxGAX3k8Bc1Oyc/Y+oenuII7pDeuAXyC7vA7S05UIjIW74CWRnNkN24nfAJURz23I1II4eCXbtKKSHlgBOFVTb2JeFlR7rEmQFSG2LbFoqgqyAjAyoXZeMjAwAAON5LxBLr2Hp4WWQZb/RXJsiK4ijvgDnVQPSLdPAnj7SWsei+gMgCm0K6Y75YCn/am9bxPvDXGqY6ld3+tYvi/oaePUCBbtmAlZugIub+sZ2LrCKGQ/kW6Hgn4mA1FpzbVrbQtxiDDhbX0g3TQTj+f4wl/oFjFjDzMQ8fPiQAWDHjx9Xmj5mzBhWp04dlfaTJ09mkG/OoAc9SvVx//59XjVPNUwPITyofulh6g9daphjzLRuZfno0SOULVsWx48fR/369RXTx44di0OHDuHUqVNK7d//liKTyfDy5UuUKVMGHN3anJQAxhgyMzPh5+cHkUj/PdZUw6Q0Uf0SU6dPDZvc7jMPDw+IxWI8eaJ8aPqTJ0/g4+Oj0l4ikUAikShNc3V1Lc4hEqLCxcWF92uphklpo/olpk7XGja5A61tbGwQERGBffv2KabJZDLs27dPacsRIYQQQog+TG5LEQCMGjUK/fv3x4cffog6depgwYIFyM7OxsCBA0t7aIQQQggxUSYZij766CM8e/YMkyZNwuPHj1GzZk3s3LkT3t7epT00QgghhJgokzvQmhBCCCGkOJjcMUWEEEIIIcWBQhEhhBBCCCgUEUIIIYQAoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEUIIIYQAoFBECCGEEAKAQhEhhBBCCAAKRYQQQgghACgUEULMyJ07d8BxH
C5evFjaQyFmqEmTJhg5cmRpD4MUIwpFRG8ymUzQ/Q0YMAAcx2HWrFlK0zdv3gyO43RejqYPwISEBLi6uuo1JqFhJfw7ZDKpzm05jtP6iIuLK76B6kBb8DL1P5rSki0LvftbtmwZnJycUFBQoJiWlZUFa2trNGnSRKntwYMHwXEcbt26ZYSRqmfOnxGWyqq0B0BMj0gkwvLly5Gamlrsffn6+mLIkCF6v87W1hazZ8/GkCFD4ObmVgwjM22cSISCbVPBHl7S3MjOFVZt4wBrexRsmwyka/l9W9tCHDsBnFsApNungj1LftuXRxCsOs/VeWzv1tW6deswadIkJCUlKaY5OjrqvCyiH7EI6HsYuJ6uPL2SCzCpBvAgB5h4Ach9L+NKxMC0cKCcPTDlEpD03usBoGcQ0CUA2HQPWHtbvsxfI/UbX3R0NLKysnD27FnUq1cPAHDkyBH4+Pjg1KlTePPmDWxtbQEABw4cQEBAAIKDg/XqgzEGqVQKKyv682iJaEsR4SU1NRV3794t9gff4NW8eXP4+Phg5syZGtts2rQJVatWhUQiQYUKFTBv3jy+/x1YunQpgoODYWNjg7CwMPz6669K8zmOw9KlSxEbGws7OzsEBQVh48aNvPszBvbwEvD4mvpH5lNYtRoPiG1QkNAHSNqnue3LOxA3/z9wruUg/XUg2L9/K81nz2/rNS4fHx/Fw8XFBRzHKZ57eXlh/vz5KFeuHCQSCWrWrImdO3dqXJZUKsXHH3+MSpUq4d69ewCALVu2oFatWrC1tUVQUBCmTJmitOWB4zj88ssv6NSpE+zt7REaGoq///6b1//xq1ev0K9fP7i5ucHe3h6xsbG4efOmYn7hFoXNmzcjNDQUtra2iImJwf3793n1ZwxJGcCFl8qP9gFAqAsQ7Qv42avOL2svnxfqAnQIUJ1/6RUwojJQ3kn+76VXqsFLF2FhYfD19cXBgwcV0w4ePIgOHTogMDAQJ0+eVJoeHR2N3NxcfPnll/Dy8oKtrS0aNWqEM2fOKLXjOA47duxAREQEJBIJjh49iuzsbPTr1w+Ojo7w9fU16PMBMM3PCEtEoYiYJbFYjO+++w4//vgjHjx4oDL/3Llz6N69O3r06IF///0XcXFxmDhxIhISEvTu66+//sKIESPwf//3f7hy5QqGDBmCgQMH4sCBA0rtJk6ciC5duuDSpUvo3bs3evTogcTERL6rWHwcPGDVbyUgcULB6gHAyzua29rYQ9z7Z3CeoZD+Ngjs0b/FOrSFCxdi3rx5+P7773H58mXExMSgffv2SkGjUG5uLrp164aLFy/iyJEjCAgIwJEjR9CvXz+MGDEC165dw/Lly5GQkIAZM2YovXbKlCno3r07Ll++jNatW6N37954+fKl3uMdMGAAzp49i7///hsnTpwAYwytW7dGfn6+ok1OTg5mzJiB1atX49ixY0hLS0OPHj30/88xEsZUpx3477tJZj5wUc1/w4WX8nnvtn2XjAFHn8p/PvpU/pyv6OhopffWgQMH0KRJE0RFRSmmv379GqdOnUJ0dDTGjh2LTZs2YdWqVTh//jxCQkIQExOj8vscN24cZs2ahcTERFSvXh1jxozBoUOHsGXLFuzevRsHDx7E+fPneY3Z7D4jzBjHmLq3ACHaxcXF4e7du8XeT/ny5fU+hmTAgAFIS0vD5s2bUb9+fVSpUgUrVqzA5s2b0alTJzDG0Lt3bzx79gy7d+9WvG7s2LHYtm0brl69CkB+vMDx48dhY2OjtPyCggLY2toiLS0NANCwYUNUrVoV8fHxijbdu3dHdnY2tm3bBkD+LfCzzz7D0qVLFW3q1auHWrVqYcmSJXqtn7Hkx3eRb815V3EEIp8qsB68idcYExISMHLkSMX/ddmyZTFs2DB88803ijZ16tRB7dq1sXjxYty5cweBgYE4cuQI4uLikJubi3/++QcuLi4A5FsQmzVrhvHjxyte/9tvv2Hs2LF49OgRAPnvasKECZg2bRoAIDs7G46OjtixYwdatWql6MPOzg4ikfL3ytevX2P48OFYsGABbt68iYoVK+LYsWNo0KABAODFixfw9/fHqlWr0K1bNyQkJGDgwIE4efIk6tatCwC4fv06KleujFOnTqFOnTq8/t8MUXurPOS8L9gJeJUHvMxV/zp3CeBmA9zKVD/fWgSEOgM3M4B8GRDuDpxpp//4fvnlF0VNvH79Gu7u7nj06BH27t2LZcuW4dChQ9i/fz+aNWuGO3fuIDQ0FAkJCejVqxcAID8/HxUqVMDIkSMxZswYxRalzZs3o0OHDgDkxymVKVMGv/32G7p16wYAePnyJcqVK4fBgwdjwYIFAMz/M8IS0ZYiYtZmz56NVatWqXzbSkxMRMOGDZWmNWzYEDdv3oRU+vaAid69e+PixYtKj6lTp+q0rPf7rF+/vspzQX0LLK4tRCLjHJuRkZGBR48e6fR/3bNnT2RnZ2P37t2KQAQAly5dwtSpU+Ho6Kh4fPrpp0hNTUVOTo6iXfXq1RU/Ozg4wNnZGU+fPlXqY926dSq18eGHHyrmJyYmwsrKShF2AKBMmTIICwtTGq+VlRVq166teF6pUiW4uroKqzYgDzuaAhEgn6cpEAHyIHQtTf6vIZo0aYLs7GycOXMGR44cQcWKFeHp6YmoqCjFcUUHDx5EUFAQ0tPTkZ+fr1Qz1tbWqFOnjsr/77u/u1u3biEvL0/pd+fu7o6wsDCV8VjUZ4QFoCPJiFmLjIxETEwMxo8fjwEDBuj9ehcXF4SEhChN8/LyMtLoBKTYApE1xM1GGXu0RWrdujV+++03nDhxAk2bNlVMz8rKwpQpU9C5c2eV1xQeoAvI/3C+i+M4lbMg/f39VWrDzs7OGMMnWoSEhKBcuXI4cOAAXr16haioKACAn58f/P39cfz4cRw4cEDp964LBwcHXuOxmM8IC0FbiojZmzVrFrZu3YoTJ04oplWuXBnHjh1Tanfs2DFUrFgRYrFYr+VrWlaVKlWUpr17EGjh88qVK+vVV7EozkDUdT44/1pGGaazszP8/Px0+r8eOnQoZs2ahfbt2+PQoUOK6bVq1UJSUhJCQkJUHu/vCjNU5cqVUVBQgFOnTimmvXjxAklJSUrjLSgowNmzZxXPk5KSkJaWJozaEKjo6GgcPHgQBw8eVDoVPzIyEjt27MDp06cRHR2tOLD53ZrJz8/HmTNnVGrmXcHBwbC2tlb63b169Qo3btzgNV6T/4ywILSliPDi6+trMv1Uq1YNvXv3xqJFixTT/u///g+1a9fGtGnT8NFHH+HEiRP46aefeO27HzNmDLp3747w8HA0b94cW7duxZ9//om9e/cqtduwYQM+/PBDNGrUCGvWrMHp06exYsUKg9ePL84jCOz90+5t7AEfDX8s3j/tXibV3FZkBXGzUeD8a0F2+leIG3xslDGPGTMGkydPRnBwMGrWrImVK1fi4sWLWLNmjUrb4cOHQyqVom3bttixYwcaNWqESZMmoW3btggICEDXrl0hEolw6dIlXLlyBdOnTzfKGAuFhoaiQ4cO+PTT
T7F8+XI4OTlh3LhxKFu2rOLYFUC+VWr48OFYtGgRrKys8MUXX6BevXqlcjwRID9VXuj9REdHY9iwYcjPz1dsKQKAqKgofPHFF8jLy0N0dDQcHBwwdOhQjBkzBu7u7ggICMCcOXOQk5ODQYMGaVy+o6MjBg0ahDFjxqBMmTLw8vLCt99+yzs4m+pnhEVihOhJKpUKur/+/fuzDh06KE1LSUlhNjY27N2S37hxI6tSpQqztrZmAQEBbO7cuUqviYqKYiNGjFBZ/sqVK5mLi4vStCVLlrCgoCBmbW3NKlasyFavXq00HwBbvHgxa9GiBZNIJKxChQps3bp1eq2XMcmkBSbR3/v/11KplMXFxbGyZcsya2trVqNGDbZjxw7F/JSUFAaAXbhwQTFt3rx5zMnJiR07dowxxtjOnTtZgwYNmJ2dHXN2dmZ16tRh8fHxivYA2F9//aU0DhcXF7Zy5UqNfRR6v2ZevnzJ+vbty1xcXJidnR2LiYlhN27cUFm/TZs2saCgICaRSFjz5s3Z3bt39f/PMoKCkn1rswIZv9cV/g4qVaqkNP3OnTsMAAsLC1NMe/36NRs+fDjz8PBgEomENWzYkJ0+fVox/8CBAwwAe/XqldKyMjMzWZ8+fZi9vT3z9vZmc+bMUfn9mvNnhKWis88IKQEcx+Gvv/5Cx44dS3soREDeP7uOWC76jBAGOqaIEEIIIQQUigghhBBCANDFGwkhhBBCANCWIkIIIYQQABSKCCGEEEIAUCgihBBCCAFAoYgQQgghBACFIkIIIYQQABSKCCGEEEIAUCgihBBCCAFAoYgQQgghBACFIkIIIYQQABSKCCGEEEIAUCgihBBCCAFAoYgQQgghBACFIkIIIYQQABSKCCGEEEIAAP8P7w/uDoRpkpEAAAAASUVORK5CYII=",
+ "text/plain": [
+ "<Figure size 600x400 with 6 Axes>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "CHECKPOINTS = [300, 600, 900, 1200, 1500, 3000]\n",
+ "SEEDS = [0, 14, 41, 53, 96]\n",
+ "\n",
+ "plot_surprisal_differences_checkpoints(seeds=SEEDS, checkpoints=CHECKPOINTS)\n",
+ "plt.savefig(f\"figures/hop_surprisals.pdf\", format=\"pdf\", bbox_inches=\"tight\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkUAAAGhCAYAAABvQ8DIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB+I0lEQVR4nO3dd3wT9f8H8FeS7t0CXdACHZQhoyB7b8qQjYACIgoiIspPEJRREARBEFGm8mUo8mUpfBFB9pKNILIpe5RNNxSafH5/xEZCRpNrSi/J6/l45NHm7nP3+Vz7TvvK3eVOIYQQICIiInJyysIeABEREZEcMBQRERERgaGIiIiICABDEREREREAhiIiIiIiAAxFRERERAAYioiIiIgAMBQRERERAWAoIiIiIgLAUEREREQEgKFIVk6ePImuXbsiKioKXl5eKFq0KBo0aIB169YZbX/69Gm0atUKPj4+CAoKQq9evXD37l2DdhqNBlOmTEHp0qXh4eGBSpUqYdmyZRaPa8+ePUhISEDx4sXh4eGByMhItGvXDj/99JOuTVZWFhITE7Fjxw6rt5scz59//olXXnkFQUFB8PLywksvvYSZM2catNu7dy/q1asHLy8vhIaG4v3330dGRoZBu+zsbHz88ccIDw+Hp6cnatasic2bN1s8nnXr1qFhw4YIDg6Gl5cXoqKi0K1bN2zcuFHX5ubNm0hMTMSxY8ckbTM5hiNHjqBVq1bw8/ODr68vWrRoYbImWL8OSJBsrF+/XrRs2VIkJiaK+fPnixkzZoj69esLAGLevHl6ba9duyaKFi0qoqOjxddffy0mTpwoAgMDReXKlUV2drZe2xEjRggA4u233xbz588Xbdq0EQDEsmXL8hzTihUrhEKhEPHx8eKLL74Q8+fPFyNHjhR169YVjRo10rW7e/euACDGjh1rk58F2a/ff/9duLm5iZo1a4rp06eL+fPni48//lgMGzZMr93Ro0eFh4eHiI+PF3PmzBGffvqpcHd3F61atTJYZ/fu3YWLi4v46KOPxLx580Tt2rWFi4uL2L17d57jmTp1qgAgGjZsKKZPny7mzp0rPvroI1GlShXRp08fXbtDhw4JAGLhwoX5/RGQnTpy5Ijw8PAQsbGx4ssvvxRTpkwRpUqVEn5+fuLMmTN6bVm/jomhSOZycnJE5cqVRVxcnN70gQMHCk9PT3HlyhXdtM2bNxsEqOvXrwtXV1cxaNAg3TSNRiPq168vSpQoIXJycsz2X758eVGhQgWDoCWEELdv39Z9z1BEQgiRmpoqQkJCRMeOHYVarTbbNiEhQYSFhYnU1FTdtO+++04AEL///rtu2oEDBwQAMXXqVN20R48eiejoaFG7dm2zfTx9+lT4+fmJ5s2bG53/bA3znwq1bt1aBAYGinv37umm3bx5U/j4+IhOnTrptWX9OiaGIjvQtm1bERISojctODhYdO3a1aBtmTJlRNOmTXXPZ82aJQCIkydP6rX76aefBIA836m4u7uLN954w2ybS5cuCQAGj2cD0unTp0Xnzp1FYGCgcHd3F9WqVRNr167VW8/ChQsFALFz507Rv39/ERQUJHx9fUWvXr3EgwcP9NoeOnRItGjRQhQpUkR4eHiIUqVKib59+5odJxW8OXPmCADi1KlTQgghMjIyjIaj1NRU4eLiYrD3KDs7W/j4+Ih+/frppg0bNkyoVCq9fz5CCPH5558LAOLq1asmx5OcnCwAiMTERLPj3r59u9EafvYfzP79+0XLli2Fn5+f8PT0FA0aNBB79uzRW8/YsWMFAHH69GnRtWtX4evrK4KCgsT7778vHj16pNd206ZNom7dusLf3194e3uLMmXKiJEjR5odJxUsX19fo39X27RpI9zc3ER6eroQgvUrhOPWL88pkqHMzEzcu3cPFy5cwFdffYUNGzagadOmuvk3btzAnTt38PLLLxssW6NGDRw9elT3/OjRo/D29ka5cuUM2uXON6dkyZLYunUrrl+/brJNsWLFMGfOHABAx44d8cMPP+CHH35Ap06dAGjPlapVqxZOnz6NESNGYNq0afD29kaHDh3wyy+/GKzvvffew+nTp5GYmIjevXtj6dKl6NChA4QQAIA7d+6gRYsWuHz5MkaMGIFvvvkGr732Gvbv3292W6jgbdmyBX5+frhx4wbi4uLg4+MDPz8/DBw4EI8fP9a1+/vvv5GTk2NQw25ubqhSpYpBDZcpUwZ+fn56bXNr2Nw5FMHBwfD09MS6devw4MEDk+3KlSuH8ePHAwD69++vq+EGDRoAALZt24YGDRogLS0NY8eOxeeff46UlBQ0adIEBw8eNFhft27d8PjxY0yaNAmtW7fGzJkz0b9/f938kydPom3btsjOzsb48eMxbdo0vPLKK/jjjz9MjpEKXnZ2Njw9PQ2me3l54cmTJzhx4gQA1q9D129hpzIyNGDAAF3SVyqVokuXLnp7SnJ3ky5ZssRg2WHDhgkA4vHjx0II7TucqKgog3aZmZkCgBgxYoTZsSxYsEAAEG5ubqJx48Zi9OjRYvfu3Qbv/s0dPmvatKmoWLGibkxCaA/h1alTR8TGxuqm5e4pqlatmnjy5Ilu+pQpUwQA3Z6lX375RQAQhw4dMjt2evEqVaokvLy8hJeXlxg8eLBYvXq1GDx4sAAgunfvrmu3cuVKAUDs2rXLYB1du3YVoaGhuucVKlQQTZo0MWh38uRJAUDMnTvX7JjGjBkjAAhvb2+RkJAgJk6cKI4cOWLQztThB41GI2JjY0XLli2FRqPRTc/KyhKlS5fWO7SR+077lVde0VvHu+++KwCIv/76SwghxFdffSUAiLt375odO71YFStWFGXKlNE7rSA7O1tERkYKAGLVqlVCCNavI9cv9xTJ0AcffIDNmzdj8eLFSEhIgFqtxpMnT3TzHz16BABwd3c3WNbDw0OvzaNHjyxqZ8qbb76JjRs3olGjRtizZw8+++wz1K9fH7Gxsdi7d2+e2/LgwQNs27YN3bp1Q3p6Ou7du4d79+7h/v37aNmyJc6fP48bN27oLdO/f3+4urrqng8cOBAuLi747bffAAABAQEAgF9//RVPnz7Ncwz04mRkZCArKwu9e/fGzJkz0alTJ8ycORMDBgzAf//7X5w/fx5A3jX8bF3mt4bHjRuHn376CfHx8fj999/x6aefolq1aqhatSpOnz6d5zYdO3YM58+fR8+ePXH//n1dDWdmZqJp06bYtWsXNBqN3jKDBg3Sez548GAAMKjhtWvXGixLhefdd9/FuXPn0K9fP5w6dQonTpxA7969kZycDED/7yrA+nXE+mUokqGyZcuiWbNm6N27N3799VdkZGSgXbt2usNHubt3s7OzDZbNPUSR28bT09Oidua0bNkSv//+O1JSUrBr1y4MGjQIV65cQdu2bXHnzh2zyyYlJUEIgdGjR6NYsWJ6j7FjxwKAwTpiY2P1nvv4+CAsLAyXL18GADRs2BCdO3fGuHHjULRoUbRv3x4LFy40up30YuXWU48ePfSm9+zZEwCwb98
+vXamavPZurRFDffo0QO7d+/Gw4cPsWnTJvTs2RNHjx5Fu3bt9A7rGZMb5Pr06WNQw99//z2ys7ORmpqqt8zzNRwdHQ2lUqmr4VdffRV169bFW2+9hZCQEHTv3h0rVqxwuH8w9uadd97BJ598gp9++gkVKlRAxYoVceHCBQwfPhyA9m8RwPp15Pp1KewBUN66dOmCAQMG4Ny5c4iLi0NYWBgA6N69PCs5ORlBQUG6dyZhYWHYvn07hBBQKBR67QAgPDzc4nF4eXmhfv36qF+/PooWLYpx48Zhw4YN6NOnj8llcl8kH330EVq2bGm0TUxMjMVjAACFQoFVq1Zh//79WLduHX7//Xe8+eabmDZtGvbv36/7w0UvXnh4OE6ePImQkBC96cHBwQCAhw8fAkCeNfxsXYaFhRnsTXx2WWtq2M/PD82bN0fz5s3h6uqKxYsX48CBA2jYsKHJZXJreOrUqahSpYrRNnnV3LOvPUD7j3DXrl3Yvn071q9fj40bN2L58uVo0qQJNm3aBJVKZfE2kW1NnDgRH330EU6ePAl/f39UrFgRn3zyCQCgTJkyAFi/jly/3FNkB3J3r+am+eLFi6NYsWI4fPiwQduDBw/qFX6VKlWQlZVlsJv1wIEDuvlS5J5gmPvCfv5FkysqKgoA4OrqimbNmhl9+Pr66i2T+84mV0ZGBpKTk1GqVCm96bVq1cLEiRNx+PBhLF26FCdPnsR///tfSdtDtlGtWjUAMPgncPPmTQDak/IB4KWXXoKLi4tBDT958gTHjh0zqOFz584hLS1Nr+2LquHo6GgA2n9Ipmr42cO9gGENJyUlQaPR6NWwUqlE06ZNMX36dJw6dQoTJ07Etm3bsH37dknbQ7YTGBiIevXqoWLFigC0HyAoUaIEypYtC4D1Czhu/TIUyYixQ1FPnz7FkiVL4OnpifLly+umd+7cGb/++iuuXbumm7Z161acO3cOXbt21U1r3749XF1dMXv2bN00IQTmzp2L4sWLo06dOmbHtHXrVqPTc48tx8XFAdDuRQKAlJQUvXbBwcFo1KgR5s2bZ/RdlbErcM+fP1/vXKE5c+YgJycHCQkJALR7G3IPJebK/cPCQ2iFq1u3bgCABQsW6E3//vvv4eLigkaNGgEA/P390axZM/z4449IT0/Xtfvhhx+QkZGhV8NdunSBWq3G/PnzddOys7OxcOFC1KxZExERESbHk5WVpTtk97wNGzYA+LeGvb29ARjWcLVq1RAdHY0vv/zS6NWKjdXwrFmz9J5/8803AKCrYWOfJGINy9Py5ctx6NAhfPDBB1Aqtf8yWb+OW788fCYjAwYMQFpaGho0aIDixYvj1q1bWLp0Kc6cOYNp06bp7eL85JNPsHLlSjRu3BhDhgxBRkYGpk6diooVK6Jv3766diVKlMAHH3yAqVOn4unTp6hevTrWrFmD3bt3Y+nSpXnu5mzfvj1Kly6Ndu3aITo6GpmZmdiyZQvWrVuH6tWro127dgCgC23Lly9HmTJlEBQUhJdeegkvvfQSZs2apXvX9fbbbyMqKgq3b9/Gvn37cP36dfz11196fT558gRNmzZFt27dcPbsWcyePRv16tXDK6+8AgBYvHgxZs+ejY4dOyI6Ohrp6en47rvv4Ofnh9atW9vq10ESxMfH480338R//vMf5OTkoGHDhtixYwdWrlyJkSNH6h0qmDhxIurUqYOGDRuif//+uH79OqZNm4YWLVqgVatWunY1a9ZE165dMXLkSNy5cwcxMTFYvHgxLl++bBC+npeVlYU6deqgVq1aaNWqFSIiIpCSkqJ7DXTo0AHx8fEAtO+oAwICMHfuXPj6+sLb2xs1a9ZE6dKl8f333yMhIQEVKlRA3759Ubx4cdy4cQPbt2+Hn5+fwa14Ll26hFdeeQWtWrXCvn378OOPP6Jnz56oXLkyAGD8+PHYtWsX2rRpg5IlS+LOnTuYPXs2SpQogXr16tnq10FW2rVrF8aPH48WLVqgSJEi2L9/PxYuXIhWrVphyJAhem1Zvw5av4X62TfSs2zZMtGsWTMREhIiXFxcRGBgoGjWrJnBRQ5znThxQrRo0UJ4eXmJgIAA8dprr4lbt24ZtFOr1eLzzz8XJUuWFG5ubqJChQrixx9/tHhM3bt3F9HR0cLT01N4eHiI8uXLi08//VSkpaXptd27d6+oVq2acHNzM/h4/oULF0Tv3r1FaGiocHV1FcWLFxdt27bVfcRVCMOLNwYGBgofHx/x2muvifv37+va/fnnn6JHjx4iMjJSuLu7i+DgYNG2bVtx+PBhi7aJCtaTJ09EYmKiKFmypHB1dRUxMTHiq6++Mtp29+7dok6dOsLDw0MUK1ZMDBo0yKCuhNBeAfijjz4SoaGhwt3dXVSvXl1s3Lgxz7E8ffpUfPfdd6JDhw6iZMmSwt3dXXh5eYn4+HgxdepUgyu1r127VpQvX164uLgYfLz56NGjolOnTqJIkSLC3d1dlCxZUnTr1k1s3bpV1yb3I82nTp0SXbp0Eb6+viIwMFC89957ehe/27p1q2jfvr0IDw8Xbm5uIjw8XPTo0UOcO3cuz22igpOUlCRatGghihYtKtzd3UXZsmXFpEmTjF7RXwjWryPWr0KI545DEBWSRYsWoW/fvjh06JDRC1MSyV1iYiLGjRuHu3fvomjRooU9HCKrsH55ThERERERAIYiIiIiIgAMRUREREQAAJ5TRERERATuKSIiIiICwFBEREREBMAJL96o0Whw8+ZN+Pr6mrwsOpEtCSGQnp6O8PBw3RVx84M1TC8S65fsnTU17HSh6ObNm2Yvq05UUK5du4YSJUrkez2sYSoMrF+yd5bUsNOFotybj167dg1+fn6FPBpyBmlpaYiIiDC48a1UrGF6kVi/ZO+sqWGnC0W5u2v9/Pz4gqQXylaHCljDVBhYv2TvLKlhnmhNREREBIYiIiIiIgAMRUREREQAGIqIiIiIADAUEREREQFgKCIiIiICwFBEREREBIChiIiIiAgAQxERERERAIYiIiIiIgAyDEW7du1Cu3btEB4eDoVCgTVr1ujNf+ONN6BQKPQerVq1KpzBEhERkcOQXSjKzMxE5cqVMWvWLJNtWrVqheTkZN1j2bJlL3CERERE5Ihkd0PYhIQEJCQkmG3j7u6O0NDQFzQiIiIicgayC0WW2LFjB4KDgxEYGIgmTZpgwoQJKFKkiNG22dnZyM7O1j1PS0sDAKjVaqjV6hcyXnJu+a0z1jAVphddvwqFAkql7A5i6NFoNBBCFPYwyELW1LDdhaJWrVqhU6dOKF26NC5cuIBPPvkECQkJ2LdvH1QqlUH7SZMmYdy4cQbTz549Cx8fnxcxZHJyGRkZ+VqeNUyF6UXWr4eHB2JiYjBv3jwkJyfnq9+CEhYWhgEDBiApKQmPHz8u7OGQBaypYYWQcdxVKBT45Zdf0KFDB5NtLl68iOjoaGzZsgVNmzY1mG/sXUpERAQePHgAPz+/ghg2kZ60tDQEBQUhNTVVUs2xhqkwvej6ValUSExMxJUrV/I17oJSsmRJJCYmci
+tHbGmhu1uT9HzoqKiULRoUSQlJRkNRe7u7nB3dzeYrlKpjO5ZIrK1/NYZa5gKE+vXOHseu7Ox5ncl7wO3Frh+/Tru37+PsLCwwh4KERER2THZ7SnKyMhAUlKS7vmlS5dw7NgxBAUFISgoCOPGjUPnzp0RGhqKCxcuYPjw4YiJiUHLli0LcdRERERk72QXig4fPozGjRvrng8dOhQA0KdPH8yZMwfHjx/H4sWLkZKSgvDwcLRo0QKfffaZ0d2zRERERJaSXShq1KiR2Y86/v777y9wNEREROQs7P6cIiIiIiJbYCgiIiIiAkMREREREQCGIiIiIiIADEVEREREABiKiIiIiAAwFBEREREBYCgiIiIiAsBQRERERASAoYiIiIgIAEMREREREQCGIiIiIiIADEVEREREABiKiIiIiAAwFBEREREBYCgiIiIiAsBQRERERASAoYiIiIgIAEMRkcPRaDSFPYQ82cMYicj5uBT2AIjItpRKJebNm4fk5OTCHopRYWFhGDBgQGEPg4jIAEMRkQNKTk7GlStXCnsYRER2hYfPiIiIiMBQRERERASAoYiIiIgIAEMREREREQCGIiIiIiIADEVEREREABiKiIiIiAAwFBEREREBYCgiIiIiAsBQRERERASAoYiIiIgIgAxD0a5du9CuXTuEh4dDoVBgzZo1evOFEBgzZgzCwsLg6emJZs2a4fz584UzWCIiInIYsgtFmZmZqFy5MmbNmmV0/pQpUzBz5kzMnTsXBw4cgLe3N1q2bInHjx+/4JESERGRI3Ep7AE8LyEhAQkJCUbnCSEwY8YMjBo1Cu3btwcALFmyBCEhIVizZg26d+/+IodKREREDkR2ocicS5cu4datW2jWrJlumr+/P2rWrIl9+/YZDUXZ2dnIzs7WPU9LSwMAqNVqqNXqgh80Ob381pm1NaxSqfLV34vC1599YP0ax/q1H9b8ruwqFN26dQsAEBISojc9JCREN+95kyZNwrhx4wymnz17Fj4+PrYfJNFzMjIy8rW8NTXs4eGBmJiYfPX3oly6dImHve0A69c41q/9sKaG7SoUSTFy5EgMHTpU9zwtLQ0RERGIi4uDn59fIY6MnEXuO2OpHLWGS5cuXdhDIAuwfo1j/doPa2rYrkJRaGgoAOD27dsICwvTTb99+zaqVKlidBl3d3e4u7sbTFepVHazm5bsW37rzFFr2J7H7kxYv8bZ89idjTW/K9l9+syc0qVLIzQ0FFu3btVNS0tLw4EDB1C7du1CHBkRERHZO9ntKcrIyEBSUpLu+aVLl3Ds2DEEBQUhMjISH3zwASZMmIDY2FiULl0ao0ePRnh4ODp06FB4gyYiIiK7J7tQdPjwYTRu3Fj3PPdYdJ8+fbBo0SIMHz4cmZmZ6N+/P1JSUlCvXj1s3LgRHh4ehTVkIiIicgCyC0WNGjWCEMLkfIVCgfHjx2P8+PEvcFRERETk6OzqnCIiIiKigsJQRERERASGIiIiIiIADEVEREREABiKiIiIiAAwFBEREREBYCgiIiIiAsBQRERERASAoYiIiIgIAEMREREREQCGIiIiIiIADEVEREREABiKiIiIKA9qTWGPIG+2GKNL/lfhWDQaDZRKeWdFexgjEZGzExo1FEpVYQ/DLEvHqFICvXYBZ1JfwKAkKOsP/NAg/+thKHqOUqnEvHnzkJycXNhDMSosLAwDBgwo7GEQEVEeFEoVcn4eDkXZplCWaw71rjkQZ7eZXUYZ3xmq6j2hPvQTNEdXm19/XBOoGgyE5vRmaPbMN982sipUzYZDXPsT6q3TAU0OFEWj4NJpqsXbcyYVOPrA4uZ2SXIoSktLw+zZs7F9+3bcuXMH8+bNQ40aNfDgwQMsWrQIr7zyCmJiYmw51hcmOTkZV65cKexhEBGRndMFonWjIY79bLatsv472kC0/Wtods81v94qnbSB6MgKaH4bD0CYbhvbEKpmwyCSdkK9aiigeQqYXcJ5SToGc/36dcTHx2PMmDG4fv06jh8/joyMDABAUFAQ5s2bh2+++camAyUiIrI3VgWixkMsD0TtPrM8EHWdCZG0Sy8QSTG/DrCjFRDuZXz+VzWAkx2AdhHG57eL0M7/qobx+cW9tOvPTx/5JSkUDRs2DOnp6Th27Bh27twJIfR/IR06dMCWLVtsMkAiIiJ7pd41xyECEQDEFwHqhQD9Yg3nlfMHBpcD4vyBSdWMLz+5mnb+4HLa9s/rF6tdf376yC9JoWjTpk14//33Ub58eSgUCoP5UVFRuHbtWr4HR0REZM/yPIfITgIRAORotI8/bhvOu5oJXNYeMMLmm8aX3/TP9MsZ2vbP23M7/33kl6Rzih49eoRixYqZnJ+eni55QERERM7AngIRALTfCpxIAW5kGc7LzAEqrwUivYHTJj6h9uFBYP5ZbbjJzDGcv+0WEP3PueVS+8gvSXuKypcvj127dpmcv2bNGsTHx0seFBERkSOTQyBSFLPuw1B3HhsPK7kyc/IOK6dTjQeiXDey8t9HfkgKRR988AH++9//4osvvkBqqnZ0Go0GSUlJ6NWrF/bt24cPP/zQpgMlIiJyBLIIROEVoWo9RuomOCxJh89ef/11XLlyBaNGjcKnn34KAGjVqhWEEFAqlfj888/RoUMHW46TiIjI7skmEL2+AOLhVShCy0ndFIck+TpFn376KXr16oXVq1cjKSkJGo0G0dHR6NSpE6Kiomw5RiIiIrsnq0B09zzUW6ZB2Xep1M1xSPm6onVkZCQPkxEREeVBdoFo6dtAUCmJW6PVrRRQ2geYdQbIMHKeUKVAoGMk8MtV4PhDw/k+LsCgssClDGDF5fz1YavzjCSFoj///BP79+/Hu+++a3T+7NmzUadOHVSpUiU/YyMiIrJ7sgxET8yczWyBRqHATw2135fwBgYfeK4/AJtbAEU8gHfLAqHLDbdkUjVgYFnt97cfATuf+xi+NX2kZOdrc3QknWj96aefmr0447Zt2zBq1CjJgyIiInIEyvjODheIAEC8gHuEvIg+nidpT9GRI0cwcuRIk/Pr16+PSZMmSR4UERGRI7DqXmZ2EogA7V6dnjv/PbT1PAGg+SagQySw5qrxrRl5BLieqT189vxeImv7OJP6716l/JAUitLT0+HiYnpRpVKp+6g+ERGRs1If+snhAlEuU+cB5Tr+0Pi5RLkycoAvTtimj/gg8+0sJenwWWxsLDZt2mRy/saNG/kJNCIicnqao6vNzrfXQOSoJIWifv36Yf369Rg6dChSUlJ001NSUvDhhx9i48aN6Nevn63GSERE5HAYiORH0uGz999/H8eOHcOMGTMwc+ZMhIeHAwBu3rwJjUaDXr168aP6REREJsgiEPmH5WMLHJOkPUUKhQILFy7E1q1b8c477+Cll17CSy+9hIEDB2Lbtm1YvHgxFAqFrccKAEhMTIRCodB7lC1btkD6IiIisjVZBKKgUnBpM86qcQd7AMW9TM/3cQHK+ZtfRzl/wNvM7pjiXvnvIz/ydfHGxo0bo3HjxrYai8UqVKigd0kAcyd9ExERyYVsA
lHvRcDTLABFLB772qbar603a+9o/ywfF+BYe6CUD/DNaeDDg4bLf1UDGFwOuJyhvdv98zeGbRIK/NY8f33kl6Q9RYXNxcUFoaGhukfRokULe0hERERmySoQZacj59dEq8bvotQ+6oYYzovw1oYVAGgebnz5Fv9ML+UDRHobzq8Xkv8+8kvSLhYhBObPn48FCxbg4sWLePjQ8DN3CoUCOTlGrsltA+fPn0d4eDg8PDxQu3ZtTJo0CZGRkUbbZmdnIzv730tdpqWlAQDUajXUarVBe5VKVSBjtjVjYyd5yu/vijVMhYn1a5wlP5dnt0V2gWhJX8A32KptOXpfu3dnwXnD+adTtXtvmodrrz9kzIgj2qtYb75p/LYcC84DTcL+/V5KH8a2xZoalhSKhg8fjunTp6NKlSp4/fXXERgYKGU1ktSsWROLFi1CXFwckpOTMW7cONSvXx8nTpyAr6+vQftJkyZh3DjD46Znz56Fj4+P3jQPDw/ExMQU2Nht6dKlS3j8+HFhD4MskJGRka/lWcNUmFi/xuVVv89uiywDUeY9XSiydFv67wWOPjDdRV6Hs9Zd0z5MuZEFNNpofh159WFsW6ypYUmhaPHixejcuTNWrFghZfF8SUhI0H1fqVIl1KxZEyVLlsSKFSuMXgZg5MiRGDp0qO55WloaIiIiEBcXBz8/vxcy5oJQunTpwh4CWSj3nbFUrGEqTKxf4yytX0VcE6gaDJRfIJKwLfbA2LZYU8OSQtGjR4/QrFkzKYvaXEBAAMqUKYOkpCSj893d3eHu7m4wXaVS2c1uWmPseezOJr+/K9YwFSbWr3GWjl3ugciabbEHxrbFmu2TdKJ106ZNcejQISmL2lxGRgYuXLiAsDBeb4GIiORFc3qzrAMR6ZMUimbPno39+/fj888/x/379209JrM++ugj7Ny5E5cvX8bevXvRsWNHqFQq9OjR44WOg4iIKC+aPfPBQGQ/JIWiuLg4XLx4EaNHj0ZwcDC8vb3h5+en9/D3L5irK12/fh09evRAXFwcunXrhiJFimD//v0oVqxYgfRHRIVHaOT/CTWh0RT2EMhO2XMg8lQBqxoDf7YzfTPWIeWBUx20X42pGqRdflVj7fry00cPG91uVdI5RZ07dy6wK1bn5b///W+h9EtEL55CqULOz8Mg7l38d6J/mPZKvE+ztNdZeZRiegWeAXBpmwi4eiFn/VggNdl0W1cPqBJGQREYCfVv4yHuGj9PEQCgdIGq6VAoIqtBoeLFY8l6sghEngGSx9+6BNDhnyvhDK8I9NipP1+pAKZUA1RK7ddvTgOa53aYDasIVArSPhJKAD9fkd7HEMMPn0si6dW8aNEi2/RORJQHce8icOuU9klQKbgkjAYePcz7D753Ubh0/AJQuSFn0evAg8um27p5QfXad1AElID6h74QN/823VbpClWX6VBExEO9+Qu4tPpU0naR85JFIPIuqn3DINHRB0D6U8DXFdhu5L2GRgB77gANQ7Vfnw9EgHa5rqW06zlm5KP+1vRx7AFQzQbXceZbHCKyD9b+we+9EHD3Rc6SNywLRMViof6xn2WBKKYB1Cvfh0i/K3FjyFnJJhD1Xgi4mrnJWB4upgPRq4FAN+BCuvE2rTYDsX7AeROfiJ9/DtiaDDx8AjzINpxvTR/eLsC+NtK25VmSb/Nx9epVvPPOO4iLi0NgYCB27doFALh37x7ef/99HD16NP+jIyIC5BmIzu803dYItcxPPZL7+ByBrAKRu6/2kHI+PMg2HVYA4KkGOJWi/WrKhXTjgcjaPnJsVL+S9hSdOnUK9evXh0ajQc2aNZGUlKS7pUfRokWxZ88eZGZmYsGCBbYZJRE5L/8w7SEzOw5EgPa8h167gDNGbm9Q2Mr6Az80KOxRODbZBaIlbwBu0vcUOSrJt/kICAjA/v37oVAoEBwcrDe/TZs2WL58uU0GSETOzaXNOMvPIZJpIMp1JtX8bRLIMckyED24DISa+FiYE5N0+GzXrl0YOHAgihUrZvRTaJGRkbhx40a+B0dEhKdZDhGIyDkpIqvKMxCRUZJCkUajgZeX6d1ud+/eNXpZdyIia+X8mugwgUipAAbGaR8qE1c1aRIKjKkMFDfxJ7a4l3Z+k1Dj81U26INsR9VsuMMEoi4lHb92JR0+q1q1KtavX493333XYF5OTg7++9//olatWvkeHBGR2esQ2VEgAoBOkcDHlf59Pues/vziXsBvzQEXJdAkzPgdw5c2AOqFaE8sjV6tvbP4s/qXAb555s+vlD7IdsS1Px0iEAHOUbuS9hSNHDkSGzduxMCBA3HixAkAwO3bt7Flyxa0aNECp0+fxogRI2w6UCIiPXYWiMg5qbdOd4hA5Cwk7SlKSEjAokWLMGTIEMyfPx8A8Prrr0MIAT8/PyxZsgQNGvCjDERUQGQRiBRQ1utv1bB/vgpc/+d/3vxzhvNvZAGtNwN1Q4AF542vo+cuoF8s8Mdtw3faz69Xah9kQ5ock7PsLRB9cVxbv45cu5Iv3tirVy906tQJmzdvxvnz56HRaBAdHY2WLVvC19dG19smInqeXAJR6zFQlmtu1dA1wvCQwPO23dI+TLmZBXz2l+n5ahv0QQVPFoHI1cOqMa+6Yv7Tk45Qu1aHoqysLERERGDEiBEYNmwYOnToUADDIiIyQk6BqFo3qHfNgUvDQRI3hpyVLAKRmxdUCaMkboHjsvqcIi8vL7i4uMDb27sgxkNEZJzcAtG60RBnt0ncGHJWsglEr30HRWCkxK1wXJJOtO7cuTNWrVoFIYzc4Y1kQ2jUhT2EPNnDGEkG5BiIjv0scWPIWckqEBWLhfq38RK3xHFJOqeoe/fuePfdd9G4cWO8/fbbKFWqFDw9PQ3aVa1aNd8DJOkUShU0N09AERwH9ZYpEFf/NNteWa8/lOWaQ71rTp7vgJXxnaGq3hPqQz9Bc3S1+XHENYGqwUBoTm+GZs/8f6cXjYJLp6mWbxA5J88A7d3uHTQQFXEHAszc8NJV+e9NNU3dQyraF0h5Atw3cQ+pvPpwkXwXTLKU7ALRj/3y/aZUDrWb24etalhSKGrUqJHu+927dxvMF0JAoVBAreZegMKmCI6DeuVgi08ateQPvrL+O9pAtP1raHbPNd9/lU7aQHRkBTS/jQfw795F7mckS7i0TQRUbg4ZiKJ8gSPtAF9X4N19xj9xs7E50DAU2HkLaPq74fz+ZYDZtYH0p0C1ddo7i1vbx7c1bbM9ZJwsA9HNv/N1mw+51G5uH0fM/JisISkULVy40Da9U4FTb5li0z/4yvrvQNV4iOWBqN1nRgMRkcVcvZCz6HWHC0QAEB+k/YMPAI3DDP/oKxVAvX9uLVkvWPtc89zLqHGY9quvK1AlyPAfiyV9VAnK/7aQcYpiMVAljJJfIMonudRubh+2qmFJoahPnz626Z0KnPlDZgxEJH8568c6ZCACgN+uA2uuAlE+wBQjw9YIYPgR7Tvq+ecM/6kA2uXi/ICLGcCG69L6+Po0MLRC/reHDKlaj4G4c9ahAhEgn9rN7WP9ddvUsOTr
FOVKTk7GnTt3EBMTw0+k2RUGIrITqcmm59lxIAKAR2qgy3bzbb4+pX2YcvQBUHVd/vpYdpGhqKCIh1cdLhAB8qnd3D7ig2xTw5JPTVq7di3Kli2LEiVKoGrVqjhw4AAA4N69e4iPj8cvv/yS/9FRAZFHIFJEWn4ivtrESXpyYg9jdCh2HojIOag3THC4QOTIJO0pWrduHTp16oTatWujZ8+eSExM1M0rWrQoihcvjkWLFqFjx462GifZjEwCUWxDqJoNt3jUKiXQaxdwJtXiRV6osv7AD7yzzYsjk0CkjO8sbfzkPJ4+Nj2PgUh2JIWi8ePHo0GDBti+fTvu37+vF4oAoHbt2pg3b54txkc2JaNA1HUmxLU/oSht+cdezqSav8Q8OQm5BKJ/PoVJJIkcApEy32fQOBxJh89OnDiBbt26mZwfEhKCO3fuSB4UFQSZBaKkXdq7RxNZQ06BqPEQqA/9JG07yLnJIhC5QtV0qMQNcFySQpGXlxcyMzNNzr948SKKFCkieVBkazIMRKuGmr17tDnFvYAdrbSPcC/jbb6qAZzsALSLMD6/XYR2/lc1Cq4PsjG5BaLtX+d54dLnBXuwdp2eXAJRl+lQRFh3geX5dRy/diWFosaNG2Px4sXIyTH8p3br1i189913aNGiRb4HR7Yhz0D0VMqmAAD6xQL1QrSPfrGG88v5A4PLAXH+wKRqxtcxuZp2/uBy2vYF0QfZkBwDUR6vD2PaR7J2nZqcAlFMA6i3TLFq+PFFHL92JYWiiRMn4vr166hevTrmzZsHhUKB33//HaNGjULFihUhhMDYsWNtPVaSQFmvv0MFIgDYcxvI0Wgff9w2nH81E7icof1+803j69j0z/TLGdr2BdEH2Yirh0MEIgA4dp+167TkFohWvp/nrZ+e5wy1K+ksq7i4OOzZswdDhgzB6NGjIYTA1Knae1g1atQIs2bNQqlSpWw5TpLIqlt32EEgAoBtt4Dof45a3DDySdfMHKDyWiDSGzht4tNqHx4E5p/VvsgyjRzFs7aPeF4RuMCoEkZBEVDC7gMRABy6L7/apRdAjoHo/E6rb/PRfitwIsWxa9eiUHT8+HGULFkS/v7/7u+qUKECtmzZgocPHyIpKQkajQZRUVEoVqxYwYyUJFHvmuNQgSiXsRfMszJz8n7R5DXfFn1Q/ikCI6H+oa/dB6JcrF0n4x8Gl4TR8gtEEtx5bL62HKF2LTp8Fh8fj/Xr1+ueN2nSBFu3bgUABAYGonr16qhZsyYDkQzlebd7OwxE5FzUv413mEBEzselzTiHCETOwqJQ5Onpiaysf+Pbjh07cPu2kQN+ZFcYiMgeiLtJpmcyEJHcPc1iILIjFh0+q1y5MqZPnw6VSqU7hHbo0CF4eHiYXa5Tp075HyEVCDkEIkWxGKnDJ2IgIruQ82siA5EdsSgUzZgxA127dkW/fv0AAAqFAl9//TW+/vprk8soFAqo1WrbjJJsShaBKLwiVK3HWDXu5uGmr2jdrRRQ2geYdQbIMHICX6VAoGMk8MtV4PhDw/k+LsCgssClDGDFZWl90Askk0CkiGsiYfD65FC7sX752AAy71GK6Xl2HojkULu5fdjqPCOLQlH16tWRlJSECxcu4Pbt22jUqBE++eQTNG/e3DajoBdGNoHo9QUQD69CEVrO4rF/Xg04cBfY+dyR20ahwE8Ntd+X8AYGH3iuPwCbWwBFPIB3ywKhyw23ZFI1YGBZ7fe3H0nrg14QuQSiKp2gajBQwgb8Sy61O7tWvjaDpLDzQCSX2s3tIyXbBhsFC0PR//73P7z88suIi4tDXFwc+vTpg3bt2qFmTcvvW2Vrs2bNwtSpU3Hr1i1UrlwZ33zzDWrUMHGZTAIgs0B09zzUW6ZB2Xep1M3REaaHZjMvog+ygJwCUbvPoDm9GaryLSVsiBZr10nJIhApoKzXX+IGOG7tWhSKOnbsiB9++AE9e2pvfrhz50507NixQAdmzvLlyzF06FDMnTsXNWvWxIwZM9CyZUucPXsWwcHBhTYuOZNdIFr6NhBUyqpt+OSI4TsJQDut585/d7E+TwBovgnoEAmsuWp8a0YeAa5nanfjSu2DCpjcAtGRFdD8uTJfoUgutfvu/n/fkVMBk0sgaj0GynLSj/bIpXZz+ziTapsatigU+fr6IiUlRff88uXLyMjIyH/vEk2fPh1vv/02+vbtCwCYO3cu1q9fj//85z8YMWKEXtvs7GxkZ/+7Xy01VXvg8eHDh0bPeVKpVAgICNBbRk4CAgKQlpZm0flaKpUKOV7FoaifAFW1blBv/x6a41sBfyPXT/+HIrYBVHXfgubQGmj2/QT4mz4ZWlGiMlSNP4A4tQfqnbMB31Km2xaNgqrFxxDXzkC9eSrgWRwKr+JwsWJbLtwBKpk4t//MLeAMgCgXGK/qR8Cas9pvTa1jQ5L5+eb6iHIB0tJgdFvS0tIAAELi2x7WcHGIQAFVw/egCK4M9drPIO7cNFvHytpvQBnbBOoN0yEu/W2+baVXJL8+FEVKWVXDUS6A+rn6KuzaBQB1FuvXUpLq1/+fN4ue/nBpORJ46oKcX0cDalfT9ebqAVXzYVB4hEG9ejRE5mPTbZUukl4fml1LoIrvJLl+5VC7uX3E+tmmhhXCglYtWrTAyZMn8dZbb8Hf3x8fffQRevbsiapVTd9MTqFQ4MMPP8xzANZ68uQJvLy8sGrVKnTo0EE3vU+fPkhJScHatWv12icmJmLcuHE2HweRta5du4YSJUpYvRxrmOSA9Uv2zpIatigUJSUloXfv3ti/f792IYUiz8RVUJ8+u3nzJooXL469e/eidu3auunDhw/Hzp07ceCA/plYz79L0Wg0ePDgAYoUKQKFgqfHUsETQiA9PR3h4eFQKq2/3SBrmAoT65fsnTU1bNHhs5iYGOzduxePHz/GnTt3UKpUKcyYMQPt27e3yYALkru7O9zd3fWmBQQEFM5gyGk9e4sca7GGqbCxfsneWVrDVt0Q1sPDA5GRkRg7diyaNGmCkiVLShpcfhQtWhQqlcrgitq3b99GaGjoCx8PEREROQbr94UCGDt2LF566SVbj8Uibm5uqFatmu7ea4B2d+zWrVv1DqcRERERWcOiPUVvvvkmFAoF5s+fD5VKhTfffDPPZRQKBRYsWJDvARozdOhQ9OnTBy+//DJq1KiBGTNmIDMzU/dpNCIiIiJrWRSKtm3bBqVSCY1GA5VKhW3btuV5glxBnkD36quv4u7duxgzZgxu3bqFKlWqYOPGjQgJCSmwPomIiMixWfTpMyIiIiJHJ+mcIiIiIiJHY9Wnz3Klp6djz549uHDhAtLT0+Hr64uYmBjUq1cPPj4+th4jERERUYGzKhSp1WqMGjUK3377LbKysvQu4KhQKODl5YUhQ4Zg/Pjxki7yRURERFRYrDqnqHv37lixYgXKly+PHj164KWXXoKPjw8yMjLw999/46effsLZs2fRo0cP/PjjjwU5biIiIiKbsjgUbdmyBS1atMCgQYPw9ddfG90TpNFoMHjwYMydOxebN29GkyZNbD5
gIiIiooJgcSjq3bs39u3bh3Pnzpn9uL1Go0FcXBzq1KmDxYsX22ygRERERAXJ4hN/Dh48iE6dOuV5/SGlUolOnToZ3JiViIiISM4sDkXJycmIiYmxqG1MTAySk5MlD4qIiIjoRbM4FGVkZMDb29uitl5eXsjIyJA8KCIiIqIXzeJQJIQo0Ft3EBERERUmi0+0ViqViIiIgL+/f55tU1NTcf36dajV6nwPkIiIiOhFsPjijQ0aNLB4T1GRIkUQFRUleVBERERELxpvCEtEREQE3hCWiIiICABDEREREREAhiIiIiIiAAxFRERERAAYigpNRkYGxo4di1atWiEoKAgKhQKLFi0yaKfRaLBo0SK88soriIiIgLe3N1566SVMmDABjx8/NrruBQsWoFy5cvDw8EBsbCy++eYbo+1u3LiBbt26ISAgAH5+fmjfvj0uXrxo0fifPHmCr7/+GvHx8fDz80NAQAAqVKiA/v3748yZM7p2e/fuRWJiIlJSUixaL9kHS+sXAN544w0oFAqDR9myZQ3aajQaTJkyBaVLl4aHhwcqVaqEZcuWGV3v6dOn0apVK/j4+CAoKAi9evXC3bt3rRr/Sy+9BG9vbxQpUgRVqlTBkCFDcPPmTV273377DYmJiRatk+zLyZMn0bVrV0RFRcHLywtFixZFgwYNsG7dOqPtLa031rCdE1QoLl26JACIyMhI0ahRIwFALFy40KBdenq6ACBq1aolJkyYIObPny/69u0rlEqlaNSokdBoNHrt586dKwCIzp07i/nz54tevXoJAGLy5MkG642NjRXBwcHiiy++ENOnTxcRERGiRIkS4t69e3mOv23btkKlUonXX39dzJo1S8yYMUO88847okSJEnrbMXXqVAFAXLp0ScqPiWTK0voVQog+ffoId3d38cMPP+g9/ve//xm0HTFihAAg3n77bTF//nzRpk0bAUAsW7ZMr921a9dE0aJFRXR0tPj666/FxIkTRWBgoKhcubLIzs42O/YnT56I+Ph44enpKd555x0xd+5c8eWXX4q+ffuKokWLiu3bt+vaDho0SPDPpGNav369aNmypUhMTBTz588XM2bMEPXr1xcAxLx58/TaWlNvrGH7xp9UIXn8+LFITk4WQghx6NAhk/9UsrOzxR9//GEwfdy4cQKA2Lx5s25aVlaWKFKkiGjTpo1e29dee014e3uLBw8e6KZ98cUXAoA4ePCgbtrp06eFSqUSI0eONDv2gwcPCgBi4sSJBvNycnL0QhVDkWOytH6F0IYib2/vPNd5/fp14erqKgYNGqSbptFoRP369UWJEiVETk6ObvrAgQOFp6enuHLlim7a5s2bjf5De96KFSsEALF06VKDeY8ePRKpqam65/yH4lxycnJE5cqVRVxcnN50S+uNNWz/LPpJlSpVSpQuXdqqR1RUVEGP3WHk9U/FmOPHjwsAYubMmbpp69evFwDE+vXr9dru3btXABA//PCDblr16tVF9erVDdbbokULER0dbbbvZcuWCQBix44dZtuNHTtWADB4PBuQfvjhB1G1alXh4eEhAgMDxauvviquXr2qt56GDRuKChUqiMOHD4vatWsLDw8PUapUKTFnzhyDPmfOnCnKly8vPD09RUBAgKhWrZrRPxxkO5aGopycHL0/1s+bNWuWACBOnjypN/2nn34SAMTu3bt104KDg0XXrl0N1lGmTBnRtGlTs+OdNGmSACAuX75stl2fPn2M1m8utVotvvrqK1G+fHnh7u4ugoODRf/+/fXefAghRMmSJUWbNm3E77//LipXrizc3d1FuXLlxOrVq/XaPXnyRCQmJoqYmBjh7u4ugoKCRN26dcWmTZvMjpNsq23btiIkJERvmqX1xhq2/xq26IrWDRs25H3PZObWrVsAgKJFi+qmHT16FADw8ssv67WtVq0alEoljh49itdffx0ajQbHjx/Hm2++abDeGjVqYNOmTUhPT4evr6/RvkuWLAkAWLp0KerWrQsXF+Nl1KlTJ5w7dw7Lli3DV199pRtrsWLFAAATJ07E6NGj0a1bN7z11lu4e/cuvvnmGzRo0ABHjx5FQECAbl0PHz5E69at0a1bN/To0QMrVqzAwIED4ebmptuO7777Du+//z66dOmCIUOG4PHjxzh+/DgOHDiAnj175vkzpYKTlZUFPz8/ZGVlITAwED169MAXX3wBHx8fXZujR4/C29sb5cqV01u2Ro0auvn16tXDjRs3cOfOHYM6z23722+/mR1Lbv0uWbIEo0aNMvm3bcCAAbh58yY2b96MH374wej8RYsWoW/fvnj//fdx6dIlfPvttzh69Cj++OMPuLq66tqeP38er776Kt555x306dMHCxcuRNeuXbFx40Y0b94cAJCYmIhJkybhrbfeQo0aNZCWlobDhw/jzz//1LUh28vMzMSjR4+QmpqK//3vf9iwYQNeffVV3Xxr6o017AA1XNipjKTtKWrWrJnw8/MTDx8+1E0bNGiQUKlURtsXK1ZMdO/eXQghxN27dwUAMX78eIN2ue90zpw5Y7JvjUYjGjZsKACIkJAQ0aNHDzFr1iy93cC5TB0+u3z5slCpVAaH4P7++2/h4uKiNz23r2nTpummZWdniypVqojg4GDx5MkTIYQQ7du3FxUqVDA5bioYedXviBEjxMcffyyWL18uli1bpnv3WrduXfH06VNduzZt2hjdw5yZmSkAiBEjRuj1t2TJEoO2w4YNEwDE48ePTY43KytLxMXFCQCiZMmS4o033hALFiwQt2/fNmhr6tDD7t27jR6+2Lhxo8H0kiVLCgB676pTU1NFWFiYiI+P102rXLmywaFvKngDBgzQ7UFRKpWiS5cuentKrKk31rD91zA/fWaHPv/8c2zZsgWTJ0/W25vy6NEjuLm5GV3Gw8MDjx490rUDAHd3d6Ptnm1jjEKhwO+//44JEyYgMDAQy5Ytw6BBg1CyZEm8+uqrFn3S7Oeff4ZGo0G3bt1w79493SM0NBSxsbHYvn27XnsXFxcMGDBA99zNzQ0DBgzAnTt3cOTIEQBAQEAArl+/jkOHDuXZP704kyZNwuTJk9GtWzd0794dixYtwsSJE/HHH39g1apVunaPHj2yqCbzW7+enp44cOAAhg0bBgBYtGgR+vXrh7CwMAwePBjZ2dl5btPKlSvh7++P5s2b69VvtWrV4OPjY1C/4eHh6Nixo+65n58fevfujaNHj+r2+gYEBODkyZM4f/58nv2T7XzwwQfYvHkzFi9ejISEBKjVajx58kQ335p6Yw3bfw3nKxQ9ffoUf//9N/bs2YNdu3YZPMj2li9fjlGjRqFfv34YOHCg3jxPT0+9F/OzHj9+DE9PT107AEZfOLkf889tY4q7uzs+/fRTnD59Gjdv3sSyZctQq1YtrFixAu+9916e23H+/HkIIRAbG4tixYrpPU6fPo07d+7otQ8PD4e3t7fetDJlygAALl++DAD4+OOP4ePjgxo1aiA2NhaDBg3CH3/8kedY6MX78MMPoVQqsWXLFt00T09Pi2rSFvXr7++PKVOm4P
Lly7h8+TIWLFiAuLg4fPvtt/jss8/yHP/58+eRmpqK4OBgg/rNyMgwqN+YmBiDQxzP1+/48eORkpKCMmXKoGLFihg2bBiOHz+e51gof8qWLYtmzZqhd+/e+PXXX5GRkYF27dpB/HNbUGvqjTVs/zVs0TlFz9NoNBg5ciRmz56NrKwsk+3UarXkgZGhzZs3o3fv3mjTpg3mzp1rMD8sLAxqtRp37txBcHCwbvqTJ09w//59hIeHAwCCgoLg7u6O5ORkg3XkTstta4mwsDB0794dnTt3RoUKFbBixQosWrTI5LlGgLaGFAoFNmzYAJVKZTD/2XNNLFWuXDmcPXsWv/76KzZu3IjVq1dj9uzZGDNmDMaNG2f1+qjgeHp6okiRInjw4IFuWlhYGLZv3w4hhN4f3+drMiwsTG/6s5KTk3X1bamSJUvizTffRMeOHREVFYWlS5diwoQJZpfRaDQIDg7G0qVLjc7PPW/OGg0aNMCFCxewdu1abNq0Cd9//z2++uorzJ07F2+99ZbV6yNpunTpggEDBuDcuXOIi4uzqt5Yw/Zfw5JC0eeff46pU6diwIABqFevHnr16oUvvvgCAQEBmD17NhQKBaZMmWLrsTq1AwcOoGPHjnj55ZexYsUKo4GjSpUqAIDDhw+jdevWuumHDx+GRqPRzVcqlahYsSIOHz5stJ+oqCiTJ1mb4+rqikqVKuH8+fO6Q2GmTgCMjo6GEAKlS5fWvdsw5+bNm8jMzNTbW3Tu3DkAQKlSpXTTvL298eqrr+LVV1/FkydP0KlTJ0ycOBEjR47U7Zamwpeeno579+7p/eGtUqUKvv/+e5w+fRrly5fXTT9w4IBuPgAUL14cxYoVM1q/Bw8e1LWzVmBgIKKjo3HixAndNHP1u2XLFtStWzfPd/QAkJSUZPCP0lj9BgUFoW/fvujbty8yMjLQoEEDJCYm2s0/FEeQe9gqNTUVgHX1xhp2gBqWciJSdHS0ePXVV4UQQty7d08oFAqxdetWIYT2BNiqVavmea0b+ldeJ6qeOnVKFClSRFSoUMHgo5LPysrKEkFBQaJt27Z6019//XXh5eUl7t+/r5s2efJkAUAcOnRIN+3MmTNCpVKJjz/+2Ox4z507Z/Sk6ocPH4rw8HARGBioux7HnDlzBABx9OhRvbZJSUlCpVKJnj17GlyAUqPR6F3ryNyJ1sWKFdOdaG3sopPDhg0TSqVSpKWlmd0mks5c/T569Mjozz73ZNKff/5ZN+3atWsmr/FSvHhxvWu8vPPOO8LT01Pv8g1btmwRAIxequFZx44dE3fv3jWYfvnyZeHp6SkqVaqkm/bxxx8LAHofaBBCiB07dggARv/OPX36VK+9uZNUq1SpoptmrH67du0qihYtanZ7SBpjJyU/efJEVK1aVXh6eor09HTddEvrjTVs/zUsaU/R9evXMXz4cAD/niiWexzUzc0Nr7/+OqZPn47PP/9calZzCt9++y1SUlJ0l2Rft24drl+/DgAYPHgw/P39kZ6ejpYtW+Lhw4cYNmwY1q9fr7eO6Oho1K5dG4D2kMRnn32GQYMGoWvXrmjZsiV2796NH3/8ERMnTkRQUJBuuXfffRffffcd2rRpg48++giurq6YPn06QkJC8H//939mx/3XX3+hZ8+eSEhIQP369REUFIQbN25g8eLFuHnzJmbMmKE7JFatWjUAwKefforu3bvD1dUV7dq1Q3R0NCZMmICRI0fi8uXL6NChA3x9fXHp0iX88ssv6N+/Pz766CNdn+Hh4fjiiy9w+fJllClTBsuXL8exY8cwf/583cdGW7RogdDQUNStWxchISE4ffo0vv32W7Rp00bSni8yz5L6vXXrFuLj49GjRw/dbT1+//13/Pbbb2jVqhXat2+vW1+JEiXwwQcfYOrUqXj69CmqV6+ONWvWYPfu3Vi6dKneYdZPPvkEK1euROPGjTFkyBBkZGRg6tSpqFixIvr27Wt23Js3b8bYsWPxyiuvoFatWvDx8cHFixfxn//8B9nZ2Xq3RMit3/fffx8tW7aESqVC9+7d0bBhQwwYMACTJk3CsWPH0KJFC7i6uuL8+fNYuXIlvv76a3Tp0kW3njJlyqBfv344dOgQQkJC8J///Ae3b9/GwoULdW3Kly+PRo0aoVq1aggKCsLhw4exatUqi87RI+sNGDAAaWlpaNCgAYoXL45bt25h6dKlOHPmDKZNm6Z3CN/SemMNO0ANS0lS4eHheu/a/f39xTfffKN7Pn36dIuuYOvsctO3sUfuR9hzb6dg6tGnTx+D9c6fP1/ExcUJNzc3ER0dLb766iuDvTFCaN/VdOnSRfj5+QkfHx/Rtm1bcf78+TzHffv2bTF58mTRsGFDERYWJlxcXERgYKBo0qSJWLVqlUH7zz77TBQvXlwolUqDj+evXr1a1KtXT3h7ewtvb29RtmxZMWjQIHH27FldG2MXbyxZsqT49ttv9fqZN2+eaNCggShSpIhwd3cX0dHRYtiwYWYvGEjSWVK/Dx8+FK+//rqIiYkRXl5ewt3dXVSoUEF8/vnnuj18z1Kr1eLzzz8XJUuWFG5ubqJChQrixx9/NNr/iRMnRIsWLYSXl5cICAgQr732mrh161ae47548aIYM2aMqFWrlggODhYuLi6iWLFiok2bNmLbtm16bXNycsTgwYNFsWLFhEKhMPho8/z580W1atWEp6en8PX1FRUrVhTDhw8XN2/e1Ps55V74rlKlSsLd3V2ULVtWrFy5Um9dEyZMEDVq1BABAQHC09NTlC1bVkycONHoz4nyb9myZaJZs2YiJCRE9zesWbNmYu3atUbbW1pvrGH7rmGFEP+cYm+Ftm3bwtfXV3eTu44dO+Lvv//G4sWLodFo0Lt3b4SHh/OTP2QTjRo1wr179/SOkxPZi1KlSuGll17Cr7/+WthDIZLEmWpY0kfy+/fvj+zsbN3HCSdOnIiUlBQ0aNAADRs2RFpaGqZNm2bTgRIREREVJEnnFL3yyit45ZVXdM/Lly+PCxcuYPv27XBxcUGdOnX0zl8hIiIikjtJocgYf39/dOjQwVarIyIiInqhJJ1TdPXqVVy9ehX16tXTTfvrr78wbdo0ZGdno0ePHgxIREREZFckhaIOHTogIyNDd4n+27dvo1y5cnjy5Al8fX1x584drFy5Ep06dbL5gImIiIgKgqQTrQ8ePIjmzZvrni9ZsgSPHj3CX3/9hRs3bqBp06b48ssvbTZIIiIiooIm6ZyiBw8e6N1b69dff0XDhg0RHR0NAOjUqRM++eQT24zQxjQaDW7evAlfX1+Tlz8nsiUhBNLT0xEeHg6lMl/3YAbAGqYXi/VL9s6aGpYUiooVK4YrV64AAFJSUrB//35MnjxZNz8nJwc5OTlSVl3gbt68iYiIiMIeBjmha9euoUSJEvleD2uYCgPrl+ydJTUsKRQ1a9YMM2fOhJ+fH3bs2AGNRqN3YvWpU6dkW/S5t3u4du0a/Pz8Cnk05AzS0tIQERFhs1uNsIbpRWL9kr2zpoYlhaLJkyfj3Llz+Oijj+Dm5oYvv/wSp
UuXBgBkZ2djxYoV6Nmzp5RVF7jc3bV+fn58QdILZatDBaxhKgysX7J3ltSwpFAUEhKCP/74A6mpqfD09ISbm5tunkajwdatW2W7p4iIiIjImHxdvNHf399gmqenJypXrpyf1RIRERG9cBaFoiVLlgAAevXqBYVCoXuel969e0sfGREREdELZNHFG5VKJRQKBR49egQ3NzeLPpapUCigVqttMkhbSktLg7+/P1JTU3k8m14IW9cca5heJNYv2Ttras6iPUWXLl0CAN25Q7nPiYiIbEmj0djkekgFyR7GSNJYFIpKliyp+/7p06dITU1FUFCQTa5ZQURElEupVGLevHlITk4u7KEYFRYWhgEDBhT2MKiAWH2itVKpRLVq1TBt2jS8//77BTEmIiJyYsnJyboLBBO9SFbv/1OpVChZsiSys7MLYjxEREREhULSQdHBgwdj/vz5ePDgga3HQ0RERFQoJF2nSK1Ww93dHdHR0ejSpQtKlSoFT09PvTYKhQIffvihTQZJREREVNAkhaKPPvpI9/2CBQuMtmEoIiIiInsi6fDZpUuX8nxcvHhR0oB27dqFdu3aITw8HAqFAmvWrNGb/8Ybb0ChUOg9WrVqJakvIkek0WgKewh5socxEpHzkbSn6NmP6NtaZmYmKleujDfffBOdOnUy2qZVq1ZYuHCh7rm7u3uBjYfI3vAjzURE0uTr3mcPHjzAli1bcPnyZQBAqVKl0LRpUxQpUkTyOhMSEpCQkGC2jbu7O0JDQyX3YY49XJTLHsZIhYsfaSYisp7kUJSYmIgvvvjC4KP5bm5uGD58OMaPH5/vwZmyY8cOBAcHIzAwEE2aNMGECRNMBrHs7Gy9MaalpQHQnixu7DYkKpXKLt5ly/EWKmRcfn9XUmrYHrCG7QPr1zjWr/2w5nclKRR99tlnGD9+PNq0aYP33nsPZcqUAQCcPXsW3377LSZOnAhXV1eMHj1ayurNatWqFTp16oTSpUvjwoUL+OSTT5CQkIB9+/YZfTFNmjQJ48aNM5h+9uxZ+Pj46E3z8PBATEyMXbzLvnTpEh4/flzYwyALZGRk5Gt5KTVsD1jD9oH1axzr135YU8MW3RD2ecWLF8fLL7+MtWvXGp3frl07HDlyBDdv3rR21fqDUyjwyy+/oEOHDibbXLx4EdHR0diyZQuaNm1qMN/Yu5SIiAg8ePDA6I3hVCoVEhMTZRuKSpYsicTERL5LsSNpaWkICgqSfANM1jAVJtavPtav/bGmhiXtKUpNTTX7ia/WrVtjx44dUlZttaioKBQtWhRJSUlGQ5G7u7vRE7FVKpXd7KY1xp7H7mzy+7tiDVNhYv0aZ89jdzbW/K4kna1bt25dHDhwwOT8AwcOoG7dulJWbbXr16/j/v37CAsLeyH9ERERkWOSFIrmzp2Lffv24cMPP0RSUhI0Gg00Gg2SkpLwwQcfYP/+/Zg7d66kAWVkZODYsWM4duwYAO1x22PHjuHq1avIyMjAsGHDsH//fly+fBlbt25F+/btERMTg5YtW0rqj4iIiAiQePisUqVK0Gg0mDlzJmbOnKn7eHjuBdnc3d1RqVIlvWUUCgVSU1PzXPfhw4fRuHFj3fOhQ4cCAPr06YM5c+bg+PHjWLx4MVJSUhAeHo4WLVrgs88+47WKiIiIKF8khaLOnTtDoVDYeiwAgEaNGsHcud+///57gfRLREREzk1SKFq0aJGNh0FERERUuKw+pygrKwvVqlWTfM4QERERkRxZHYq8vLxw6dKlAjt8RkRERFQYJH36rFWrVjy3h4iIiByKpFA0evRonDt3Dr169cKePXtw48YNPHjwwOBBREREZC8knWhdoUIFAMCpU6fw008/mWzHy6ATERGRvZAUisaMGcNzioiIiMihSApFiYmJNh4GERGRYxEaNRRKed8jzR7G+CJJCkVERERknkKpQs7PwwCFEqrWYyAeXoV6wwTg6WPTC/mHwaXNOOBpFnJ+TQQepZhu6xkAl7aJgKsXctaPBVKTTbd19YAqYRQUgZFQ/zYe4m4SFEWj4NJpqsStc0ySQtH48ePzbKNQKDB69GgpqyciInIMCiVUCaMg7pyFeunbwJMs022DSsElYTTw6CFylvQFMu+ZbutdFC4dvwBUbshZ9Drw4LLptm5eUL32HRQBJaD+oS/Ezb8BAKbvHeG8bH74TKFQQAjBUERERE5P1XqM5YGo9yIgO92yQNR7IeDui5wlb1gWiIrFQv1jP10gIuMkfSRfo9EYPHJycnDhwgV8+OGHePnll3Hnzh1bj5WIiMiuiIdXGYjsiKRQZHRFSiVKly6NL7/8ErGxsRg8eLCtVk1ERGSX1BsmMBDZEZuFomc1aNAAv/32W0GsmoiIyH6YO6magUh2CiQUHT58GEplgayaiIjI/skhECn5AfTnSfqJLFmyxOj0lJQU7Nq1Cz///DPeeuutfA2MiIjIIckiELlC1XSoxA1wXJJC0RtvvGFyXtGiRTFixAiMGTNG6piIiIgck1wCUZfpUERUlbgRjktSKLp06ZLBNIVCgcDAQPj6+uZ7UERERA5HToEopgHUW6bApdWnEjfGMUkKRSVLlrT1OIiIiByX3ALRyvch0u9aPHy1AFQyv+WpWgOo8nk6s03Osjpz5gxWrlyJ5ORkxMXFoW/fvvDz87PFqomIiOybHAPR+Z1AaHmLN0GlAIYcAJLSgDsmPlDnqQJCPYFLGabXU9oHuPUIeKQ2Pj/YQ/vV2j7K+gM/NDC/DZawOBR9++23mDlzJvbu3YuiRYvqpq9btw5du3bFkydPdNO++eYb7N+/X68dERGR0/EP0966Q26BSIK9d4CjDyQtqpPf5QuaxTua/ve//yE6Olov6OTk5OCtt96CSqXCwoUL8ffff2Py5Mm4cuUKJk6cWCADJiIishcubcY5RCByFhaHolOnTqFWrVp607Zv3467d+/iww8/RJ8+fVChQgUMHz4c3bp148UbiSjfhMbEPnYZsYcxUiF6msVAZEcsPnx2//59RERE6E3bunUrFAoFOnbsqDe9bt26+Pnnn20zQiJyWgqlCjk/D4O4d9FwptIFqqZDoYioCvWWKRBX/zS7LmW9/lCWaw71rjkQZ7eZbxvfGarqPaE+9BM0R1ebHl/RKLh0mmrRtpBzyvk1kYHIjlgcikJCQnDr1i29abt374aXlxcqV66sN93NzQ1ubm62GSEROTVx7yJw65T+RN11VuKhXjk4jz/4Cihbj9EGonWjIY6Zf8OmrP+ONhBt/xqa3XPNDy6uiWUbQc7rUYrpeXYeiLqV0p44PesMkJFjOL9SINAxEvjlKnD8oeF8HxdgUFntSdMrLuevj9Op+diQZ1gcil5++WUsXrwYgwcPhq+vL06ePImDBw+iffv2cHHRX82ZM2dQokQJ24yQiOhZVv3B/ycQVetmeSBqPMSiQKSo0gmqBgMlbAAR7D4QNQoFfmqo/b6ENzD4gP58BYDNLYAiHsC7ZYHQ5YB4bh2TqgEDy2q/v/0I2Hlbeh8p2TbYKFhxTtHYsWNx5coVxMbGomnTpqhb
ty4UCgVGjhxp0PaXX35BnTp1bDNCIqJccgpE7T6D5vRmCRtBTk8WgUgBZb3+EjcAEM8nnALwIvp4nsV7iipWrIht27Zh4sSJuHjxImrVqoWPPvoI1apV02u3Y8cOeHl5oWvXrjYfLBE5MbkFoiMroPlzJVTlW0rYGHJacglE/xxSlmrnbaDnzn8PbT1PAGi+CegQCay5ariXCABGHgGuZ2oPnz2/l8jaPs6k/rtXKT+sunhjnTp1sH79erNtGjVqhL//NvOLIyKylhwD0W/jgdByEjaGnJacAlG1blDvmgOXhoMkbozp84ByHX9o/FyiXBk5wBcnbNNHfJD5dpbK5wWxiYgKmNJFnoHI6HtfIhPkFojWjc7zU5jOiKGIiGRN1XQoAxHZNzkGojxeH86KoYiIZE0RUZWBiOyXZwADkR2RXSjatWsX2rVrh/DwcCgUCqxZs0ZvvhACY8aMQVhYGDw9PdGsWTOcP3++cAZLRAVOvWWKQwQitUbyoi+E3Mdnr1zaJjpMIAr2AIp7mZ7v4wKU8ze/jnL+gLeZs5mLe+W/j/yw6kTrFyEzMxOVK1fGm2++iU6dOhnMnzJlCmbOnInFixejdOnSGD16NFq2bIlTp07Bw8OjEEZMRAXJ/JWq7SMQAYBKCUw+Dnz0kvb5+/uBQ/f123ipgGWNgHAv4L+XgGlGTkL9v5eA7qWBm1lA9x2GdxuvXgSYWcu6Pmx1h3EywtULOYtet/tABABrm2q/tt4MbNO/ljN8XIBj7YFSPsA3p4EPDxou/1UNYHA54HIGUHktkPncxRibhAK/Nc9fH/klu1CUkJCAhIQEo/OEEJgxYwZGjRqF9u3bAwCWLFmCkJAQrFmzBt27d3+RQyWiQmU/gShXkDvg8s/++RAv4OhzO7nL+WvDCgBUCTJ+R/HcT9mEewEPnxheybddRP77INvJWT/WIQIR8G9d1Q0xDCwR3tqwAgDNw40v3+Kf6aV8gEhvw9qtF5L/PvJLdqHInEuXLuHWrVto1qyZbpq/vz9q1qyJffv2GQ1F2dnZyM7+91KXaWlpAAC1Wg212vBGjiqVqgBGbnvGxk7ylN/flTPXsOltkUcgUkRWBWD5tqy9CpQP0D5fYOSo/+lU7Tvg5uHaa7gYM+KI9krAm28av7XBgvNAkzBpfRjbDtavcRbXb2qy6QYyCUSWbsvR+9q9O3KsXXPbYk0N21Uoyr33WkhIiN50Y/dlyzVp0iSMGzfOYPrZs2fh4+OjN83DwwMxMTE2Gm3BunTpEh4/flzYwyALZGRk5Gt5Z61h09sik0AU2xCqZsOt2pY7j4FGG80OIc9DAuuuaR+m3MiS3oex7WD9Gie9fv8hk0Bkzbb032t+z2Jh1m6u/NawXYUiKUaOHImhQ4fqnqelpSEiIgJxcXHw8/MrxJHlT+nSpQt7CGSh3HfGUrGGnyWjQNR1JsS1P6EoXdNhXo/GtoP1a1y+fucyCUTK+M4AHOv/SX5r2K5CUWhoKADg9u3bCAsL002/ffs2qlSpYnQZd3d3uLu7G0xXqVR2s5vWGEvGLjRqKJTy3kZ7GGN+5bfOnLmG9cksECXtgnrPd1C+tdyufw/PMrYdrF/jJI9dLoGo/jtQVe8JwH4OWVoivzVsV6GodOnSCA0NxdatW3UhKC0tDQcOHMDAgbxb9fMUShVyfh4GFCkFVYOB0JzeDM2e+eaXiawKVbPhENf+hHrrdECTY7ptsRioWo+BeHgV6g0TgKdmDuf5h8GlzTjgaRZyfk0EHqVAUTQKLp2mStw6ci4yDESrhgLBsVI2hpyVnAJR4yFQH/pJF4xIS3ahKCMjA0lJSbrnly5dwrFjxxAUFITIyEh88MEHmDBhAmJjY3UfyQ8PD0eHDh0Kb9BylhuILD5HYhhE0k7tH3zNU9NtwytClTAK4s5ZqJe+DTzJMj2GoFJwSRgNPHqInCV9gcx7gNmREOmTZSAy8/ogMiC3QLT9a2jO72Ioeo7sLt54+PBhxMfHIz4+HgAwdOhQxMfHY8yYMQCA4cOHY/Dgwejfvz+qV6+OjIwMbNy4kdcoMsGqQGThH3xFeEWoXl8Acfe8ZYGo9yIgO10vEBFZSlmvv8MGIk8VsKox8Gc70ze0HFIeONVB+9WYqkHa5Vc11q5PSh89oiQNnywlx0CUx+sjL3Kp3dw+bFXDsgtFjRo1ghDC4LFo0SIAgEKhwPjx43Hr1i08fvwYW7ZsQZkyZQp30DKmOb2ZgYjsmrJcc4cMRADQugTQIRKoFAQMr2g4X6kAplQDyvhrvyoVhm2GVdQu3yESSCghrY8h5fK/LWSCq4fDBSJAPrWb24etalh2oYhsS3sOEQMR2S/1rjkOGYgA7ceb0/9Z5XYjl7PRCGDPHe33e+5onz8vd7n0p8AxIx+XtqQPY8uRbagSRjlcIALkU7u5fdiqhmV3ThG9OAxEZA/E2W1m59trIAKAi+lA9Gog0A24kG68TavNQKwfcN7Ep4rnnwO2Jmuvbv0g23C+JX28dwDY10baNpB5isBIqH/o61CBCJBP7eb24e1imxpmKHJSsghEngGSx08E2HcgyvUg2/g/hFxPNcCpFPPrMPUPw9I+cngz2AKj/m28wwWiXHKo3dw+TJ1zZC0ePnNCsghE3kW1d48mkkgOgUhRzD6uvkyFR9xNMj3TjgORo2IocjKyCUS9FwKuXtI3hJyaLAJReEWoWo+Rugnk7BiIZImhyInIKhC5+2rvHk1kJdkEotcXQDy8KnUzyJnJJBAp4ppIGLxjYyhyErILREveMH/3aCIjZBWI7p7XXsndmvErgIFx2ofKyEeUAaBJKDCmMlDcxI7U4l7a+U1Cjc9X2aAPKkByCURVOkHVwLo7QXQp6fi1yxOtnYAsA9GDy0CoiSt6ERkhu0C09G0gqJRV29ApEvi40r/P55zVn1/cC/itOeCiBJqEGb9j+NIGQL0Q7cnR0au1dxZ/Vv8ywDe18tcHFRA5BaJ2n0FzejNU5VtaPHxnqF3uKXJwisiq8gxERFaQZSAy9/ogep7cAtGRFXneC9MZcU+Rg1M1G275vcxkHIjUGkAl8whvD2O0R8r4zlBV7+kQgejnq8D1fxadf85w/o0soPVmoG4IsOC88XX03AX0iwX+uG34Tvv59Urtg2xMjoHot/FAqHWXgf7iuLZ+Hbl2GYocnLj2p90HIkAbNnrtAs6kSl5FgSrrD/zQoLBH4ZgcJRAB2ivwPn9I4HnbbmkfptzMAj77y/R8tQ36IBtSusgzEEm4JfeqK9qrTJviCLXLUOTg1Fun230gynUm1fwLkhyT+tBPDhGIyDmpmg6FIiLe7gORs2AocnSaHJOz7CkQkfPSHF1tdj4DEcmZIqIq1CsHMxDZCZ4B4aRkEYhcPSSOnkiLgYjkTr1lCgORHWEockKyCERuXlAljLJq3P5upucVcQeifU3Pd1UC5QO0X02J9tWuR2of9GLJIhD5h+VjC7TkULsu/E9QYMTVP83Mte9AJIfaze3DVjXMl4KTkU0geu07KAIjrRr72iZAlJEXR5QvkNQZONtJe50
LYzY2B4631341pn8Z7fJJnaX3QS+OLAJRUCm4tBmXj62QT+1+W1Pa+Ck/7DsQyaV2c/uwVQ0zFDkRWQWiYrFQ/zbeqvF7uwJVjNwJOT4I8HXVft/YyBt3pQKoF6z9vl6w9vnzcpfzldiHwsTVV8n2ZBOIei8CnubvsJocalepML4cFSR5BCJFZFUpgwcgn9rN7cNWNcxQ5CRkF4h+7Gf+7tFGbE8GNlw3nP7bdWDNVeD4A2DK34bzNQIYfgQ4m6r9qjHyN2LK39rl11yV1ofgofoXQlaBKDsdOb8mSt4WQB61qxHA16fztRlkFZkEotiGUDUbLmUDAMindnP7sFUN89NnTkCWgejm31bf5mP4YeCR2nD6IzXQZbv5Zb8+pX2YcvQBUHWd6fmW9EEFS3aBaElfwDdY+gZBPrW77CIwtIL5NmQLMgpEXWdCXPsTitLSjjvJpXZz+4gPsk0Nc0+Rg1MUi5FnICKygiwDkbnXB5EBmQWipF3a69iRHoYiB6dqPYaBiOyaIq4JAxHZORkGolVDzV7HzlkxFDk48fAqAxHZNVWDgQxEZNfkGYhMvz6cGc8pcnDqDRMYiMiuaU5vZiAiu6Ws1x/Kcs0ZiOwE9xQ5uqePTc+z00BU3AvY0Ur7CPcy3uarGsDJDkC7COPz20Vo539Vo+D6INvQ7JkPRwlEwR6sXWfjSIFofh3Hr12GImclh0CklLajsl8sUC9E++gXazi/nD8wuBwQ5w9MqmZ8HZOraecPLqdtXxB9UMGzp0AEAO0jWbvORr1rjkMEIgCIL+L4tctQ5IxkEYhcoWo6VNLw99wGcjTaxx+3DedfzQQuZ2i/33zT+Do2/TP9coa2fUH0QQVLFoHIM8CqMR+7z9p1NuLsNrPz7SUQAc5RuzynyNnIJRB1mQ5FhLSrqW67BUT/c+P0G0b+t2XmAJXXApHewOlU4+v48CAw/6z2RZZp5AMY1vYRzysCv1CyCETeReHSNtGqcR+6L7/apcJjT4EIANpvBU6kOHbtMhQ5EzkFopgGUG+ZApdWn0raFGMvmGdl5uT9oslrvi36INuTTSDqvRBwNXHigxmsXQLkEYgUxWKsGvOdx+ZryxFql4fPnIXcAtHK9/O4ezSRIVkFIndf5KwfK31jyGnJIhCFV4Sq9Ripm+CwGIqcgRwD0fmdEjeGnJXsAtGSN4DUZMnbQ85JNoHo9QUQD69K3QyHxVDk6PzDGIjI7skyEJl7fRAZIatAdPe89jp2pIehyMG5tBnnMIGoebjped1KAR+/BPiYOEuuUiAwtrL2qzE+Ltrlu5WS3gcVDEVkVYcORHKo3Vg/KwZMksguEC192/x17Cwgh9rN7cNWNWx3f94TExMxbtw4vWlxcXE4c+ZMIY1I5p5mOUQgAoDPqwEH7gI7n/uoZqNQ4KeG2u9LeAODD+jPVwDY3AIo4gG8WxYIXW74J2ZSNWBgWe33tx9J64MKhqrZcIiknQ4ZiORSu7Nr2WBjyCRZBiJzrw8LyKV2c/tIyc7X5ujYXSgCgAoVKmDLli265y4udrkZL0TOr4kOEYjMEab/ZthVH2ScuPanQwYigLXrDJTxnaGq3tOhAhHguLVrl2nCxcUFoaGhhT0M+/AoxfQ8OwtEnxwxfCcBaKf13AmU9gFmGdlhKAA03wR0iATWXDX+Z2bkEeB6JnApQ3ofVDDUW6c7ZCAC5FO77+7/9x052ZYjBiJAPrWb28eZVNvUsF2GovPnzyM8PBweHh6oXbs2Jk2ahMjISKNts7OzkZ397361tLQ0AIBarYZarTZor1KpCmbQNmZs7M8zuy0yCkSWbou5K5iuuGx++eMPtQ9TMnKAL06YX0defQDGt8WS7TPH6WtYY+Qqb/+QSyDKz+tRDrV7XltSrF8rWPo7Vx/6SfaBSGr9yqF2c/vIvYBufmvY7kJRzZo1sWjRIsTFxSE5ORnjxo1D/fr1ceLECfj6+hq0nzRpksE5SABw9uxZ+Pj46E3z8PBATIx1F7MqLJcuXcLjx6ZPkjO7LbIIRAoo6/XP/7bIjLFtycjIyNc6WcPGySIQuXrYZFvkgvVrOUt/55qjq82up7ADkTXbYg/yW8N2F4oSEhJ031eqVAk1a9ZEyZIlsWLFCvTr18+g/ciRIzF06L/32EpLS0NERATi4uLg52e/H7koXbq0tAXlEohaj4GyXHMA+dgWGTK2LbnvjKViDRuSRSBy84IqYRQAx6lh1q/lbPE7l0MgAhynfoH817DdhaLnBQQEoEyZMkhKSjI6393dHe7u7gbTVSqV3eymNUbS2OUUiKp1g3rXHLg0HGTXv4fnGduW/G4fa1ifbALRa99BEag9bG/Pv4dnsX4tl9+xyyIQ+YcBcJz6BfJfw3Z/naKMjAxcuHABYWFhhT0UeZNbIFo3Os+7R5tT3Ev7MMXHBSjnb34d5fwBbzNvC2zRB9mWrAJRsViofxtv9TawdkkWgSiolPY6dlYI9nD82rW7UPTRRx9h586duHz5Mvbu3YuOHTtCpVKhR48ehT00+ZJjIDr2s8SNAZqEAhc6ax9NjHwI0ccFONYe+LsD8FUN4+v4qoZ2/l/tjb9AbdEH2ZbsAtGP/SDuGt9DbUr1IqxdZyebQNR7EfDUuk+irW3q+LVrd6Ho+vXr6NGjB+Li4tCtWzcUKVIE+/fvR7FixQp7aPLkGeBQgQgA6oUALkrto26I4fwIb6DUP+dvmroKdot/ppfyASK9C6YPsh1ZBiJzrw8TqhRh7TozWQWi7HTtdeys4Ay1a3fnFP33v/8t7CHYFZe2iYDKzWECEQAsOA80Cfv3++edTgW+Oa190Yw8YnwdI45or6a6+aa2fUH0QbahKBYDVcIouw9EALD2KlA+QPs9a9e5yC4QLekL+AZbtQ1H7wOZOY5du3YXishKrl7IWfS6wwQiALiRBTTaaL7NhwfNz193TfuwVR+518gg21O1HgNx56zdByIAuPNYfrVLBU+WgSjzntWhqP9e4OgD0/MdoXYZihxczvqxDhWIyPmIh1cdIhCRc1LENYGqwUD5BSIyyu7OKSIrpSabnsdARHZAvWECAxHZLQYi+8JQ5KwYiMhePDV9pV0GIpI7zenNDER2hKHIGckkECnjO0sbPxEgj0Ck5BkIZJ5mz3wwENkPhiJnI5dAVP8dqKr3tGroU14GPI1cmNRTBaxqDPzZzvQJz0PKA6c6aL8aUzVIu/yqxtL7oBdIFoHIFaqmQ03Pt4BcardHlKThUz7ZcyCSS+3m9mGrGmYociZyCkSNh0B96Cerht84DEgoYTi9dQmgQyRQKQgYXtFIfwpgSjWgjL/2q1Jh2GZYRe3yHSKl9aEwsk4qIHIJRF2mQxFRVeJGaMmhdpUKYEi5fG0GSSCLQOQZIHn8cqnd3D5sVcMMRc5CboFo+9d53j36eZlPgWNGPg569AGQ/s/fk+1GzivXCGDPHe33e+5onz8vd7l0iX0I03vHyZbkFIhiGkC9ZYrEDdGSQ+1qhPHlqODIIhB5F9Vex04iudRubh+2qmEeEHcGcgxEu+cCoSb2qZrwyj
bgYrrh9IvpQPRqINANuGBkPgC02gzE+gHnTdwsef45YGsy8PAJ8CBbWh9UwOQWiFa+D5F+V+LGaMmldt87AOxrI20byDqyCUS9FwKuZm4ylge51G5uH94utqlh7ilydK4e8gxEEqQ9MT3vQbb5sPJUA5xK0X415UK68RempX1QAZJjIDL7+rCcHGo3x8y6yXZkFYjcfbXXscsHOdRubh+2qmHuKXJwqoRRUASUsPtARE7MPwwuCaMdMhCR85BdIFryBuAmfU+Ro+KeIgenCIxkICK75tJmHAMR2TVZBiJzrw8nxj1FDk7923gGIrJvT7MYiMhuKSKrQtVsGAORneCeIgcn7iaZnmmngUilAAbGaR8qEx+FbxIKjKkMFDexd7i4l3Z+k9CC64NsI+fXRIcJRErWrtNRNRvuMIGoS0nHr13uKXJWdhqIAKB/GeCbWv8+n3NWf35xL+C35oCLEmgSZvyuy0sbAPVCtCfnRa/W3p05P33wruMF6FGK6Xl2FIgAoFMk8HGlf5/LoXbzuis55Y+49qdDBCLAOWqXe4qckUwCkSKuiYTBE/3DzgIROSf11ukOEYicBfcUORu5BKIqnaBqMFDCBmivbWHs+1w3soDWm4G6IcCC88bX0XMX0C8W+OO24bsVKX2EeFi3DZRPsghECijr9bdq2D9fBa7/U29yqV0qYJock7PsLRB9cVxbv45cuwxFzkROgajdZ9Cc3gxV+ZZWb4ZaGO5Wfd62W9qHKTezgM/+sl0fDEUvkFwCUesxUJZrbtXQNTKsXSocsghErtb94Vp1RXuVaVMcoXZ5+MxZyC0QHVnxz92jiawgp0BUrRvUu+ZI3BByZrIIRG5eUCWMkrgFjouhyBnIMRD9Nl7ChpBTk1sgWjca4uw2iRtDzko2gei176AIjJS4FY6LocjRKV1kGoh4B1WyghwDUR6vD6LnySoQFYuFmm9ODTAUOThV06EOE4j83UzPK+IORPuanu+qBMoHaL+aEu2rXY/UPqiAeAY4dCCSQ+268D9BgZNdIPqxn/nr2FlADrWb24etapgvBQeniKjqEIEIANY2AaKMvDiifIGkzsDZTtrrXBizsTlwvL32qzH9y2iXT+osvQ8qGC5tEx02EMmldr+tKW38ZBlZBiJzrw8LyKV2c/uwVQ0zFDk49ZYpDhGIAMDbFagSZDg9PgjwddV+3zjMyJgVQL1g7ff1grXPn5e7nK/EPhQmrr5KNuDq5ZCBCJBH7SoVxpcj21AUi3G4QATIp3Zz+7BVDTMUOThx9U8zc+0nEAHA9mRgw3XD6b9dB9ZcBY4/AKYYea1rBDD8CHA2VftVY2QoU/7WLr/mqrQ+BE+RKjA568c6ZCAC5FG7GgF8fTr/20LGqVqPcbhABMindnP7sFUN8zpFTsu+AhEADD8MPFIbTn+kBrpsN7/s16e0D1OOPgCqrjM935I+qICkJpueZ8eBCJBP7S67CAytYL4NSSMeXnW4QATIp3Zz+4gPsk0Nc0+RU5JHIFJEVpUyeCItOw9E5BzUGyY4XCByZAxFTkcmgSi2IVTNhkvZACLZBCJlfGdp4yfn8fSx6XkMRLLDUORUZBSIus6EuGbufCciE+QSiOq/A1X1ntK2gUgOgUjJM2iex1DkNGQWiJJ2ae8eTWQNOQWixkOgPvSTtO0g5yaLQOQKVdOhEjfAcTEUOQUZBqJVQ83ePdqc4l7AjlbaR7iX8TZf1QBOdgDaRRif3y5CO/+rGgXXB9mY3ALR9q+hObraqk0I9mDtOj25BKIu06GIsO68zvl1HL92GYqcgDwD0VMpmwIA6BcL1AvRPvrFGs4v5w8MLgfE+QOTqhlfx+Rq2vmDy2nbF0QfZENyDER5vD6MaR/J2nVqcgpEMQ2g3jLFquHHF3H82rXbUDRr1iyUKlUKHh4eqFmzJg4ePFjYQ5IlZb3+DhWIAGDPbSBHo338cdtw/tVM4HKG9vvNN42vY9M/0y9naNsXRB9kI64eDhGIAODYfdau05JbIFr5fh7XsTPkDLVrl2dZLV++HEOHDsXcuXNRs2ZNzJgxAy1btsTZs2cRHBxc2MOTFWW55g4ViABg2y0g+p+jFjeMfNI1MweovBaI9AZOpxpfx4cHgflntS+yTCNH8aztI55XBC4wqoRRUASUsPtABACH7suvdukFkGMgOr8TCC1v1Wa03wqcSHHs2rXLPUXTp0/H22+/jb59+6J8+fKYO3cuvLy88J///KewhyY76l1zHCoQ5bqRZfxFkyszJ+8XzelU4y9MW/ZB+acIjHSIQJSLtetk/MPkGYgkuPPY8WvX7vYUPXnyBEeOHMHIkSN105RKJZo1a4Z9+/YZtM/OzkZ2drbueWqq9qf58OFDqNWGl0dWqVQICAjQW0ZOAgICkJaWZnTsz1OpVMi5dhbC38jB2X8oK70CVbVuUG//HprjWwEzbRWxDaCq+xY0h9ZAs+8nwD/GdNsSlaFq/AHEqT1Q75wN+JYybONVHC5WbEuUC6D2yLNpoYhyAdLSYHRb0tLSAABC4r1AnL2G1bvmQpP52HRtKl2gavgeFMGVoV77GcSdm2brWFn7DShjm0C9YTrEpb/Nt83j9eEoNcz6tZy19Ssafgw8vI+c3ycBLoGAf6Dxxp7+cGk5EnjqgpxfRwNqV9O16eoBVfNhUHiEQb16NITE14ej1C9gwxoWdubGjRsCgNi7d6/e9GHDhokaNWoYtB87dqyAdncGH3wU6uPatWuSap41zIccHqxfPuz9YUkNK4Swr1tZ3rx5E8WLF8fevXtRu3Zt3fThw4dj586dOHDggF7759+laDQaPHjwAEWKFIGCtzanF0AIgfT0dISHh0OptP6INWuYChPrl+ydNTVsd4fPihYtCpVKhdu39U9Nv337NkJDQw3au7u7w93dXW9aQEBAQQ6RyIC/v7/kZVnDVNhYv2TvLK1huzvR2s3NDdWqVcPWrVt10zQaDbZu3aq354iIiIjIGna3pwgAhg4dij59+uDll19GjRo1MGPGDGRmZqJv376FPTQiIiKyU3YZil599VXcvXsXY8aMwa1bt1ClShVs3LgRISEhhT00IiIislN2d6I1ERERUUGwu3OKiIiIiAoCQxERERERGIqIiIiIADAUEREREQFgKCIiIiICwFBEREREBIChiIiIiAgAQxERERERAIYiIiIiIgAMRUREREQAGIqIyIFcvnwZCoUCx44dK+yhkANq1KgRPvjgg8IeBhUghiKymkajkXV/b7zxBhQKBSZPnqw3fc2aNVAoFBavx9QfwEWLFiEgIMCqMcmNeMG/Q6FRW9xWoVCYfSQmJhbcQC1gLnjZ+z9N9YstC6v7mzt3Lnx9fZGTk6OblpGRAVdXVzRq1Eiv7Y4dO6BQKHDhwgUbjNQ4R/4b4axcCnsAZH+USiXmzZuH5OTkAu8rLCwMAwYMsHo5Dw8PfPHFFxgwYAACAwMLYGT2TaFUImf9eIgbf5lu5BkAl7aJgKsXctaPBVLN/L5dPaBKGAVFYCTUv42HuJv0b19Fo+DSaarFY3u2rpYvX44xY
8bg7Nmzumk+Pj4Wr4uso1ICvXYBZ1L1p5f1B8ZUBq5nAaOPAtnPZVx3FfBZPFDCCxj3F3D2ueUBoEcU0DkSWH0VWHZRu84fGlg3vsaNGyMjIwOHDx9GrVq1AAC7d+9GaGgoDhw4gMePH8PDwwMAsH37dkRGRiI6OtqqPoQQUKvVcHHhv0dnxD1FJElycjKuXLlS4A+pwatZs2YIDQ3FpEmTTLZZvXo1KlSoAHd3d5QqVQrTpk2T+uPAnDlzEB0dDTc3N8TFxeGHH37Qm69QKDBnzhwkJCTA09MTUVFRWLVqleT+bEHc+Au4dcr4I/0OXFqNBFRuyFn0OnB2q+m2Dy5D1ez/oAgoAfUPfSH+/p/efHHvolXjCg0N1T38/f2hUCh0z4ODgzF9+nSUKFEC7u7uqFKlCjZu3GhyXWq1Gm+++SbKli2Lq1evAgDWrl2LqlWrwsPDA1FRURg3bpzengeFQoHvv/8eHTt2hJeXF2JjY/G///1P0s/44cOH6N27NwIDA+Hl5YWEhAScP39eNz93j8KaNWsQGxsLDw8PtGzZEteuXZPUny2cTQOOPtB/vBIJxPoDjcOAcC/D+cW9tPNi/YH2kYbz/3oIDCkHlPTVfv3roWHwskRcXBzCwsKwY8cO3bQdO3agffv2KF26NPbv3683vXHjxsjOzsb777+P4OBgeHh4oF69ejh06JBeO4VCgQ0bNqBatWpwd3fHnj17kJmZid69e8PHxwdhYWH5+vsA2OffCGfEUEQOSaVS4fPPP8c333yD69evG8w/cuQIunXrhu7du+Pvv/9GYmIiRo8ejUWLFlnd1y+//IIhQ4bg//7v/3DixAkMGDAAffv2xfbt2/XajR49Gp07d8Zff/2F1157Dd27d8fp06elbmLB8S4Kl94LAXdf5Cx5A3hw2XRbNy+oXvsOimKxUP/YD+Lm3wU6tK+//hrTpk3Dl19+iePHj6Nly5Z45ZVX9IJGruzsbHTt2hXHjh3D7t27ERkZid27d6N3794YMmQITp06hXnz5mHRokWYOHGi3rLjxo1Dt27dcPz4cbRu3RqvvfYaHjx4YPV433jjDRw+fBj/+9//sG/fPggh0Lp1azx9+lTXJisrCxMnTsSSJUvwxx9/ICUlBd27d7f+h2MjQhhO2/7Pe5P0p8AxIz+Gow+0855t+yyNAPbc0X6/5472uVSNGzfWe21t374djRo1QsOGDXXTHz16hAMHDqBx48YYPnw4Vq9ejcWLF+PPP/9ETEwMWrZsafD7HDFiBCZPnozTp0+jUqVKGDZsGHbu3Im1a9di06ZN2LFjB/78809JY3a4vxEOTCGEsZcAkXmJiYm4cuVKgfdTsmRJq88heeONN5CSkoI1a9agdu3aKF++PBYsWIA1a9agY8eOEELgtddew927d7Fp0ybdcsOHD8f69etx8uRJANrzBfbu3Qs3Nze99efk5MDDwwMpKSkAgLp166JChQqYP3++rk23bt2QmZmJ9evXA9C+C3znnXcwZ84cXZtatWqhatWqmD17tlXbZytP53fW7s15VkEEotDycO2/WtIYFy1ahA8++ED3sy5evDgGDRqETz75RNemRo0aqF69OmbNmoXLly+jdOnS2L17NxITE5GdnY1ff/0V/v7+ALR7EJs2bYqRI0fqlv/xxx8xfPhw3Lx5E4D2dzVq1Ch89tlnAIDMzEz4+Phgw4YNaNWqla4PT09PKJX67ysfPXqEwYMHY8aMGTh//jzKlCmDP/74A3Xq1AEA3L9/HxEREVi8eDG6du2KRYsWoW/fvti/fz9q1qwJADhz5gzKlSuHAwcOoEaNGpJ+bvlRfZ025Dwv2hd4+AR4kG18uSB3INANuJBufL6rEoj1A86nAU81QHwQcKid9eP7/vvvdTXx6NEjBAUF4ebNm9iyZQvmzp2LnTt3Ytu2bWjatCkuX76M2NhYLFq0CD179gQAPH36FKVKlcIHH3yAYcOG6fYorVmzBu3btwegPU+pSJEi+PHHH9G1a1cAwIMHD1CiRAn0798fM2bMAOD4fyOcEfcUkUP74osvsHjxYoN3W6dPn0bdunX1ptWtWxfnz5+HWv3vCROvvfYajh07pvcYP368Ret6vs/atWsbPJfVu8CC2kOktM25GWlpabh586ZFP+sePXogMzMTmzZt0gUiAPjrr78wfvx4+Pj46B5vv/02kpOTkZWVpWtXqVIl3ffe3t7w8/PDnTt39PpYvny5QW28/PLLuvmnT5+Gi4uLLuwAQJEiRRAXF6c3XhcXF1SvXl33vGzZsggICJBXbUAbdkwFIkA7z1QgArRB6FSK9mt+NGrUCJmZmTh06BB2796NMmXKoFixYmjYsKHuvKIdO3YgKioKqampePr0qV7NuLq6okaNGgY/32d/dxcuXMCTJ0/0fndBQUGIi4szGI9T/Y1wAjyTjBxagwYN0LJlS4wcORJvvPGG1cv7+/sjJiZGb1pwcLCNRicjBRaIXKFqOtTWo81T69at8eOPP2Lfvn1o0qSJbnpGRgbGjRuHTp06GSyTe4IuoP3H+SyFQmHwKciIiAiD2vD09LTF8MmMmJgYlChRAtu3b8fDhw/RsGFDAEB4eDgiIiKwd+9ebN++Xe/3bglvb29J43GavxFOgnuKyOFNnjwZ69atw759+3TTypUrhz/++EOv3R9//IEyZcpApVJZtX5T6ypfvrzetGdPAs19Xq5cOav6KhAFGYi6TIcioqpNhunn54fw8HCLftYDBw7E5MmT8corr2Dnzp266VWrVsXZs2cRExNj8Hj+UFh+lStXDjk5OThw4IBu2v3793H27Fm98ebk5ODw4cO652fPnkVKSoo8akOmGjdujB07dmDHjh16H8Vv0KABNmzYgIMHD6Jx48a6E5ufrZmnT5/i0KFDBjXzrOjoaLi6uur97h4+fIhz585JGq/d/41wItxTRJKEhYXZTT8VK1bEa6+9hpkzZ+qm/d///R+qV6+Ozz77DK+++ir27duHb7/9VtKx+2HDhqFbt26Ij49Hs2bNsG7dOvz888/YsmWLXruVK1fi5ZdfRr169bB06VIcPHgQCxYsyPf2SaUoGgXx/Mfu3byAUBP/LJ7/2L1Gbbqt0gWqpkOhiKgKzcEfoKrzpk3GPGzYMIwdOxbR0dGoUqUKFi5ciGPHjmHp0qUGbQcPHgy1Wo22bdtiw4YNqFevHsaMGYO2bdsiMjISXbp0gVKpxF9//YUTJ05gwoQJNhljrtjYWLRv3x5vv/025s2bB19fX4wYMQLFixfXnbsCaPdKDR48GDNnzoSLiwvee+891KpVq1DOJwK0H5WXez+NGzfGoEGD8PTpU92eIgBo2LAh3nvvPTx58gSNGzeGt7c3Bg4ciGHDhiEoKAiRkZGYMmUKsrKy0K9fP5Pr9/HxQb9+/TBs2DAUKVIEwcHB+PTTTyUHZ3v9G+GUBJGV1Gq1rPvr06ePaN++vd60S5cuCTc3N/Fsya9atUqUL19euLq6isjISDF16lS9ZRo2bCiGDBlisP6FCxcKf39/vWmzZ88WUVFRwtXV
VZQpU0YsWbJEbz4AMWvWLNG8eXPh7u4uSpUqJZYvX27VdtmSRp1jF/09/7NWq9UiMTFRFC9eXLi6uorKlSuLDRs26OZfunRJABBHjx7VTZs2bZrw9fUVf/zxhxBCiI0bN4o6deoIT09P4efnJ2rUqCHmz5+vaw9A/PLLL3rj8Pf3FwsXLjTZR67na+bBgweiV69ewt/fX3h6eoqWLVuKc+fOGWzf6tWrRVRUlHB3dxfNmjUTV65csf6HZQM5L/alLXI00pbL/R2ULVtWb/rly5cFABEXF6eb9ujRIzF48GBRtGhR4e7uLurWrSsOHjyom799+3YBQDx8+FBvXenp6eL1118XXl5eIiQkREyZMsXg9+vIfyOcFT99RvQCKBQK/PLLL+jQoUNhD4Vk5PlP15Hz4t8IeeA5RURERERgKCIiIiICwIs3EhEREQHgniIiIiIiAAxFRERERAAYioiIiIgAMBQRERERAWAoIiIiIgLAUEREREQEgKGIiIiICABDEREREREAhiIiIiIiAAxFRERERAAYioiIiIgAMBQRERERAWAoIiIiIgLAUEREREQEAPh//1rsSgNxj0AAAAAASUVORK5CYII=",
+ "text/plain": [
+ "<Figure size 600x400 with 6 Axes>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plot_surprisal_differences_checkpoints(seeds=[53], checkpoints=CHECKPOINTS, pos_encodings=False)\n",
+ "plt.savefig(f\"figures/hop_surprisals_no_pos_encodings.pdf\", format=\"pdf\", bbox_inches=\"tight\")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "babyenv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+ }
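The cell above calls `plot_surprisal_differences_checkpoints`, which is defined earlier in `hop_surprisal.ipynb` and not shown in this diff. Below is a minimal sketch of what such a helper might look like, assuming per-seed, per-checkpoint surprisal series and a 2x3 panel layout (matching the "6 Axes" figure); the `CHECKPOINTS` values and the data source are placeholders, not the repo's actual code:

```python
# A minimal sketch of the notebook's plotting helper; names, checkpoint
# steps, and the data source here are assumptions for illustration only.
import numpy as np
import matplotlib.pyplot as plt

CHECKPOINTS = [100, 200, 400, 800, 1600, 3000]  # hypothetical steps

def plot_surprisal_differences_checkpoints(seeds, checkpoints, pos_encodings=True):
    # One panel per checkpoint: a 2x3 grid matches the "6 Axes" figure above.
    fig, axes = plt.subplots(2, 3, figsize=(6, 4), sharex=True, sharey=True)
    for ax, ckpt in zip(axes.flat, checkpoints):
        for seed in seeds:
            # Placeholder: the real code would load per-token surprisals at
            # the hop-token positions for this seed/checkpoint and difference them.
            rng = np.random.default_rng(seed + ckpt)
            diffs = rng.normal(0, 1, size=20).cumsum()
            ax.plot(diffs, label=f"seed {seed}")
        ax.set_title(f"step {ckpt}")
    fig.suptitle("hop surprisal differences"
                 + ("" if pos_encodings else " (no positional encodings)"))
    fig.tight_layout()
    return fig
```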
impossible_llm.yaml ADDED
@@ -0,0 +1,154 @@
+ name: impossible_llm
+ channels:
+ - defaults
+ dependencies:
+ - _libgcc_mutex=0.1=main
+ - _openmp_mutex=5.1=1_gnu
+ - ca-certificates=2024.7.2=h06a4308_0
+ - ld_impl_linux-64=2.38=h1181459_1
+ - libffi=3.4.4=h6a678d5_1
+ - libgcc-ng=11.2.0=h1234567_1
+ - libgomp=11.2.0=h1234567_1
+ - libstdcxx-ng=11.2.0=h1234567_1
+ - ncurses=6.4=h6a678d5_0
+ - openssl=3.0.15=h5eee18b_0
+ - pip=24.2=py39h06a4308_0
+ - python=3.9.19=h955ad1f_1
+ - readline=8.2=h5eee18b_0
+ - setuptools=72.1.0=py39h06a4308_0
+ - sqlite=3.45.3=h5eee18b_0
+ - tk=8.6.14=h39e8969_0
+ - wheel=0.44.0=py39h06a4308_0
+ - xz=5.4.6=h5eee18b_1
+ - zlib=1.2.13=h5eee18b_1
+ - pip:
+ - accelerate==0.34.2
+ - aiohappyeyeballs==2.4.2
+ - aiohttp==3.10.6
+ - aiosignal==1.3.1
+ - asttokens==2.2.1
+ - async-timeout==4.0.3
+ - attrs==24.2.0
+ - backcall==0.2.0
+ - black==24.8.0
+ - blessed==1.20.0
+ - certifi==2023.11.17
+ - charset-normalizer==3.3.2
+ - click==8.1.7
+ - cmake==3.30.3
+ - comm==0.1.2
+ - contourpy==1.2.0
+ - cycler==0.12.1
+ - data==0.4
+ - datasets==3.0.1
+ - debugpy==1.6.7
+ - decorator==5.1.1
+ - dill==0.3.8
+ - emoji==2.8.0
+ - exceptiongroup==1.1.0
+ - executing==1.2.0
+ - filelock==3.12.2
+ - fonttools==4.45.1
+ - frozenlist==1.4.1
+ - fsspec==2023.10.0
+ - funcsigs==1.0.2
+ - future==0.18.3
+ - gmpy2==2.1.0
+ - gpustat==1.1.1
+ - huggingface-hub==0.25.0
+ - idna==3.6
+ - importlib-metadata==6.0.0
+ - importlib-resources==6.4.5
+ - iniconfig==2.0.0
+ - ipykernel==6.23.1
+ - ipython==8.0.0
+ - jedi==0.18.2
+ - jinja2==3.1.2
+ - joblib==1.3.2
+ - jupyter-client==8.1.0
+ - jupyter-core==5.1.0
+ - kiwisolver==1.4.5
+ - latex==0.7.0
+ - latexcodec==1.0.0
+ - lit==18.1.8
+ - markupsafe==2.1.2
+ - matplotlib==3.8.2
+ - matplotlib-inline==0.1.6
+ - mizani==0.9.3
+ - mpmath==1.2.1
+ - multidict==6.1.0
+ - multiprocess==0.70.16
+ - mypy-extensions==1.0.0
+ - nest-asyncio==1.5.6
+ - networkx==2.8.6
+ - nltk==3.8.1
+ - numpy==1.26.2
+ - nvidia-cublas-cu11==11.10.3.66
+ - nvidia-cuda-cupti-cu11==11.7.101
+ - nvidia-cuda-nvrtc-cu11==11.7.99
+ - nvidia-cuda-runtime-cu11==11.7.99
+ - nvidia-cudnn-cu11==8.5.0.96
+ - nvidia-cufft-cu11==10.9.0.58
+ - nvidia-curand-cu11==10.2.10.91
+ - nvidia-cusolver-cu11==11.4.0.1
+ - nvidia-cusparse-cu11==11.7.4.91
+ - nvidia-ml-py==12.560.30
+ - nvidia-nccl-cu11==2.14.3
+ - nvidia-nvtx-cu11==11.7.91
+ - packaging==23.0
+ - pandas==2.1.3
+ - parso==0.8.3
+ - pathspec==0.12.1
+ - patsy==0.5.3
+ - peft==0.13.0
+ - pexpect==4.8.0
+ - pickleshare==0.7.5
+ - pillow==10.1.0
+ - platformdirs==2.5.2
+ - plotnine==0.12.4
+ - pluggy==1.3.0
+ - pluralizer==1.2.0
+ - prompt-toolkit==3.0.30
+ - protobuf==4.25.1
+ - psutil==5.9.1
+ - ptyprocess==0.7.0
+ - pure-eval==0.2.2
+ - pyarrow==17.0.0
+ - pygments==2.15.0
+ - pyparsing==3.1.1
+ - pytest==7.4.3
+ - python-dateutil==2.8.2
+ - pytz==2023.3.post1
+ - pyyaml==6.0.1
+ - pyzmq==23.0.0
+ - regex==2023.10.3
+ - requests==2.32.3
+ - safetensors==0.4.5
+ - scikit-learn==1.3.2
+ - scipy==1.11.4
+ - seaborn==0.13.0
+ - sentencepiece==0.2.0
+ - shutilwhich==1.1.0
+ - six==1.16.0
+ - stack-data==0.6.0
+ - stanza==1.9.2
+ - statsmodels==0.14.0
+ - sympy==1.11.1
+ - tempdir==0.7.1
+ - threadpoolctl==3.2.0
+ - tokenizers==0.20.0
+ - tomli==2.0.1
+ - torch==2.0.0
+ - tornado==6.2
+ - tqdm==4.66.5
+ - traitlets==5.7.1
+ - transformers==4.45.1
+ - triton==2.0.0
+ - typing-extensions==4.6.0
+ - tzdata==2023.3
+ - urllib3==2.1.0
+ - wcwidth==0.2.5
+ - xxhash==3.5.0
+ - yarl==1.13.0
+ - zipp==3.12.0
+ prefix: /home/yiren/new_ssd2/chunhui/miniconda/envs/impossible_llm
impossible_llm_update.yaml ADDED
@@ -0,0 +1,162 @@
+ name: impossible_llm
+ channels:
+ - defaults
+ dependencies:
+ - _libgcc_mutex=0.1=main
+ - _openmp_mutex=5.1=1_gnu
+ - ca-certificates=2024.7.2=h06a4308_0
+ - ld_impl_linux-64=2.38=h1181459_1
+ - libffi=3.4.4=h6a678d5_1
+ - libgcc-ng=11.2.0=h1234567_1
+ - libgomp=11.2.0=h1234567_1
+ - libstdcxx-ng=11.2.0=h1234567_1
+ - ncurses=6.4=h6a678d5_0
+ - openssl=3.0.15=h5eee18b_0
+ - pip=24.2=py39h06a4308_0
+ - python=3.9.19=h955ad1f_1
+ - readline=8.2=h5eee18b_0
+ - setuptools=72.1.0=py39h06a4308_0
+ - sqlite=3.45.3=h5eee18b_0
+ - tk=8.6.14=h39e8969_0
+ - wheel=0.44.0=py39h06a4308_0
+ - xz=5.4.6=h5eee18b_1
+ - zlib=1.2.13=h5eee18b_1
+ - pip:
+ - accelerate==1.0.0
+ - aiohappyeyeballs==2.4.2
+ - aiohttp==3.10.6
+ - aiosignal==1.3.1
+ - annotated-types==0.7.0
+ - asttokens==2.2.1
+ - async-timeout==4.0.3
+ - attrs==24.2.0
+ - backcall==0.2.0
+ - black==24.8.0
+ - blessed==1.20.0
+ - certifi==2023.11.17
+ - charset-normalizer==3.3.2
+ - click==8.1.7
+ - cmake==3.30.3
+ - comm==0.1.2
+ - contourpy==1.2.0
+ - cycler==0.12.1
+ - data==0.4
+ - datasets==3.0.1
+ - debugpy==1.6.7
+ - decorator==5.1.1
+ - deepspeed==0.15.2
+ - dill==0.3.8
+ - emoji==2.8.0
+ - exceptiongroup==1.1.0
+ - executing==1.2.0
+ - filelock==3.12.2
+ - fonttools==4.45.1
+ - frozenlist==1.4.1
+ - fsspec==2023.10.0
+ - funcsigs==1.0.2
+ - future==0.18.3
+ - gmpy2==2.1.0
+ - gpustat==1.1.1
+ - hjson==3.1.0
+ - huggingface-hub==0.25.0
+ - idna==3.6
+ - importlib-metadata==6.0.0
+ - importlib-resources==6.4.5
+ - iniconfig==2.0.0
+ - ipykernel==6.23.1
+ - ipython==8.0.0
+ - jedi==0.18.2
+ - jinja2==3.1.2
+ - joblib==1.3.2
+ - jupyter-client==8.1.0
+ - jupyter-core==5.1.0
+ - kiwisolver==1.4.5
+ - latex==0.7.0
+ - latexcodec==1.0.0
+ - lit==18.1.8
+ - markupsafe==2.1.2
+ - matplotlib==3.8.2
+ - matplotlib-inline==0.1.6
+ - mizani==0.9.3
+ - mpmath==1.2.1
+ - msgpack==1.1.0
+ - multidict==6.1.0
+ - multiprocess==0.70.16
+ - mypy-extensions==1.0.0
+ - nest-asyncio==1.5.6
+ - networkx==2.8.6
+ - ninja==1.11.1.1
+ - nltk==3.8.1
+ - numpy==1.26.2
+ - nvidia-cublas-cu11==11.10.3.66
+ - nvidia-cuda-cupti-cu11==11.7.101
+ - nvidia-cuda-nvrtc-cu11==11.7.99
+ - nvidia-cuda-runtime-cu11==11.7.99
+ - nvidia-cudnn-cu11==8.5.0.96
+ - nvidia-cufft-cu11==10.9.0.58
+ - nvidia-curand-cu11==10.2.10.91
+ - nvidia-cusolver-cu11==11.4.0.1
+ - nvidia-cusparse-cu11==11.7.4.91
+ - nvidia-ml-py==12.560.30
+ - nvidia-nccl-cu11==2.14.3
+ - nvidia-nvtx-cu11==11.7.91
+ - packaging==23.0
+ - pandas==2.1.3
+ - parso==0.8.3
+ - pathspec==0.12.1
+ - patsy==0.5.3
+ - peft==0.13.0
+ - pexpect==4.8.0
+ - pickleshare==0.7.5
+ - pillow==10.1.0
+ - platformdirs==2.5.2
+ - plotnine==0.12.4
+ - pluggy==1.3.0
+ - pluralizer==1.2.0
+ - prompt-toolkit==3.0.30
+ - protobuf==4.25.1
+ - psutil==5.9.1
+ - ptyprocess==0.7.0
+ - pure-eval==0.2.2
+ - py-cpuinfo==9.0.0
+ - pyarrow==17.0.0
+ - pydantic==2.9.2
+ - pydantic-core==2.23.4
+ - pygments==2.15.0
+ - pyparsing==3.1.1
+ - pytest==7.4.3
+ - python-dateutil==2.8.2
+ - pytz==2023.3.post1
+ - pyyaml==6.0.1
+ - pyzmq==23.0.0
+ - regex==2023.10.3
+ - requests==2.32.3
+ - safetensors==0.4.5
+ - scikit-learn==1.3.2
+ - scipy==1.11.4
+ - seaborn==0.13.0
+ - sentencepiece==0.2.0
+ - shutilwhich==1.1.0
+ - six==1.16.0
+ - stack-data==0.6.0
+ - stanza==1.9.2
+ - statsmodels==0.14.0
+ - sympy==1.11.1
+ - tempdir==0.7.1
+ - threadpoolctl==3.2.0
+ - tokenizers==0.20.0
+ - tomli==2.0.1
+ - torch==2.0.0
+ - tornado==6.2
+ - tqdm==4.66.5
+ - traitlets==5.7.1
+ - transformers==4.45.1
+ - triton==2.0.0
+ - typing-extensions==4.12.2
+ - tzdata==2023.3
+ - urllib3==2.1.0
+ - wcwidth==0.2.5
+ - xxhash==3.5.0
+ - yarl==1.13.0
+ - zipp==3.12.0
+ prefix: /home/yiren/new_ssd2/chunhui/miniconda/envs/impossible_llm
requirements.txt ADDED
@@ -0,0 +1,94 @@
+ certifi==2023.11.17
+ charset-normalizer==3.3.2
+ click==8.1.7
+ contourpy==1.1.1
+ cycler==0.12.1
+ data==0.4
+ emoji==2.8.0
+ fonttools==4.45.1
+ fsspec==2023.10.0
+ funcsigs==1.0.2
+ future==0.18.3
+ huggingface-hub==0.19.4
+ idna==3.6
+ iniconfig==2.0.0
+ joblib==1.3.2
+ kiwisolver==1.4.5
+ latex==0.7.0
+ matplotlib==3.7.5
+ mizani==0.9.3
+ nltk==3.8.1
+ numpy==1.24.4
+ pandas==2.0.3
+ patsy==0.5.3
+ Pillow==10.1.0
+ plotnine==0.12.4
+ pluggy==1.3.0
+ pluralizer==1.2.0
+ protobuf==4.25.1
+ pyparsing==3.1.1
+ pytest==7.4.3
+ pytz==2023.3.post1
+ PyYAML==6.0.1
+ regex==2023.10.3
+ requests==2.31.0
+ safetensors==0.4.1
+ scikit-learn==1.3.2
+ scipy==1.10.1
+ seaborn==0.13.0
+ shutilwhich==1.1.0
+ stanza==1.6.1
+ statsmodels==0.14.0
+ tempdir==0.7.1
+ threadpoolctl==3.2.0
+ tokenizers==0.15.0
+ tomli==2.0.1
+ torch==2.0.0
+ tqdm==4.66.1
+ transformers==4.35.2
+ triton==2.0.0
+ tzdata==2023.3
+ urllib3==2.1.0
+ asttokens==2.0.5
+ comm==0.1.2
+ debugpy==1.6.7
+ decorator==5.1.1
+ exceptiongroup==1.1.0
+ executing==1.2.0
+ filelock==3.12.2
+ gmpy2==2.1.0
+ importlib-metadata==6.0.0
+ ipykernel==6.23.1
+ ipython==8.0.0
+ jedi==0.18.2
+ Jinja2==3.1.2
+ jupyter_client==8.1.0
+ jupyter_core==5.1.0
+ latexcodec==1.0.0
+ MarkupSafe==2.1.2
+ matplotlib-inline==0.1.6
+ mpmath==1.2.1
+ nest-asyncio==1.5.6
+ networkx==2.8.6
+ packaging==23.0
+ parso==0.8.3
+ pexpect==4.8.0
+ pickleshare==0.7.5
+ platformdirs==2.5.2
+ prompt-toolkit==3.0.30
+ psutil==5.9.1
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ Pygments==2.15.0
+ python-dateutil==2.8.2
+ pyzmq==23.0.0
+ six==1.16.0
+ stack-data==0.6.0
+ sympy==1.11.1
+ tornado==6.2
+ traitlets==5.7.1
+ typing_extensions==4.6.0
+ wcwidth==0.2.5
+ zipp==3.12.0
+
+
requirements_1.txt ADDED
@@ -0,0 +1,51 @@
+ certifi==2023.11.17
+ charset-normalizer==3.3.2
+ click==8.1.7
+ contourpy==1.2.0
+ cycler==0.12.1
+ data==0.4
+ emoji==2.8.0
+ fonttools==4.45.1
+ fsspec==2023.10.0
+ funcsigs==1.0.2
+ future==0.18.3
+ huggingface-hub==0.19.4
+ idna==3.6
+ iniconfig==2.0.0
+ joblib==1.3.2
+ kiwisolver==1.4.5
+ latex==0.7.0
+ matplotlib==3.8.2
+ mizani==0.9.3
+ nltk==3.8.1
+ numpy==1.26.2
+ pandas==2.1.3
+ patsy==0.5.3
+ Pillow==10.1.0
+ plotnine==0.12.4
+ pluggy==1.3.0
+ pluralizer==1.2.0
+ protobuf==4.25.1
+ pyparsing==3.1.1
+ pytest==7.4.3
+ pytz==2023.3.post1
+ PyYAML==6.0.1
+ regex==2023.10.3
+ requests==2.31.0
+ safetensors==0.4.1
+ scikit-learn==1.3.2
+ scipy==1.11.4
+ seaborn==0.13.0
+ shutilwhich==1.1.0
+ stanza==1.6.1
+ statsmodels==0.14.0
+ tempdir==0.7.1
+ threadpoolctl==3.2.0
+ tokenizers==0.15.0
+ tomli==2.0.1
+ torch==2.0.0
+ tqdm==4.66.1
+ transformers==4.35.2
+ triton==2.0.0
+ tzdata==2023.3
+ urllib3==2.1.0
requirements_2.txt ADDED
@@ -0,0 +1,82 @@
+ asttokens==2.2.1
+ comm==0.1.2
+ debugpy==1.6.7
+ decorator==5.1.1
+ exceptiongroup==1.1.0
+ executing==1.2.0
+ filelock==3.12.2
+ gmpy2==2.1.0
+ importlib-metadata==6.0.0
+ ipykernel==6.23.1
+ ipython==8.0.0
+ jedi==0.18.2
+ Jinja2==3.1.2
+ jupyter_client==8.1.0
+ jupyter_core==5.1.0
+ latexcodec==1.0.0
+ MarkupSafe==2.1.2
+ matplotlib-inline==0.1.6
+ mpmath==1.2.1
+ nest-asyncio==1.5.6
+ networkx==2.8.6
+ packaging==23.0
+ parso==0.8.3
+ pexpect==4.8.0
+ pickleshare==0.7.5
+ platformdirs==2.5.2
+ prompt-toolkit==3.0.30
+ psutil==5.9.1
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ Pygments==2.15.0
+ python-dateutil==2.8.2
+ pyzmq==23.0.0
+ six==1.16.0
+ stack-data==0.6.0
+ sympy==1.11.1
+ tornado==6.2
+ traitlets==5.7.1
+ typing_extensions==4.6.0
+ wcwidth==0.2.5
+ zipp==3.12.0
+ # asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1698341106958/work
+ # comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1691044910542/work
+ # debugpy @ file:///croot/debugpy_1690905042057/work
+ # decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1641555617451/work
+ # exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1700579780973/work
+ # executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1698579936712/work
+ # filelock @ file:///croot/filelock_1700591183607/work
+ # gmpy2 @ file:///tmp/build/80754af9/gmpy2_1645455533097/work
+ # importlib-metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1688754491823/work
+ # ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1698244021190/work
+ # ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1701092366260/work
+ # jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1696326070614/work
+ # Jinja2 @ file:///croot/jinja2_1666908132255/work
+ # jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1699283905679/work
+ # jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1698673647019/work
+ # latexcodec @ file:///home/conda/feedstock_root/build_artifacts/latexcodec_1592937263153/work
+ # MarkupSafe @ file:///opt/conda/conda-bld/markupsafe_1654597864307/work
+ # matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1660814786464/work
+ # mpmath @ file:///croot/mpmath_1690848262763/work
+ # nest-asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1697083700168/work
+ # networkx @ file:///croot/networkx_1690561992265/work
+ # packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1696202382185/work
+ # parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1638334955874/work
+ # pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1667297516076/work
+ # pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1602536217715/work
+ # platformdirs @ file:///home/conda/feedstock_root/build_artifacts/platformdirs_1699715570510/work
+ # prompt-toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1699963054032/work
+ # psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1695367094274/work
+ # ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1609419310487/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
+ # pure-eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1642875951954/work
+ # Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1700607939962/work
+ # python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1626286286081/work
+ # pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1652965335788/work
+ # six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
+ # stack-data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1669632077133/work
+ # sympy @ file:///croot/sympy_1668202399572/work
+ # tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1695373560918/work
+ # traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1701095650114/work
+ # typing_extensions @ file:///croot/typing_extensions_1690297465030/work
+ # wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1700607916581/work
+ # zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1695255097490/work
test.py ADDED
@@ -0,0 +1,23 @@
+ from huggingface_hub import upload_folder
+
+ # Upload the local project folder to the Hugging Face Hub repo.
+ upload_folder(
+ folder_path="/home/yiren/new_ssd2/chunhui/yaning/project/mission-impossible-language-models",
+ repo_id="Yaning1001/impossible_llm",
+ path_in_repo="mission-impossible-language-models"
+ )
+
+ # Sanity check (disabled): verify Hub authentication and tokenizer pad tokens.
+ # import torch
+ # from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # model_id_1 = "meta-llama/Llama-3.2-3B"
+ # model_id_2 = "Qwen/Qwen2.5-7B"
+
+ # # Check your authentication - this line should succeed without errors!
+ # tokenizer_0 = AutoTokenizer.from_pretrained('gpt2')
+ # tokenizer_1 = AutoTokenizer.from_pretrained(model_id_1)
+ # tokenizer_2 = AutoTokenizer.from_pretrained(model_id_2)
+ # tokenizer_2.pad_token = tokenizer_1.pad_token
+
+ # print("tokenizer_0.pad_token:", type(tokenizer_0.pad_token))
+ # print("tokenizer_1.pad_token:", type(tokenizer_1.pad_token))
+ # print("tokenizer_2.pad_token:", type(tokenizer_2.pad_token))
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/refs/main ADDED
@@ -0,0 +1 @@
+ 13afe5124825b4f3751f836b40dafda64c1ed062
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/snapshots/5cc0ffe09ee49f7be6ca7c794ee6bd7245e84e60/generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "eos_token_id": 128001,
+ "transformers_version": "4.45.0.dev0",
+ "do_sample": true,
+ "temperature": 0.6,
+ "top_p": 0.9
+ }
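For context, these are the sampling settings that `model.generate` picks up by default when this checkpoint is loaded. A short sketch reconstructing the same configuration by hand; the generate call in the comment assumes an already-loaded `model` and tokenized `inputs`:

```python
# Sketch: the generation_config.json above, expressed as a GenerationConfig.
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    bos_token_id=128000,
    eos_token_id=128001,
    do_sample=True,      # sample rather than greedy-decode
    temperature=0.6,
    top_p=0.9,           # nucleus sampling cutoff
)
# With a loaded model: outputs = model.generate(**inputs, generation_config=gen_cfg)
print(gen_cfg)
```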
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/artifacts/models--meta-llama--Llama-3.2-3B/snapshots/5cc0ffe09ee49f7be6ca7c794ee6bd7245e84e60/model.safetensors.index.json ADDED
@@ -0,0 +1,261 @@
+ {
+ "metadata": {
+ "total_size": 6425499648
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
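The `weight_map` above is how `transformers` locates each tensor across the two safetensors shards. Here is a sketch of doing that lookup manually; it assumes the index file and both shard files sit in the current directory:

```python
# Sketch: resolve one tensor through the sharded-checkpoint index.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.20.mlp.down_proj.weight"  # lives in shard 2 per the map
shard = index["weight_map"][name]
with safe_open(shard, framework="pt") as f:
    # get_slice reads metadata lazily; use f.get_tensor(name) to materialize.
    tensor = f.get_slice(name)
    print(name, "->", shard, tensor.get_shape())
```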
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1050/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fba7b84ac8c089d417b794dfc0527040604b90e2cdfe6e9df5b55afe9eab61a
+ size 17210282
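This `tokenizer.json` entry is a Git LFS pointer, not the tokenizer itself: the repo stores only the spec version, a sha256 oid, and the byte size (about 17.2 MB), while the actual file lives in LFS storage. A small parser for the standard three-line pointer format; the helper name is ours:

```python
# Sketch: parse a Git LFS pointer file into its three fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:6fba7b84ac8c089d417b794dfc0527040604b90e2cdfe6e9df5b55afe9eab61a
size 17210282"""
print(parse_lfs_pointer(pointer))  # ~17.2 MB tokenizer stored via LFS
```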
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1050/tokenizer_config.json ADDED
@@ -0,0 +1,2078 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ },
2051
+ "128256": {
2052
+ "content": "[PAD]",
2053
+ "lstrip": false,
2054
+ "normalized": false,
2055
+ "rstrip": false,
2056
+ "single_word": false,
2057
+ "special": true
2058
+ },
2059
+ "128257": {
2060
+ "content": "🅁",
2061
+ "lstrip": true,
2062
+ "normalized": true,
2063
+ "rstrip": false,
2064
+ "single_word": false,
2065
+ "special": false
2066
+ }
2067
+ },
2068
+ "bos_token": "<|begin_of_text|>",
2069
+ "clean_up_tokenization_spaces": true,
2070
+ "eos_token": "<|end_of_text|>",
2071
+ "model_input_names": [
2072
+ "input_ids",
2073
+ "attention_mask"
2074
+ ],
2075
+ "model_max_length": 131072,
2076
+ "pad_token": "[PAD]",
2077
+ "tokenizer_class": "PreTrainedTokenizerFast"
2078
+ }
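The tokenizer_config.json above registers Llama 3's standard special tokens and 248 reserved slots, plus two entries that appear to be additions for this run: a dedicated [PAD] token at id 128256 and a non-special 🅁 token at 128257, presumably a marker used by the reversal data transformation this checkpoint was trained on. A hedged sketch of loading the checkpoint tokenizer and inspecting these entries, assuming the directory layout shown here is available locally:

# Sketch, assuming transformers is installed and the checkpoint dir is local.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("runs/checkpoint-1050")  # illustrative path
print(tok.pad_token, tok.pad_token_id)        # [PAD] 128256
print(tok.convert_ids_to_tokens([128257]))    # ['🅁']
print(tok.bos_token, tok.eos_token)           # <|begin_of_text|> <|end_of_text|>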
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/config.json ADDED
@@ -0,0 +1,36 @@
+ {
+ "_name_or_path": "meta-llama/Llama-3.2-3B",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": 128001,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "float16",
+ "transformers_version": "4.45.1",
+ "use_cache": true,
+ "vocab_size": 128256
+ }
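config.json pins the unmodified Llama-3.2-3B architecture: 28 layers, hidden size 3072, grouped-query attention with 24 query heads over 8 KV heads, and llama3-type RoPE scaling. A sketch of rebuilding the model skeleton from this config alone, assuming transformers >= 4.45 and an illustrative local path:

# Sketch: build the architecture from config.json without loading weights.
from transformers import AutoConfig, AutoModelForCausalLM

cfg = AutoConfig.from_pretrained("runs/checkpoint-1200")  # illustrative path
assert cfg.model_type == "llama" and cfg.num_key_value_heads == 8
model = AutoModelForCausalLM.from_config(cfg)  # randomly initialized skeleton
n_params = sum(p.numel() for p in model.parameters())
print(f"{n_params / 1e9:.2f}B parameters")  # roughly 3.2B with tied embeddings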
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70c0a65a1f48609b709cff4968a5ca16f280b65798ab3e96b60a43db042976c5
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01a7577bee4c118f55763b566db086aa10d2ec5a2d46bc623db8c8381f4ba7c5
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db09e5b1b76154e3b7043e11a650fd3051b5b3ab595a43c485df65a086c6f540
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:671e665f8b90df160f325e725a02c3635553d4058486bf76788722e80c8864e6
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7169d16d22347ae1ff1b51cfe165ad5333e47dc6b178e93f63331e743d23305a
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d866764dfa788f78507a77e36fe7f085156ee01dd1d0ef2cc476f85d7a3134dd
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eeb1301b45d7398af92537a9959369547a41f37de86e06f898e44fa1c6e0ffcc
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8b8dd10e3f7f429a30da6714b6bc34fffe93612cca0d55e0213efbb5aac3b13
+ size 627
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1200/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d396f0642c83730ebff54eaafc7dfb38092d5d41fd28ce6ebbd56ec539612e4b
+ size 6011
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/config.json ADDED
@@ -0,0 +1,36 @@
+ {
+ "_name_or_path": "meta-llama/Llama-3.2-3B",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": 128001,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "float16",
+ "transformers_version": "4.45.1",
+ "use_cache": true,
+ "vocab_size": 128256
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "do_sample": true,
+ "eos_token_id": 128001,
+ "temperature": 0.6,
+ "top_p": 0.9,
+ "transformers_version": "4.45.1"
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/latest ADDED
@@ -0,0 +1 @@
+ global_step1350
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/model.safetensors.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+ "metadata": {
+ "total_size": 7213504512
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00002-of-00002.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1350/trainer_state.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 6.467065868263473,
+ "eval_steps": 500,
+ "global_step": 1350,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.718562874251497,
+ "grad_norm": 0.661852240562439,
+ "learning_rate": 4.6899038461538465e-05,
+ "loss": 2.1718,
+ "step": 150
+ },
+ {
+ "epoch": 1.437125748502994,
+ "grad_norm": 0.7592059373855591,
+ "learning_rate": 4.329326923076924e-05,
+ "loss": 1.5828,
+ "step": 300
+ },
+ {
+ "epoch": 2.155688622754491,
+ "grad_norm": 0.6453707218170166,
+ "learning_rate": 3.96875e-05,
+ "loss": 1.478,
+ "step": 450
+ },
+ {
+ "epoch": 2.874251497005988,
+ "grad_norm": 0.6175025105476379,
+ "learning_rate": 3.608173076923077e-05,
+ "loss": 1.4098,
+ "step": 600
+ },
+ {
+ "epoch": 3.592814371257485,
+ "grad_norm": 0.7075008749961853,
+ "learning_rate": 3.247596153846154e-05,
+ "loss": 1.3483,
+ "step": 750
+ },
+ {
+ "epoch": 4.311377245508982,
+ "grad_norm": 0.8413554430007935,
+ "learning_rate": 2.8870192307692306e-05,
+ "loss": 1.3009,
+ "step": 900
+ },
+ {
+ "epoch": 5.029940119760479,
+ "grad_norm": 0.9734054207801819,
+ "learning_rate": 2.5264423076923078e-05,
+ "loss": 1.2617,
+ "step": 1050
+ },
+ {
+ "epoch": 5.748502994011976,
+ "grad_norm": 0.8429175019264221,
+ "learning_rate": 2.1658653846153847e-05,
+ "loss": 1.1814,
+ "step": 1200
+ },
+ {
+ "epoch": 6.467065868263473,
+ "grad_norm": 0.9255201816558838,
+ "learning_rate": 1.8052884615384616e-05,
+ "loss": 1.1207,
+ "step": 1350
+ }
+ ],
+ "logging_steps": 150,
+ "max_steps": 2080,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 10,
+ "save_steps": 150,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.9639047370404004e+18,
+ "train_batch_size": 3,
+ "trial_name": null,
+ "trial_params": null
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/config.json ADDED
@@ -0,0 +1,36 @@
+ {
+ "_name_or_path": "meta-llama/Llama-3.2-3B",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": 128001,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "float16",
+ "transformers_version": "4.45.1",
+ "use_cache": true,
+ "vocab_size": 128256
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "do_sample": true,
+ "eos_token_id": 128001,
+ "temperature": 0.6,
+ "top_p": 0.9,
+ "transformers_version": "4.45.1"
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/latest ADDED
@@ -0,0 +1 @@
+ global_step1500
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/model.safetensors.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+ "metadata": {
+ "total_size": 7213504512
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00002-of-00002.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:638670b4f96148564ef368df48443cdaa781c9697294452c3ff15d3f926e388a
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:595fa5c22607d6ab832d0b14385e9e6b9bfeb5d4c181d0acb849edd1df3e66a1
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/tokenizer_config.json ADDED
@@ -0,0 +1,2078 @@
+ {
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ },
2051
+ "128256": {
2052
+ "content": "[PAD]",
2053
+ "lstrip": false,
2054
+ "normalized": false,
2055
+ "rstrip": false,
2056
+ "single_word": false,
2057
+ "special": true
2058
+ },
2059
+ "128257": {
2060
+ "content": "🅁",
2061
+ "lstrip": true,
2062
+ "normalized": true,
2063
+ "rstrip": false,
2064
+ "single_word": false,
2065
+ "special": false
2066
+ }
2067
+ },
2068
+ "bos_token": "<|begin_of_text|>",
2069
+ "clean_up_tokenization_spaces": true,
2070
+ "eos_token": "<|end_of_text|>",
2071
+ "model_input_names": [
2072
+ "input_ids",
2073
+ "attention_mask"
2074
+ ],
2075
+ "model_max_length": 131072,
2076
+ "pad_token": "[PAD]",
2077
+ "tokenizer_class": "PreTrainedTokenizerFast"
2078
+ }
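
The tokenizer config above is a standard `transformers` fast-tokenizer config: ids 128000-128255 are the stock Llama 3 special tokens, 128256 is an added `[PAD]` token, and 128257 (`🅁`) is an added non-special token. A minimal sketch of how to sanity-check it (illustrative only, not part of the uploaded files; the path is the checkpoint directory from this commit):

from transformers import AutoTokenizer

# Checkpoint path from this commit; adjust if the files live elsewhere (assumption).
ckpt = "train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500"
tok = AutoTokenizer.from_pretrained(ckpt)

print(tok.bos_token)                    # <|begin_of_text|>
print(tok.eos_token)                    # <|end_of_text|>
print(tok.pad_token, tok.pad_token_id)  # [PAD] 128256, per added_tokens_decoder
print(tok.model_max_length)             # 131072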
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/trainer_state.json ADDED
@@ -0,0 +1,103 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 7.18562874251497,
+ "eval_steps": 500,
+ "global_step": 1500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.718562874251497,
+ "grad_norm": 0.661852240562439,
+ "learning_rate": 4.6899038461538465e-05,
+ "loss": 2.1718,
+ "step": 150
+ },
+ {
+ "epoch": 1.437125748502994,
+ "grad_norm": 0.7592059373855591,
+ "learning_rate": 4.329326923076924e-05,
+ "loss": 1.5828,
+ "step": 300
+ },
+ {
+ "epoch": 2.155688622754491,
+ "grad_norm": 0.6453707218170166,
+ "learning_rate": 3.96875e-05,
+ "loss": 1.478,
+ "step": 450
+ },
+ {
+ "epoch": 2.874251497005988,
+ "grad_norm": 0.6175025105476379,
+ "learning_rate": 3.608173076923077e-05,
+ "loss": 1.4098,
+ "step": 600
+ },
+ {
+ "epoch": 3.592814371257485,
+ "grad_norm": 0.7075008749961853,
+ "learning_rate": 3.247596153846154e-05,
+ "loss": 1.3483,
+ "step": 750
+ },
+ {
+ "epoch": 4.311377245508982,
+ "grad_norm": 0.8413554430007935,
+ "learning_rate": 2.8870192307692306e-05,
+ "loss": 1.3009,
+ "step": 900
+ },
+ {
+ "epoch": 5.029940119760479,
+ "grad_norm": 0.9734054207801819,
+ "learning_rate": 2.5264423076923078e-05,
+ "loss": 1.2617,
+ "step": 1050
+ },
+ {
+ "epoch": 5.748502994011976,
+ "grad_norm": 0.8429175019264221,
+ "learning_rate": 2.1658653846153847e-05,
+ "loss": 1.1814,
+ "step": 1200
+ },
+ {
+ "epoch": 6.467065868263473,
+ "grad_norm": 0.9255201816558838,
+ "learning_rate": 1.8052884615384616e-05,
+ "loss": 1.1207,
+ "step": 1350
+ },
+ {
+ "epoch": 7.18562874251497,
+ "grad_norm": 1.2465689182281494,
+ "learning_rate": 1.4447115384615385e-05,
+ "loss": 1.0608,
+ "step": 1500
+ }
+ ],
+ "logging_steps": 150,
+ "max_steps": 2080,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 10,
+ "save_steps": 150,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.1821163744893338e+18,
+ "train_batch_size": 3,
+ "trial_name": null,
+ "trial_params": null
+ }
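
The trainer state above logs every 150 optimizer steps: over 1500 of the 2080 max steps the training loss falls from 2.17 to 1.06 while the learning rate decays linearly. A small illustrative snippet (not part of the uploaded files; assumes it is run from the repository root) for pulling the loss curve out of the JSON:

import json

# Path from this commit (assumption: run from the repository root).
path = ("train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/"
        "runs/checkpoint-1500/trainer_state.json")
with open(path) as f:
    state = json.load(f)

# Each record carries step, epoch, loss, grad_norm, and learning_rate.
for rec in state["log_history"]:
    print(f"step {rec['step']:>4}  epoch {rec['epoch']:5.2f}  "
          f"loss {rec['loss']:.4f}  lr {rec['learning_rate']:.3e}")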
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1500/zero_to_fp32.py ADDED
@@ -0,0 +1,604 @@
+ #!/usr/bin/env python
+
+ # Copyright (c) Microsoft Corporation.
+ # SPDX-License-Identifier: Apache-2.0
+
+ # DeepSpeed Team
+
+ # This script extracts fp32 consolidated weights from ZeRO stage 1, 2 and 3 DeepSpeed checkpoints. It gets
+ # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+ # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+ # application.
+ #
+ # example: python zero_to_fp32.py . pytorch_model.bin
+
+ import argparse
+ import torch
+ import glob
+ import math
+ import os
+ import re
+ from collections import OrderedDict
+ from dataclasses import dataclass
+
+ # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+ # DeepSpeed data structures it has to be available in the current python environment.
+ from deepspeed.utils import logger
+ from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+                                             FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+                                             FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+ @dataclass
+ class zero_model_state:
+     buffers: dict()
+     param_shapes: dict()
+     shared_params: list
+     ds_version: int
+     frozen_param_shapes: dict()
+     frozen_param_fragments: dict()
+
+
+ debug = 0
+
+ # load to cpu
+ device = torch.device('cpu')
+
+
+ def atoi(text):
+     return int(text) if text.isdigit() else text
+
+
+ def natural_keys(text):
+     '''
+     alist.sort(key=natural_keys) sorts in human order
+     http://nedbatchelder.com/blog/200712/human_sorting.html
+     (See Toothy's implementation in the comments)
+     '''
+     return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+ def get_model_state_file(checkpoint_dir, zero_stage):
+     if not os.path.isdir(checkpoint_dir):
+         raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+     # there should be only one file
+     if zero_stage <= 2:
+         file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+     elif zero_stage == 3:
+         file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+     if not os.path.exists(file):
+         raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+     return file
+
+
+ def get_checkpoint_files(checkpoint_dir, glob_pattern):
+     # XXX: need to test that this simple glob rule works for multi-node setup too
+     ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+     if len(ckpt_files) == 0:
+         raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+     return ckpt_files
+
+
+ def get_optim_files(checkpoint_dir):
+     return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+ def get_model_state_files(checkpoint_dir):
+     return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+ def parse_model_states(files):
+     zero_model_states = []
+     for file in files:
+         state_dict = torch.load(file, map_location=device)
+
+         if BUFFER_NAMES not in state_dict:
+             raise ValueError(f"{file} is not a model state checkpoint")
+         buffer_names = state_dict[BUFFER_NAMES]
+         if debug:
+             print("Found buffers:", buffer_names)
+
+         # recover just the buffers while restoring them to fp32 if they were saved in fp16
+         buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+         param_shapes = state_dict[PARAM_SHAPES]
+
+         # collect parameters that are included in param_shapes
+         param_names = []
+         for s in param_shapes:
+             for name in s.keys():
+                 param_names.append(name)
+
+         # update with frozen parameters
+         frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+         if frozen_param_shapes is not None:
+             if debug:
+                 print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+             param_names += list(frozen_param_shapes.keys())
+
+         # handle shared params
+         shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+         ds_version = state_dict.get(DS_VERSION, None)
+
+         frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+         z_model_state = zero_model_state(buffers=buffers,
+                                          param_shapes=param_shapes,
+                                          shared_params=shared_params,
+                                          ds_version=ds_version,
+                                          frozen_param_shapes=frozen_param_shapes,
+                                          frozen_param_fragments=frozen_param_fragments)
+         zero_model_states.append(z_model_state)
+
+     return zero_model_states
+
+
+ def parse_optim_states(files, ds_checkpoint_dir):
+
+     total_files = len(files)
+     state_dicts = []
+     for f in files:
+         state_dict = torch.load(f, map_location=device)
+         # immediately discard the two potentially huge optimizer states as we only care for fp32 master weights
+         # and also handle the case where it was already removed by another helper script
+         state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+         state_dicts.append(state_dict)
+
+     if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]:
+         raise ValueError(f"{files[0]} is not a zero checkpoint")
+     zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+     world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+     # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+     # parameters can be different from data parallelism for non-expert parameters. So we can just
+     # use the max of the partition_count to get the dp world_size.
+
+     if type(world_size) is list:
+         world_size = max(world_size)
+
+     if world_size != total_files:
+         raise ValueError(
+             f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+             "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+         )
+
+     # the groups are named differently in each stage
+     if zero_stage <= 2:
+         fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+     elif zero_stage == 3:
+         fp32_groups_key = FP32_FLAT_GROUPS
+     else:
+         raise ValueError(f"unknown zero stage {zero_stage}")
+
+     if zero_stage <= 2:
+         fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+     elif zero_stage == 3:
+         # if there is more than one param group, there will be multiple flattened tensors - one
+         # flattened tensor per group - for simplicity merge them into a single tensor
+         #
+         # XXX: could make the script more memory efficient for when there are multiple groups - it
+         # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+         fp32_flat_groups = [
+             torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+         ]
+
+     return zero_stage, world_size, fp32_flat_groups
+
+
+ def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+     """
+     Returns fp32 state_dict reconstructed from ds checkpoint
+
+     Args:
+         - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+     """
+     print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+     optim_files = get_optim_files(ds_checkpoint_dir)
+     zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+     print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+     model_files = get_model_state_files(ds_checkpoint_dir)
+
+     zero_model_states = parse_model_states(model_files)
+     print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+     if zero_stage <= 2:
+         return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                           exclude_frozen_parameters)
+     elif zero_stage == 3:
+         return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                           exclude_frozen_parameters)
+
+
+ def _zero2_merge_frozen_params(state_dict, zero_model_states):
+     if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+         return
+
+     frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+     frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+     if debug:
+         num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+         print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+     wanted_params = len(frozen_param_shapes)
+     wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+     avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+     print(f'Frozen params: Have {avail_numel} numels to process.')
+     print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+     total_params = 0
+     total_numel = 0
+     for name, shape in frozen_param_shapes.items():
+         total_params += 1
+         unpartitioned_numel = shape.numel()
+         total_numel += unpartitioned_numel
+
+         state_dict[name] = frozen_param_fragments[name]
+
+         if debug:
+             print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+     print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+ def _has_callable(obj, fn):
+     attr = getattr(obj, fn, None)
+     return callable(attr)
+
+
+ def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+     param_shapes = zero_model_states[0].param_shapes
+
+     # Reconstruction protocol:
+     #
+     # XXX: document this
+
+     if debug:
+         for i in range(world_size):
+             for j in range(len(fp32_flat_groups[0])):
+                 print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+     # XXX: memory usage doubles here (zero2)
+     num_param_groups = len(fp32_flat_groups[0])
+     merged_single_partition_of_fp32_groups = []
+     for i in range(num_param_groups):
+         merged_partitions = [sd[i] for sd in fp32_flat_groups]
+         full_single_fp32_vector = torch.cat(merged_partitions, 0)
+         merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+     avail_numel = sum(
+         [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+     if debug:
+         wanted_params = sum([len(shapes) for shapes in param_shapes])
+         wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+         # not asserting if there is a mismatch due to possible padding
+         print(f"Have {avail_numel} numels to process.")
+         print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+     # params
+     # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+     # out-of-core computing solution
+     total_numel = 0
+     total_params = 0
+     for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+         offset = 0
+         avail_numel = full_single_fp32_vector.numel()
+         for name, shape in shapes.items():
+
+             unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+             total_numel += unpartitioned_numel
+             total_params += 1
+
+             if debug:
+                 print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+             state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+             offset += unpartitioned_numel
+
+         # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+         # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+         # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+         # live optimizer object, so we are checking that the numbers are within the right range
+         align_to = 2 * world_size
+
+         def zero2_align(x):
+             return align_to * math.ceil(x / align_to)
+
+         if debug:
+             print(f"original offset={offset}, avail_numel={avail_numel}")
+
+         offset = zero2_align(offset)
+         avail_numel = zero2_align(avail_numel)
+
+         if debug:
+             print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+         # Sanity check
+         if offset != avail_numel:
+             raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+     print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+ def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                exclude_frozen_parameters):
+     state_dict = OrderedDict()
+
+     # buffers
+     buffers = zero_model_states[0].buffers
+     state_dict.update(buffers)
+     if debug:
+         print(f"added {len(buffers)} buffers")
+
+     if not exclude_frozen_parameters:
+         _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+     _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+     # recover shared parameters
+     for pair in zero_model_states[0].shared_params:
+         if pair[1] in state_dict:
+             state_dict[pair[0]] = state_dict[pair[1]]
+
+     return state_dict
+
+
+ def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+     remainder = unpartitioned_numel % world_size
+     padding_numel = (world_size - remainder) if remainder else 0
+     partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+     return partitioned_numel, padding_numel
+
+
+ def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+     if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+         return
+
+     if debug:
+         for i in range(world_size):
+             num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+             print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+     frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+     wanted_params = len(frozen_param_shapes)
+     wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+     avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+     print(f'Frozen params: Have {avail_numel} numels to process.')
+     print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+     total_params = 0
+     total_numel = 0
+     for name, shape in zero_model_states[0].frozen_param_shapes.items():
+         total_params += 1
+         unpartitioned_numel = shape.numel()
+         total_numel += unpartitioned_numel
+
+         param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+         state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+         partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+         if debug:
+             print(
+                 f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+             )
+
+     print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+ def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+     param_shapes = zero_model_states[0].param_shapes
+     avail_numel = fp32_flat_groups[0].numel() * world_size
+     # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+     # param, re-consolidating each param, while dealing with padding if any
+
+     # merge list of dicts, preserving order
+     param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+     if debug:
+         for i in range(world_size):
+             print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+     wanted_params = len(param_shapes)
+     wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+     # not asserting if there is a mismatch due to possible padding
+     avail_numel = fp32_flat_groups[0].numel() * world_size
+     print(f"Trainable params: Have {avail_numel} numels to process.")
+     print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+     # params
+     # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+     # out-of-core computing solution
+     offset = 0
+     total_numel = 0
+     total_params = 0
+     for name, shape in param_shapes.items():
+
+         unpartitioned_numel = shape.numel()
+         total_numel += unpartitioned_numel
+         total_params += 1
+
+         partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+         if debug:
+             print(
+                 f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+             )
+
+         # XXX: memory usage doubles here
+         state_dict[name] = torch.cat(
+             tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+             0).narrow(0, 0, unpartitioned_numel).view(shape)
+         offset += partitioned_numel
+
+     offset *= world_size
+
+     # Sanity check
+     if offset != avail_numel:
+         raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+     print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+ def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                exclude_frozen_parameters):
+     state_dict = OrderedDict()
+
+     # buffers
+     buffers = zero_model_states[0].buffers
+     state_dict.update(buffers)
+     if debug:
+         print(f"added {len(buffers)} buffers")
+
+     if not exclude_frozen_parameters:
+         _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+     _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+     # recover shared parameters
+     for pair in zero_model_states[0].shared_params:
+         if pair[1] in state_dict:
+             state_dict[pair[0]] = state_dict[pair[1]]
+
+     return state_dict
+
+
+ def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+     """
+     Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+     ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+     via a model hub.
+
+     Args:
+         - ``checkpoint_dir``: path to the desired checkpoint folder
+         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+         - ``exclude_frozen_parameters``: exclude frozen parameters
+
+     Returns:
+         - pytorch ``state_dict``
+
+     Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+     you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+     the checkpoint.
+
+     A typical usage might be ::
+
+         from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+         # do the training and checkpoint saving
+         state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+         model = model.cpu() # move to cpu
+         model.load_state_dict(state_dict)
+         # submit to model hub or save the model to share with others
+
+     In this example the ``model`` will no longer be usable in the deepspeed context of the same
+     application. i.e. you will need to re-initialize the deepspeed engine, since
+     ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+     If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+     """
+     if tag is None:
+         latest_path = os.path.join(checkpoint_dir, 'latest')
+         if os.path.isfile(latest_path):
+             with open(latest_path, 'r') as fd:
+                 tag = fd.read().strip()
+         else:
+             raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+     ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+     if not os.path.isdir(ds_checkpoint_dir):
+         raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+     return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+ def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+     """
+     Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+     loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+     Args:
+         - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+         - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+         - ``exclude_frozen_parameters``: exclude frozen parameters
+     """
+
+     state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+     print(f"Saving fp32 state dict to {output_file}")
+     torch.save(state_dict, output_file)
+
+
+ def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+     """
+     1. Put the provided model to cpu
+     2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+     3. Load it into the provided model
+
+     Args:
+         - ``model``: the model object to update
+         - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+     Returns:
+         - ``model``: modified model
+
+     Make sure you have plenty of CPU memory available before you call this function. If you don't
+     have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+     conveniently placed for you in the checkpoint folder.
+
+     A typical usage might be ::
+
+         from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+         model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+         # submit to model hub or save the model to share with others
+
+     Note that once this has run, the ``model`` will no longer be usable in the deepspeed context
+     of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+     ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+     """
+     logger.info("Extracting fp32 weights")
+     state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+     logger.info("Overwriting model with fp32 weights")
+     model = model.cpu()
+     model.load_state_dict(state_dict, strict=False)
+
+     return model
+
+
+ if __name__ == "__main__":
+
+     parser = argparse.ArgumentParser()
+     parser.add_argument("checkpoint_dir",
+                         type=str,
+                         help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+     parser.add_argument(
+         "output_file",
+         type=str,
+         help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+     parser.add_argument("-t",
+                         "--tag",
+                         type=str,
+                         default=None,
+                         help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+     parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+     parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+     args = parser.parse_args()
+
+     debug = args.debug
+
+     convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+                                                args.output_file,
+                                                tag=args.tag,
+                                                exclude_frozen_parameters=args.exclude_frozen_parameters)
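
The script's own docstrings document both the CLI (`python zero_to_fp32.py . pytorch_model.bin`) and an in-process API. A sketch (not part of the commit) of applying it to this checkpoint, assuming the DeepSpeed shard folder referenced by the checkpoint's `latest` tag file was uploaded alongside it:

```python
# Consolidate the ZeRO-partitioned shards of checkpoint-1500 into one fp32 file.
# Assumes zero_to_fp32.py (the script above) is importable from the working dir.
from zero_to_fp32 import convert_zero_checkpoint_to_fp32_state_dict

ckpt_dir = ("train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/"
            "runs/checkpoint-1500")
# tag=None makes the script read the tag (e.g. "global_step1500") from the
# 'latest' file saved in the same checkpoint directory.
convert_zero_checkpoint_to_fp32_state_dict(ckpt_dir, "pytorch_model.bin", tag=None)
```
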
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/config.json ADDED
@@ -0,0 +1,36 @@
+ {
+   "_name_or_path": "meta-llama/Llama-3.2-3B",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": 128001,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float16",
+   "transformers_version": "4.45.1",
+   "use_cache": true,
+   "vocab_size": 128256
+ }
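
A quick sanity check on the config above (illustrative only; assumes `transformers` is installed):

```python
# Load the checkpoint's config and confirm the derived head dimension.
from transformers import AutoConfig

ckpt = ("train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/"
        "runs/checkpoint-1650")
cfg = AutoConfig.from_pretrained(ckpt)

# head_dim = hidden_size / num_attention_heads = 3072 / 24 = 128, matching "head_dim".
assert cfg.hidden_size // cfg.num_attention_heads == cfg.head_dim == 128
print(cfg.model_type, cfg.num_hidden_layers, "layers,", cfg.vocab_size, "vocab")
```
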
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": 128001,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.45.1"
+ }
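
These defaults enable sampling with temperature 0.6 and nucleus top-p 0.9, and `generate` picks them up automatically. An illustrative check (assumes `transformers` is installed):

```python
# Read the generation defaults saved with the checkpoint.
from transformers import GenerationConfig

ckpt = ("train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/"
        "runs/checkpoint-1650")
gen = GenerationConfig.from_pretrained(ckpt)
print(gen.do_sample, gen.temperature, gen.top_p)  # True 0.6 0.9
```
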
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/latest ADDED
@@ -0,0 +1 @@
+ global_step1650
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c146f631f86bf0f652d7a36cc3ef8abae906fa1b54abb7af60635fac7919c55
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:595fa5c22607d6ab832d0b14385e9e6b9bfeb5d4c181d0acb849edd1df3e66a1
+ size 20599
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe730d7c46d9e44b7cee973c364bc4ff93710ee1b6faa309a35e497360023b72
+ size 627
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d396f0642c83730ebff54eaafc7dfb38092d5d41fd28ce6ebbd56ec539612e4b
+ size 6011
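
The `.pth`/`.pt`/`.bin` entries above are Git LFS pointer files (spec version, sha256 oid, byte size), not the binaries themselves. A sketch for verifying a fetched object against its pointer, assuming the real file has been pulled (e.g. via `git lfs pull`):

```python
# Hash the downloaded binary and compare against the oid recorded in the pointer.
import hashlib

path = ("train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/"
        "runs/checkpoint-1650/training_args.bin")
expected = "d396f0642c83730ebff54eaafc7dfb38092d5d41fd28ce6ebbd56ec539612e4b"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected, "file does not match its LFS pointer"
```
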
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1650/zero_to_fp32.py ADDED
@@ -0,0 +1,604 @@
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/config.json ADDED
@@ -0,0 +1,36 @@
+ {
+   "_name_or_path": "meta-llama/Llama-3.2-3B",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": 128001,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float16",
+   "transformers_version": "4.45.1",
+   "use_cache": true,
+   "vocab_size": 128256
+ }
train/checkpoints/Llama-3.2-3B/babylm_reverse_full_10M_seed0/runs/checkpoint-1800/generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": 128001,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.45.1"
+ }