crazyjeannot committed on
Commit b2d47d4
1 Parent(s): 39082f4

Upload folder using huggingface_hub
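For context, a commit message like this is what `huggingface_hub`'s `upload_folder` produces; a minimal sketch (the local path and repo id below are placeholders, not values taken from this commit):

```python
from huggingface_hub import HfApi

# Push every file in a local checkpoint directory to the Hub as a single commit.
HfApi().upload_folder(
    folder_path="path/to/checkpoint",         # hypothetical local folder
    repo_id="crazyjeannot/placeholder-repo",  # hypothetical repo id
    commit_message="Upload folder using huggingface_hub",
)
```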

1_Pooling/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "word_embedding_dimension": 1024,
+   "pooling_mode_cls_token": true,
+   "pooling_mode_mean_tokens": false,
+   "pooling_mode_max_tokens": false,
+   "pooling_mode_mean_sqrt_len_tokens": false,
+   "pooling_mode_weightedmean_tokens": false,
+   "pooling_mode_lasttoken": false,
+   "include_prompt": true
+ }
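With `pooling_mode_cls_token` as the only enabled mode, the sentence embedding is simply the hidden state of the first (`[CLS]`) token. A minimal sketch of that operation (illustrative only, not code shipped in this repository):

```python
import torch

def cls_pool(token_embeddings: torch.Tensor) -> torch.Tensor:
    # token_embeddings: (batch, seq_len, 1024) output of the transformer module.
    # CLS pooling keeps only the first token's vector for each sequence.
    return token_embeddings[:, 0]  # (batch, 1024)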
README.md CHANGED
@@ -1,3 +1,144 @@
- ---
- license: apache-2.0
- ---
+ ---
+ datasets: []
+ language: []
+ library_name: sentence-transformers
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - sentence-similarity
+ - feature-extraction
+ widget: []
+ ---
+
+ # SentenceTransformer
+
+ This is a [sentence-transformers](https://www.SBERT.net) model. It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
+
+ ## Model Details
+
+ ### Model Description
+ - **Model Type:** Sentence Transformer
+ <!-- - **Base model:** [Unknown](https://huggingface.co/unknown) -->
+ - **Maximum Sequence Length:** 512 tokens
+ - **Output Dimensionality:** 1024 dimensions
+ - **Similarity Function:** Cosine Similarity
+ <!-- - **Training Dataset:** Unknown -->
+ <!-- - **Language:** Unknown -->
+ <!-- - **License:** Unknown -->
+
+ ### Model Sources
+
+ - **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
+ - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
+ - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
+
+ ### Full Model Architecture
+
+ ```
+ SentenceTransformer(
+   (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel
+   (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
+   (2): Normalize()
+ )
+ ```
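The three modules can be reproduced with plain `transformers` as a sanity-check sketch (the local path is a placeholder; by assumption this matches `model.encode` up to numerical precision):

```python
import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

path = "path/to/this/repo"  # hypothetical local checkout of this repository
tokenizer = AutoTokenizer.from_pretrained(path)
bert = AutoModel.from_pretrained(path)

# (0) Transformer: tokenize with the configured 512-token limit
batch = tokenizer(["The weather is lovely today."], padding=True,
                  truncation=True, max_length=512, return_tensors="pt")
with torch.no_grad():
    hidden = bert(**batch).last_hidden_state  # (batch, seq_len, 1024)
cls = hidden[:, 0]                            # (1) Pooling: CLS token only
embedding = F.normalize(cls, p=2, dim=1)      # (2) Normalize: unit L2 norm
```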
+
+ ## Usage
+
+ ### Direct Usage (Sentence Transformers)
+
+ First install the Sentence Transformers library:
+
+ ```bash
+ pip install -U sentence-transformers
+ ```
+
+ Then you can load this model and run inference.
+ ```python
+ from sentence_transformers import SentenceTransformer
+
+ # Download from the 🤗 Hub
+ model = SentenceTransformer("sentence_transformers_model_id")
+ # Run inference
+ sentences = [
+     'The weather is lovely today.',
+     "It's so sunny outside!",
+     'He drove to the stadium.',
+ ]
+ embeddings = model.encode(sentences)
+ print(embeddings.shape)
+ # [3, 1024]
+
+ # Get the similarity scores for the embeddings
+ similarities = model.similarity(embeddings, embeddings)
+ print(similarities.shape)
+ # [3, 3]
+ ```
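Because the pipeline ends in `Normalize()`, every embedding has unit L2 norm, so the cosine similarity computed above reduces to a plain dot product. A quick check, reusing `embeddings` from the snippet above:

```python
import numpy as np

cosine = embeddings @ embeddings.T  # Gram matrix of unit-length rows
assert np.allclose(np.diag(cosine), 1.0, atol=1e-5)
```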
+
+ <!--
+ ### Direct Usage (Transformers)
+
+ <details><summary>Click to see the direct usage in Transformers</summary>
+
+ </details>
+ -->
+
+ <!--
+ ### Downstream Usage (Sentence Transformers)
+
+ You can finetune this model on your own dataset.
+
+ <details><summary>Click to expand</summary>
+
+ </details>
+ -->
+
+ <!--
+ ### Out-of-Scope Use
+
+ *List how the model may foreseeably be misused and address what users ought not to do with the model.*
+ -->
+
+ <!--
+ ## Bias, Risks and Limitations
+
+ *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
+ -->
+
+ <!--
+ ### Recommendations
+
+ *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
+ -->
+
+ ## Training Details
+
+ ### Framework Versions
+ - Python: 3.9.2
+ - Sentence Transformers: 3.0.1
+ - Transformers: 4.41.2
+ - PyTorch: 2.3.1+cu121
+ - Accelerate: 0.31.0
+ - Datasets: 2.20.0
+ - Tokenizers: 0.19.1
+
+ ## Citation
+
+ ### BibTeX
+
+ <!--
+ ## Glossary
+
+ *Clearly define terms in order to be accessible across audiences.*
+ -->
+
+ <!--
+ ## Model Card Authors
+
+ *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
+ -->
+
+ <!--
+ ## Model Card Contact
+
+ *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
+ -->
config.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "_name_or_path": "BGE_FT_MODEL_DENSE_40/checkpoint-20000",
+   "architectures": [
+     "BertModel"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "LABEL_0"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "LABEL_0": 0
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "output_past": true,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 21128
+ }
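These are BERT-large hyperparameters (24 layers, 1024 hidden units, 16 heads) with a 21,128-entry vocabulary. As a back-of-the-envelope check (a sketch assuming the standard `BertModel` tensor layout, including the pooler), the parameter count implied by this config lines up with the float32 `model.safetensors` size listed below:

```python
V, P, H, I, L = 21128, 512, 1024, 4096, 24  # vocab, positions, hidden, intermediate, layers

embeddings = V * H + P * H + 2 * H + 2 * H  # word + position + token-type + LayerNorm
per_layer = (4 * (H * H + H)                # Q, K, V and attention output projections
             + (H * I + I)                  # FFN up-projection
             + (I * H + H)                  # FFN down-projection
             + 2 * 2 * H)                   # two LayerNorms
pooler = H * H + H

total = embeddings + L * per_layer + pooler
print(total, total * 4)  # ~325.5M parameters, ~1.30 GB in float32
```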
config_sentence_transformers.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "__version__": {
+     "sentence_transformers": "3.0.1",
+     "transformers": "4.41.2",
+     "pytorch": "2.3.1+cu121"
+   },
+   "prompts": {},
+   "default_prompt_name": null,
+   "similarity_fn_name": null
+ }
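`"similarity_fn_name": null` means the library default applies, which in sentence-transformers 3.x is cosine similarity (this is my reading of the library's behavior, not something recorded in this file):

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id from the README
print(model.similarity_fn_name)  # falls back to "cosine" when the config stores null
```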
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a3b3c9b53cb1edf34cdfe1862c732681c57be67b7380841cec4bfae21b25913
+ size 1302134568
modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+   {
+     "idx": 0,
+     "name": "0",
+     "path": "",
+     "type": "sentence_transformers.models.Transformer"
+   },
+   {
+     "idx": 1,
+     "name": "1",
+     "path": "1_Pooling",
+     "type": "sentence_transformers.models.Pooling"
+   },
+   {
+     "idx": 2,
+     "name": "2",
+     "path": "2_Normalize",
+     "type": "sentence_transformers.models.Normalize"
+   }
+ ]
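`modules.json` is what tells `SentenceTransformer` to chain the three modules in `idx` order. An equivalent manual composition (a sketch; the path is a placeholder):

```python
from sentence_transformers import SentenceTransformer, models

path = "path/to/this/repo"  # hypothetical local checkout
transformer = models.Transformer(path, max_seq_length=512)  # idx 0, path ""
pooling = models.Pooling(transformer.get_word_embedding_dimension(),
                         pooling_mode="cls")                # idx 1, 1_Pooling
normalize = models.Normalize()                              # idx 2, 2_Normalize
model = SentenceTransformer(modules=[transformer, pooling, normalize])
```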
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a3f3fd9c43d86f92cf7016d8b41eb9f3b60e975ee03f29c5dab1b8d347240c5
+ size 2596108193
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8705e2b97793e346b294fee7b65363f6f3316fce2a183259ea8d1fc6c8b8c66
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a1d724d88426b758dac0c7579247688fd3c3ab98594b5baffe79e186c02bebc
+ size 1064
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "max_seq_length": 512,
+   "do_lower_case": false
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "cls_token": {
+     "content": "[CLS]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "[MASK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "[SEP]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 1000000000000000019884624838656,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
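The `added_tokens_decoder` above pins the five BERT special tokens to their conventional ids (0, 100, 101, 102, 103). A quick verification sketch (the path is a placeholder):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # hypothetical local checkout
print(tok.pad_token_id, tok.unk_token_id, tok.cls_token_id,
      tok.sep_token_id, tok.mask_token_id)  # 0 100 101 102 103
```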
trainer_state.json ADDED
@@ -0,0 +1,2833 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 4.0,
5
+ "eval_steps": 500,
6
+ "global_step": 20000,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.01,
13
+ "grad_norm": 24.59290313720703,
14
+ "learning_rate": 9.976e-07,
15
+ "loss": 2.6229,
16
+ "step": 50
17
+ },
18
+ {
19
+ "epoch": 0.02,
20
+ "grad_norm": 44.6561279296875,
21
+ "learning_rate": 9.951e-07,
22
+ "loss": 2.3333,
23
+ "step": 100
24
+ },
25
+ {
26
+ "epoch": 0.03,
27
+ "grad_norm": 31.2443790435791,
28
+ "learning_rate": 9.926e-07,
29
+ "loss": 2.1022,
30
+ "step": 150
31
+ },
32
+ {
33
+ "epoch": 0.04,
34
+ "grad_norm": 30.142139434814453,
35
+ "learning_rate": 9.900999999999998e-07,
36
+ "loss": 2.1899,
37
+ "step": 200
38
+ },
39
+ {
40
+ "epoch": 0.05,
41
+ "grad_norm": 40.865047454833984,
42
+ "learning_rate": 9.876e-07,
43
+ "loss": 2.0581,
44
+ "step": 250
45
+ },
46
+ {
47
+ "epoch": 0.06,
48
+ "grad_norm": 49.21326446533203,
49
+ "learning_rate": 9.850999999999999e-07,
50
+ "loss": 2.0981,
51
+ "step": 300
52
+ },
53
+ {
54
+ "epoch": 0.07,
55
+ "grad_norm": 34.270729064941406,
56
+ "learning_rate": 9.826e-07,
57
+ "loss": 2.1487,
58
+ "step": 350
59
+ },
60
+ {
61
+ "epoch": 0.08,
62
+ "grad_norm": 37.85762405395508,
63
+ "learning_rate": 9.801e-07,
64
+ "loss": 2.0944,
65
+ "step": 400
66
+ },
67
+ {
68
+ "epoch": 0.09,
69
+ "grad_norm": 38.32168197631836,
70
+ "learning_rate": 9.776e-07,
71
+ "loss": 2.0425,
72
+ "step": 450
73
+ },
74
+ {
75
+ "epoch": 0.1,
76
+ "grad_norm": 40.687808990478516,
77
+ "learning_rate": 9.751e-07,
78
+ "loss": 1.9918,
79
+ "step": 500
80
+ },
81
+ {
82
+ "epoch": 0.11,
83
+ "grad_norm": 41.6737174987793,
84
+ "learning_rate": 9.726e-07,
85
+ "loss": 1.9946,
86
+ "step": 550
87
+ },
88
+ {
89
+ "epoch": 0.12,
90
+ "grad_norm": 32.36153030395508,
91
+ "learning_rate": 9.700999999999998e-07,
92
+ "loss": 2.0164,
93
+ "step": 600
94
+ },
95
+ {
96
+ "epoch": 0.13,
97
+ "grad_norm": 30.314594268798828,
98
+ "learning_rate": 9.676e-07,
99
+ "loss": 1.9766,
100
+ "step": 650
101
+ },
102
+ {
103
+ "epoch": 0.14,
104
+ "grad_norm": 32.16353988647461,
105
+ "learning_rate": 9.650999999999999e-07,
106
+ "loss": 1.9656,
107
+ "step": 700
108
+ },
109
+ {
110
+ "epoch": 0.15,
111
+ "grad_norm": 53.54747772216797,
112
+ "learning_rate": 9.626e-07,
113
+ "loss": 1.9925,
114
+ "step": 750
115
+ },
116
+ {
117
+ "epoch": 0.16,
118
+ "grad_norm": 31.085399627685547,
119
+ "learning_rate": 9.601e-07,
120
+ "loss": 1.9141,
121
+ "step": 800
122
+ },
123
+ {
124
+ "epoch": 0.17,
125
+ "grad_norm": 44.15593719482422,
126
+ "learning_rate": 9.576e-07,
127
+ "loss": 2.0047,
128
+ "step": 850
129
+ },
130
+ {
131
+ "epoch": 0.18,
132
+ "grad_norm": 40.597999572753906,
133
+ "learning_rate": 9.551e-07,
134
+ "loss": 1.9799,
135
+ "step": 900
136
+ },
137
+ {
138
+ "epoch": 0.19,
139
+ "grad_norm": 40.38375473022461,
140
+ "learning_rate": 9.526e-07,
141
+ "loss": 1.9333,
142
+ "step": 950
143
+ },
144
+ {
145
+ "epoch": 0.2,
146
+ "grad_norm": 31.839799880981445,
147
+ "learning_rate": 9.500999999999999e-07,
148
+ "loss": 2.0041,
149
+ "step": 1000
150
+ },
151
+ {
152
+ "epoch": 0.21,
153
+ "grad_norm": 43.41249084472656,
154
+ "learning_rate": 9.475999999999999e-07,
155
+ "loss": 1.9008,
156
+ "step": 1050
157
+ },
158
+ {
159
+ "epoch": 0.22,
160
+ "grad_norm": 39.27376937866211,
161
+ "learning_rate": 9.451e-07,
162
+ "loss": 1.8454,
163
+ "step": 1100
164
+ },
165
+ {
166
+ "epoch": 0.23,
167
+ "grad_norm": 37.960269927978516,
168
+ "learning_rate": 9.426e-07,
169
+ "loss": 1.8152,
170
+ "step": 1150
171
+ },
172
+ {
173
+ "epoch": 0.24,
174
+ "grad_norm": 44.50017547607422,
175
+ "learning_rate": 9.401e-07,
176
+ "loss": 1.8737,
177
+ "step": 1200
178
+ },
179
+ {
180
+ "epoch": 0.25,
181
+ "grad_norm": 37.4691162109375,
182
+ "learning_rate": 9.375999999999999e-07,
183
+ "loss": 1.808,
184
+ "step": 1250
185
+ },
186
+ {
187
+ "epoch": 0.26,
188
+ "grad_norm": 37.40227127075195,
189
+ "learning_rate": 9.351e-07,
190
+ "loss": 1.832,
191
+ "step": 1300
192
+ },
193
+ {
194
+ "epoch": 0.27,
195
+ "grad_norm": 39.435123443603516,
196
+ "learning_rate": 9.326e-07,
197
+ "loss": 1.9672,
198
+ "step": 1350
199
+ },
200
+ {
201
+ "epoch": 0.28,
202
+ "grad_norm": 41.97803497314453,
203
+ "learning_rate": 9.301e-07,
204
+ "loss": 1.869,
205
+ "step": 1400
206
+ },
207
+ {
208
+ "epoch": 0.29,
209
+ "grad_norm": 42.29490661621094,
210
+ "learning_rate": 9.275999999999999e-07,
211
+ "loss": 1.8844,
212
+ "step": 1450
213
+ },
214
+ {
215
+ "epoch": 0.3,
216
+ "grad_norm": 39.2964973449707,
217
+ "learning_rate": 9.250999999999999e-07,
218
+ "loss": 1.8546,
219
+ "step": 1500
220
+ },
221
+ {
222
+ "epoch": 0.31,
223
+ "grad_norm": 46.49869155883789,
224
+ "learning_rate": 9.226e-07,
225
+ "loss": 1.7998,
226
+ "step": 1550
227
+ },
228
+ {
229
+ "epoch": 0.32,
230
+ "grad_norm": 35.85550308227539,
231
+ "learning_rate": 9.201e-07,
232
+ "loss": 1.8493,
233
+ "step": 1600
234
+ },
235
+ {
236
+ "epoch": 0.33,
237
+ "grad_norm": 36.285282135009766,
238
+ "learning_rate": 9.175999999999999e-07,
239
+ "loss": 1.8918,
240
+ "step": 1650
241
+ },
242
+ {
243
+ "epoch": 0.34,
244
+ "grad_norm": 36.704795837402344,
245
+ "learning_rate": 9.151e-07,
246
+ "loss": 1.8412,
247
+ "step": 1700
248
+ },
249
+ {
250
+ "epoch": 0.35,
251
+ "grad_norm": 39.69667434692383,
252
+ "learning_rate": 9.126e-07,
253
+ "loss": 1.809,
254
+ "step": 1750
255
+ },
256
+ {
257
+ "epoch": 0.36,
258
+ "grad_norm": 36.81133270263672,
259
+ "learning_rate": 9.101e-07,
260
+ "loss": 1.8425,
261
+ "step": 1800
262
+ },
263
+ {
264
+ "epoch": 0.37,
265
+ "grad_norm": 40.80613708496094,
266
+ "learning_rate": 9.075999999999999e-07,
267
+ "loss": 1.9628,
268
+ "step": 1850
269
+ },
270
+ {
271
+ "epoch": 0.38,
272
+ "grad_norm": 39.36043930053711,
273
+ "learning_rate": 9.051e-07,
274
+ "loss": 1.7546,
275
+ "step": 1900
276
+ },
277
+ {
278
+ "epoch": 0.39,
279
+ "grad_norm": 40.22469711303711,
280
+ "learning_rate": 9.025999999999999e-07,
281
+ "loss": 1.8055,
282
+ "step": 1950
283
+ },
284
+ {
285
+ "epoch": 0.4,
286
+ "grad_norm": 44.26631546020508,
287
+ "learning_rate": 9.001e-07,
288
+ "loss": 1.8203,
289
+ "step": 2000
290
+ },
291
+ {
292
+ "epoch": 0.41,
293
+ "grad_norm": 50.78614807128906,
294
+ "learning_rate": 8.976499999999999e-07,
295
+ "loss": 1.8012,
296
+ "step": 2050
297
+ },
298
+ {
299
+ "epoch": 0.42,
300
+ "grad_norm": 63.06776809692383,
301
+ "learning_rate": 8.951499999999999e-07,
302
+ "loss": 1.8745,
303
+ "step": 2100
304
+ },
305
+ {
306
+ "epoch": 0.43,
307
+ "grad_norm": 48.32567596435547,
308
+ "learning_rate": 8.9265e-07,
309
+ "loss": 1.7794,
310
+ "step": 2150
311
+ },
312
+ {
313
+ "epoch": 0.44,
314
+ "grad_norm": 48.12654495239258,
315
+ "learning_rate": 8.9015e-07,
316
+ "loss": 1.9107,
317
+ "step": 2200
318
+ },
319
+ {
320
+ "epoch": 0.45,
321
+ "grad_norm": 38.266197204589844,
322
+ "learning_rate": 8.8765e-07,
323
+ "loss": 1.7588,
324
+ "step": 2250
325
+ },
326
+ {
327
+ "epoch": 0.46,
328
+ "grad_norm": 39.421504974365234,
329
+ "learning_rate": 8.851499999999999e-07,
330
+ "loss": 1.8511,
331
+ "step": 2300
332
+ },
333
+ {
334
+ "epoch": 0.47,
335
+ "grad_norm": 39.87221908569336,
336
+ "learning_rate": 8.8265e-07,
337
+ "loss": 1.6939,
338
+ "step": 2350
339
+ },
340
+ {
341
+ "epoch": 0.48,
342
+ "grad_norm": 43.736900329589844,
343
+ "learning_rate": 8.8015e-07,
344
+ "loss": 1.7321,
345
+ "step": 2400
346
+ },
347
+ {
348
+ "epoch": 0.49,
349
+ "grad_norm": 43.2308235168457,
350
+ "learning_rate": 8.7765e-07,
351
+ "loss": 1.8803,
352
+ "step": 2450
353
+ },
354
+ {
355
+ "epoch": 0.5,
356
+ "grad_norm": 40.887939453125,
357
+ "learning_rate": 8.751499999999999e-07,
358
+ "loss": 1.731,
359
+ "step": 2500
360
+ },
361
+ {
362
+ "epoch": 0.51,
363
+ "grad_norm": 43.211727142333984,
364
+ "learning_rate": 8.7265e-07,
365
+ "loss": 1.8046,
366
+ "step": 2550
367
+ },
368
+ {
369
+ "epoch": 0.52,
370
+ "grad_norm": 51.66404342651367,
371
+ "learning_rate": 8.701499999999999e-07,
372
+ "loss": 1.774,
373
+ "step": 2600
374
+ },
375
+ {
376
+ "epoch": 0.53,
377
+ "grad_norm": 39.11322021484375,
378
+ "learning_rate": 8.6765e-07,
379
+ "loss": 1.8792,
380
+ "step": 2650
381
+ },
382
+ {
383
+ "epoch": 0.54,
384
+ "grad_norm": 48.62511444091797,
385
+ "learning_rate": 8.6515e-07,
386
+ "loss": 1.7991,
387
+ "step": 2700
388
+ },
389
+ {
390
+ "epoch": 0.55,
391
+ "grad_norm": 41.84499740600586,
392
+ "learning_rate": 8.6265e-07,
393
+ "loss": 1.8712,
394
+ "step": 2750
395
+ },
396
+ {
397
+ "epoch": 0.56,
398
+ "grad_norm": 51.82795715332031,
399
+ "learning_rate": 8.601499999999999e-07,
400
+ "loss": 1.7662,
401
+ "step": 2800
402
+ },
403
+ {
404
+ "epoch": 0.57,
405
+ "grad_norm": 54.27309799194336,
406
+ "learning_rate": 8.5765e-07,
407
+ "loss": 1.7643,
408
+ "step": 2850
409
+ },
410
+ {
411
+ "epoch": 0.58,
412
+ "grad_norm": 39.44385528564453,
413
+ "learning_rate": 8.5515e-07,
414
+ "loss": 1.804,
415
+ "step": 2900
416
+ },
417
+ {
418
+ "epoch": 0.59,
419
+ "grad_norm": 37.73311233520508,
420
+ "learning_rate": 8.5265e-07,
421
+ "loss": 1.7039,
422
+ "step": 2950
423
+ },
424
+ {
425
+ "epoch": 0.6,
426
+ "grad_norm": 43.82680892944336,
427
+ "learning_rate": 8.501499999999999e-07,
428
+ "loss": 1.7717,
429
+ "step": 3000
430
+ },
431
+ {
432
+ "epoch": 0.61,
433
+ "grad_norm": 44.10558319091797,
434
+ "learning_rate": 8.476499999999999e-07,
435
+ "loss": 1.7109,
436
+ "step": 3050
437
+ },
438
+ {
439
+ "epoch": 0.62,
440
+ "grad_norm": 43.69202423095703,
441
+ "learning_rate": 8.4515e-07,
442
+ "loss": 1.8569,
443
+ "step": 3100
444
+ },
445
+ {
446
+ "epoch": 0.63,
447
+ "grad_norm": 46.728538513183594,
448
+ "learning_rate": 8.4265e-07,
449
+ "loss": 1.8468,
450
+ "step": 3150
451
+ },
452
+ {
453
+ "epoch": 0.64,
454
+ "grad_norm": 38.28073501586914,
455
+ "learning_rate": 8.401499999999999e-07,
456
+ "loss": 1.6194,
457
+ "step": 3200
458
+ },
459
+ {
460
+ "epoch": 0.65,
461
+ "grad_norm": 48.87205123901367,
462
+ "learning_rate": 8.376499999999999e-07,
463
+ "loss": 1.7792,
464
+ "step": 3250
465
+ },
466
+ {
467
+ "epoch": 0.66,
468
+ "grad_norm": 39.80657196044922,
469
+ "learning_rate": 8.3515e-07,
470
+ "loss": 1.7198,
471
+ "step": 3300
472
+ },
473
+ {
474
+ "epoch": 0.67,
475
+ "grad_norm": 37.74007034301758,
476
+ "learning_rate": 8.3265e-07,
477
+ "loss": 1.7539,
478
+ "step": 3350
479
+ },
480
+ {
481
+ "epoch": 0.68,
482
+ "grad_norm": 47.30875015258789,
483
+ "learning_rate": 8.3015e-07,
484
+ "loss": 1.725,
485
+ "step": 3400
486
+ },
487
+ {
488
+ "epoch": 0.69,
489
+ "grad_norm": 44.54092025756836,
490
+ "learning_rate": 8.276499999999999e-07,
491
+ "loss": 1.7863,
492
+ "step": 3450
493
+ },
494
+ {
495
+ "epoch": 0.7,
496
+ "grad_norm": 49.025169372558594,
497
+ "learning_rate": 8.251500000000001e-07,
498
+ "loss": 1.7914,
499
+ "step": 3500
500
+ },
501
+ {
502
+ "epoch": 0.71,
503
+ "grad_norm": 45.531864166259766,
504
+ "learning_rate": 8.2265e-07,
505
+ "loss": 1.8326,
506
+ "step": 3550
507
+ },
508
+ {
509
+ "epoch": 0.72,
510
+ "grad_norm": 37.38317108154297,
511
+ "learning_rate": 8.2015e-07,
512
+ "loss": 1.8113,
513
+ "step": 3600
514
+ },
515
+ {
516
+ "epoch": 0.73,
517
+ "grad_norm": 42.720829010009766,
518
+ "learning_rate": 8.176499999999999e-07,
519
+ "loss": 1.7043,
520
+ "step": 3650
521
+ },
522
+ {
523
+ "epoch": 0.74,
524
+ "grad_norm": 42.389339447021484,
525
+ "learning_rate": 8.1515e-07,
526
+ "loss": 1.6669,
527
+ "step": 3700
528
+ },
529
+ {
530
+ "epoch": 0.75,
531
+ "grad_norm": 50.965545654296875,
532
+ "learning_rate": 8.1265e-07,
533
+ "loss": 1.6581,
534
+ "step": 3750
535
+ },
536
+ {
537
+ "epoch": 0.76,
538
+ "grad_norm": 33.738948822021484,
539
+ "learning_rate": 8.1015e-07,
540
+ "loss": 1.7835,
541
+ "step": 3800
542
+ },
543
+ {
544
+ "epoch": 0.77,
545
+ "grad_norm": 47.6729736328125,
546
+ "learning_rate": 8.076499999999999e-07,
547
+ "loss": 1.8784,
548
+ "step": 3850
549
+ },
550
+ {
551
+ "epoch": 0.78,
552
+ "grad_norm": 40.22808837890625,
553
+ "learning_rate": 8.0515e-07,
554
+ "loss": 1.6733,
555
+ "step": 3900
556
+ },
557
+ {
558
+ "epoch": 0.79,
559
+ "grad_norm": 43.46096420288086,
560
+ "learning_rate": 8.0265e-07,
561
+ "loss": 1.7327,
562
+ "step": 3950
563
+ },
564
+ {
565
+ "epoch": 0.8,
566
+ "grad_norm": 39.8518180847168,
567
+ "learning_rate": 8.0015e-07,
568
+ "loss": 1.6765,
569
+ "step": 4000
570
+ },
571
+ {
572
+ "epoch": 0.81,
573
+ "grad_norm": 43.99556350708008,
574
+ "learning_rate": 7.976999999999999e-07,
575
+ "loss": 1.6858,
576
+ "step": 4050
577
+ },
578
+ {
579
+ "epoch": 0.82,
580
+ "grad_norm": 39.69054412841797,
581
+ "learning_rate": 7.952e-07,
582
+ "loss": 1.7666,
583
+ "step": 4100
584
+ },
585
+ {
586
+ "epoch": 0.83,
587
+ "grad_norm": 37.67417907714844,
588
+ "learning_rate": 7.926999999999999e-07,
589
+ "loss": 1.7942,
590
+ "step": 4150
591
+ },
592
+ {
593
+ "epoch": 0.84,
594
+ "grad_norm": 39.24260711669922,
595
+ "learning_rate": 7.902e-07,
596
+ "loss": 1.7449,
597
+ "step": 4200
598
+ },
599
+ {
600
+ "epoch": 0.85,
601
+ "grad_norm": 44.0001220703125,
602
+ "learning_rate": 7.876999999999999e-07,
603
+ "loss": 1.7139,
604
+ "step": 4250
605
+ },
606
+ {
607
+ "epoch": 0.86,
608
+ "grad_norm": 41.069236755371094,
609
+ "learning_rate": 7.852e-07,
610
+ "loss": 1.6626,
611
+ "step": 4300
612
+ },
613
+ {
614
+ "epoch": 0.87,
615
+ "grad_norm": 40.02455139160156,
616
+ "learning_rate": 7.826999999999999e-07,
617
+ "loss": 1.6679,
618
+ "step": 4350
619
+ },
620
+ {
621
+ "epoch": 0.88,
622
+ "grad_norm": 38.73442077636719,
623
+ "learning_rate": 7.802e-07,
624
+ "loss": 1.7561,
625
+ "step": 4400
626
+ },
627
+ {
628
+ "epoch": 0.89,
629
+ "grad_norm": 44.03123474121094,
630
+ "learning_rate": 7.776999999999999e-07,
631
+ "loss": 1.7519,
632
+ "step": 4450
633
+ },
634
+ {
635
+ "epoch": 0.9,
636
+ "grad_norm": 38.86705017089844,
637
+ "learning_rate": 7.752e-07,
638
+ "loss": 1.7051,
639
+ "step": 4500
640
+ },
641
+ {
642
+ "epoch": 0.91,
643
+ "grad_norm": 42.8055534362793,
644
+ "learning_rate": 7.727e-07,
645
+ "loss": 1.7045,
646
+ "step": 4550
647
+ },
648
+ {
649
+ "epoch": 0.92,
650
+ "grad_norm": 50.77599334716797,
651
+ "learning_rate": 7.702e-07,
652
+ "loss": 1.7706,
653
+ "step": 4600
654
+ },
655
+ {
656
+ "epoch": 0.93,
657
+ "grad_norm": 39.55324935913086,
658
+ "learning_rate": 7.677e-07,
659
+ "loss": 1.7337,
660
+ "step": 4650
661
+ },
662
+ {
663
+ "epoch": 0.94,
664
+ "grad_norm": 48.40109634399414,
665
+ "learning_rate": 7.652e-07,
666
+ "loss": 1.6519,
667
+ "step": 4700
668
+ },
669
+ {
670
+ "epoch": 0.95,
671
+ "grad_norm": 47.08484649658203,
672
+ "learning_rate": 7.627e-07,
673
+ "loss": 1.7528,
674
+ "step": 4750
675
+ },
676
+ {
677
+ "epoch": 0.96,
678
+ "grad_norm": 41.425846099853516,
679
+ "learning_rate": 7.601999999999999e-07,
680
+ "loss": 1.6663,
681
+ "step": 4800
682
+ },
683
+ {
684
+ "epoch": 0.97,
685
+ "grad_norm": 42.3223991394043,
686
+ "learning_rate": 7.577e-07,
687
+ "loss": 1.6747,
688
+ "step": 4850
689
+ },
690
+ {
691
+ "epoch": 0.98,
692
+ "grad_norm": 44.03189468383789,
693
+ "learning_rate": 7.552e-07,
694
+ "loss": 1.6487,
695
+ "step": 4900
696
+ },
697
+ {
698
+ "epoch": 0.99,
699
+ "grad_norm": 41.170108795166016,
700
+ "learning_rate": 7.527e-07,
701
+ "loss": 1.7019,
702
+ "step": 4950
703
+ },
704
+ {
705
+ "epoch": 1.0,
706
+ "grad_norm": 42.10016632080078,
707
+ "learning_rate": 7.501999999999999e-07,
708
+ "loss": 1.6696,
709
+ "step": 5000
710
+ },
711
+ {
712
+ "epoch": 1.01,
713
+ "grad_norm": 34.27055358886719,
714
+ "learning_rate": 7.477e-07,
715
+ "loss": 1.62,
716
+ "step": 5050
717
+ },
718
+ {
719
+ "epoch": 1.02,
720
+ "grad_norm": 41.19538116455078,
721
+ "learning_rate": 7.452e-07,
722
+ "loss": 1.6309,
723
+ "step": 5100
724
+ },
725
+ {
726
+ "epoch": 1.03,
727
+ "grad_norm": 45.550575256347656,
728
+ "learning_rate": 7.427e-07,
729
+ "loss": 1.6535,
730
+ "step": 5150
731
+ },
732
+ {
733
+ "epoch": 1.04,
734
+ "grad_norm": 45.184539794921875,
735
+ "learning_rate": 7.401999999999999e-07,
736
+ "loss": 1.7223,
737
+ "step": 5200
738
+ },
739
+ {
740
+ "epoch": 1.05,
741
+ "grad_norm": 43.35379409790039,
742
+ "learning_rate": 7.376999999999999e-07,
743
+ "loss": 1.6844,
744
+ "step": 5250
745
+ },
746
+ {
747
+ "epoch": 1.06,
748
+ "grad_norm": 40.49492645263672,
749
+ "learning_rate": 7.352e-07,
750
+ "loss": 1.689,
751
+ "step": 5300
752
+ },
753
+ {
754
+ "epoch": 1.07,
755
+ "grad_norm": 38.820106506347656,
756
+ "learning_rate": 7.327e-07,
757
+ "loss": 1.7506,
758
+ "step": 5350
759
+ },
760
+ {
761
+ "epoch": 1.08,
762
+ "grad_norm": 38.87533187866211,
763
+ "learning_rate": 7.301999999999999e-07,
764
+ "loss": 1.5689,
765
+ "step": 5400
766
+ },
767
+ {
768
+ "epoch": 1.09,
769
+ "grad_norm": 34.46385192871094,
770
+ "learning_rate": 7.276999999999999e-07,
771
+ "loss": 1.6876,
772
+ "step": 5450
773
+ },
774
+ {
775
+ "epoch": 1.1,
776
+ "grad_norm": 39.77639389038086,
777
+ "learning_rate": 7.252e-07,
778
+ "loss": 1.6515,
779
+ "step": 5500
780
+ },
781
+ {
782
+ "epoch": 1.11,
783
+ "grad_norm": 42.202735900878906,
784
+ "learning_rate": 7.227e-07,
785
+ "loss": 1.5767,
786
+ "step": 5550
787
+ },
788
+ {
789
+ "epoch": 1.12,
790
+ "grad_norm": 36.47328186035156,
791
+ "learning_rate": 7.201999999999999e-07,
792
+ "loss": 1.7376,
793
+ "step": 5600
794
+ },
795
+ {
796
+ "epoch": 1.13,
797
+ "grad_norm": 40.54157257080078,
798
+ "learning_rate": 7.176999999999999e-07,
799
+ "loss": 1.768,
800
+ "step": 5650
801
+ },
802
+ {
803
+ "epoch": 1.1400000000000001,
804
+ "grad_norm": 36.865177154541016,
805
+ "learning_rate": 7.151999999999999e-07,
806
+ "loss": 1.5431,
807
+ "step": 5700
808
+ },
809
+ {
810
+ "epoch": 1.15,
811
+ "grad_norm": 32.530216217041016,
812
+ "learning_rate": 7.127e-07,
813
+ "loss": 1.6509,
814
+ "step": 5750
815
+ },
816
+ {
817
+ "epoch": 1.16,
818
+ "grad_norm": 38.9181022644043,
819
+ "learning_rate": 7.102e-07,
820
+ "loss": 1.6686,
821
+ "step": 5800
822
+ },
823
+ {
824
+ "epoch": 1.17,
825
+ "grad_norm": 47.71958541870117,
826
+ "learning_rate": 7.076999999999999e-07,
827
+ "loss": 1.6332,
828
+ "step": 5850
829
+ },
830
+ {
831
+ "epoch": 1.18,
832
+ "grad_norm": 33.65346908569336,
833
+ "learning_rate": 7.052e-07,
834
+ "loss": 1.6374,
835
+ "step": 5900
836
+ },
837
+ {
838
+ "epoch": 1.19,
839
+ "grad_norm": 50.740474700927734,
840
+ "learning_rate": 7.027e-07,
841
+ "loss": 1.6919,
842
+ "step": 5950
843
+ },
844
+ {
845
+ "epoch": 1.2,
846
+ "grad_norm": 35.024147033691406,
847
+ "learning_rate": 7.002e-07,
848
+ "loss": 1.5119,
849
+ "step": 6000
850
+ },
851
+ {
852
+ "epoch": 1.21,
853
+ "grad_norm": 46.42051315307617,
854
+ "learning_rate": 6.977499999999999e-07,
855
+ "loss": 1.7009,
856
+ "step": 6050
857
+ },
858
+ {
859
+ "epoch": 1.22,
860
+ "grad_norm": 40.99652099609375,
861
+ "learning_rate": 6.9525e-07,
862
+ "loss": 1.671,
863
+ "step": 6100
864
+ },
865
+ {
866
+ "epoch": 1.23,
867
+ "grad_norm": 48.398765563964844,
868
+ "learning_rate": 6.9275e-07,
869
+ "loss": 1.5888,
870
+ "step": 6150
871
+ },
872
+ {
873
+ "epoch": 1.24,
874
+ "grad_norm": 37.039058685302734,
875
+ "learning_rate": 6.9025e-07,
876
+ "loss": 1.7995,
877
+ "step": 6200
878
+ },
879
+ {
880
+ "epoch": 1.25,
881
+ "grad_norm": 39.58694839477539,
882
+ "learning_rate": 6.877499999999999e-07,
883
+ "loss": 1.6646,
884
+ "step": 6250
885
+ },
886
+ {
887
+ "epoch": 1.26,
888
+ "grad_norm": 43.098045349121094,
889
+ "learning_rate": 6.8525e-07,
890
+ "loss": 1.7114,
891
+ "step": 6300
892
+ },
893
+ {
894
+ "epoch": 1.27,
895
+ "grad_norm": 38.67097091674805,
896
+ "learning_rate": 6.827499999999999e-07,
897
+ "loss": 1.5854,
898
+ "step": 6350
899
+ },
900
+ {
901
+ "epoch": 1.28,
902
+ "grad_norm": 43.86952590942383,
903
+ "learning_rate": 6.8025e-07,
904
+ "loss": 1.6068,
905
+ "step": 6400
906
+ },
907
+ {
908
+ "epoch": 1.29,
909
+ "grad_norm": 43.158714294433594,
910
+ "learning_rate": 6.777499999999999e-07,
911
+ "loss": 1.7206,
912
+ "step": 6450
913
+ },
914
+ {
915
+ "epoch": 1.3,
916
+ "grad_norm": 44.74959945678711,
917
+ "learning_rate": 6.7525e-07,
918
+ "loss": 1.656,
919
+ "step": 6500
920
+ },
921
+ {
922
+ "epoch": 1.31,
923
+ "grad_norm": 34.64699172973633,
924
+ "learning_rate": 6.727499999999999e-07,
925
+ "loss": 1.6306,
926
+ "step": 6550
927
+ },
928
+ {
929
+ "epoch": 1.32,
930
+ "grad_norm": 51.52064895629883,
931
+ "learning_rate": 6.7025e-07,
932
+ "loss": 1.7831,
933
+ "step": 6600
934
+ },
935
+ {
936
+ "epoch": 1.33,
937
+ "grad_norm": 55.97758102416992,
938
+ "learning_rate": 6.677499999999999e-07,
939
+ "loss": 1.5366,
940
+ "step": 6650
941
+ },
942
+ {
943
+ "epoch": 1.34,
944
+ "grad_norm": 40.172523498535156,
945
+ "learning_rate": 6.6525e-07,
946
+ "loss": 1.7331,
947
+ "step": 6700
948
+ },
949
+ {
950
+ "epoch": 1.35,
951
+ "grad_norm": 42.77679443359375,
952
+ "learning_rate": 6.627499999999999e-07,
953
+ "loss": 1.6112,
954
+ "step": 6750
955
+ },
956
+ {
957
+ "epoch": 1.3599999999999999,
958
+ "grad_norm": 36.01056671142578,
959
+ "learning_rate": 6.602499999999999e-07,
960
+ "loss": 1.5688,
961
+ "step": 6800
962
+ },
963
+ {
964
+ "epoch": 1.37,
965
+ "grad_norm": 40.94023895263672,
966
+ "learning_rate": 6.577499999999999e-07,
967
+ "loss": 1.5867,
968
+ "step": 6850
969
+ },
970
+ {
971
+ "epoch": 1.38,
972
+ "grad_norm": 45.53089141845703,
973
+ "learning_rate": 6.5525e-07,
974
+ "loss": 1.5666,
975
+ "step": 6900
976
+ },
977
+ {
978
+ "epoch": 1.3900000000000001,
979
+ "grad_norm": 37.20089340209961,
980
+ "learning_rate": 6.5275e-07,
981
+ "loss": 1.5804,
982
+ "step": 6950
983
+ },
984
+ {
985
+ "epoch": 1.4,
986
+ "grad_norm": 33.455230712890625,
987
+ "learning_rate": 6.502499999999999e-07,
988
+ "loss": 1.5933,
989
+ "step": 7000
990
+ },
991
+ {
992
+ "epoch": 1.41,
993
+ "grad_norm": 39.31380844116211,
994
+ "learning_rate": 6.477500000000001e-07,
995
+ "loss": 1.6599,
996
+ "step": 7050
997
+ },
998
+ {
999
+ "epoch": 1.42,
1000
+ "grad_norm": 40.90291976928711,
1001
+ "learning_rate": 6.4525e-07,
1002
+ "loss": 1.7809,
1003
+ "step": 7100
1004
+ },
1005
+ {
1006
+ "epoch": 1.43,
1007
+ "grad_norm": 38.44248580932617,
1008
+ "learning_rate": 6.4275e-07,
1009
+ "loss": 1.554,
1010
+ "step": 7150
1011
+ },
1012
+ {
1013
+ "epoch": 1.44,
1014
+ "grad_norm": 41.40769958496094,
1015
+ "learning_rate": 6.402499999999999e-07,
1016
+ "loss": 1.5848,
1017
+ "step": 7200
1018
+ },
1019
+ {
1020
+ "epoch": 1.45,
1021
+ "grad_norm": 34.839202880859375,
1022
+ "learning_rate": 6.377500000000001e-07,
1023
+ "loss": 1.5506,
1024
+ "step": 7250
1025
+ },
1026
+ {
1027
+ "epoch": 1.46,
1028
+ "grad_norm": 39.07792663574219,
1029
+ "learning_rate": 6.3525e-07,
1030
+ "loss": 1.6936,
1031
+ "step": 7300
1032
+ },
1033
+ {
1034
+ "epoch": 1.47,
1035
+ "grad_norm": 47.81125259399414,
1036
+ "learning_rate": 6.3275e-07,
1037
+ "loss": 1.7948,
1038
+ "step": 7350
1039
+ },
1040
+ {
1041
+ "epoch": 1.48,
1042
+ "grad_norm": 33.074684143066406,
1043
+ "learning_rate": 6.302499999999999e-07,
1044
+ "loss": 1.5964,
1045
+ "step": 7400
1046
+ },
1047
+ {
1048
+ "epoch": 1.49,
1049
+ "grad_norm": 35.54746627807617,
1050
+ "learning_rate": 6.2775e-07,
1051
+ "loss": 1.5494,
1052
+ "step": 7450
1053
+ },
1054
+ {
1055
+ "epoch": 1.5,
1056
+ "grad_norm": 47.26594543457031,
1057
+ "learning_rate": 6.2525e-07,
1058
+ "loss": 1.6013,
1059
+ "step": 7500
1060
+ },
1061
+ {
1062
+ "epoch": 1.51,
1063
+ "grad_norm": 47.13039779663086,
1064
+ "learning_rate": 6.2275e-07,
1065
+ "loss": 1.5403,
1066
+ "step": 7550
1067
+ },
1068
+ {
1069
+ "epoch": 1.52,
1070
+ "grad_norm": 42.410614013671875,
1071
+ "learning_rate": 6.202499999999999e-07,
1072
+ "loss": 1.5956,
1073
+ "step": 7600
1074
+ },
1075
+ {
1076
+ "epoch": 1.53,
1077
+ "grad_norm": 38.69276428222656,
1078
+ "learning_rate": 6.1775e-07,
1079
+ "loss": 1.6829,
1080
+ "step": 7650
1081
+ },
1082
+ {
1083
+ "epoch": 1.54,
1084
+ "grad_norm": 41.635135650634766,
1085
+ "learning_rate": 6.1525e-07,
1086
+ "loss": 1.5228,
1087
+ "step": 7700
1088
+ },
1089
+ {
1090
+ "epoch": 1.55,
1091
+ "grad_norm": 38.301753997802734,
1092
+ "learning_rate": 6.1275e-07,
1093
+ "loss": 1.5039,
1094
+ "step": 7750
1095
+ },
1096
+ {
1097
+ "epoch": 1.56,
1098
+ "grad_norm": 40.455101013183594,
1099
+ "learning_rate": 6.102499999999999e-07,
1100
+ "loss": 1.5666,
1101
+ "step": 7800
1102
+ },
1103
+ {
1104
+ "epoch": 1.5699999999999998,
1105
+ "grad_norm": 49.50397491455078,
1106
+ "learning_rate": 6.0775e-07,
1107
+ "loss": 1.5666,
1108
+ "step": 7850
1109
+ },
1110
+ {
1111
+ "epoch": 1.58,
1112
+ "grad_norm": 51.95878601074219,
1113
+ "learning_rate": 6.052499999999999e-07,
1114
+ "loss": 1.5874,
1115
+ "step": 7900
1116
+ },
1117
+ {
1118
+ "epoch": 1.5899999999999999,
1119
+ "grad_norm": 36.6114501953125,
1120
+ "learning_rate": 6.0275e-07,
1121
+ "loss": 1.63,
1122
+ "step": 7950
1123
+ },
1124
+ {
1125
+ "epoch": 1.6,
1126
+ "grad_norm": 41.69790267944336,
1127
+ "learning_rate": 6.002499999999999e-07,
1128
+ "loss": 1.5995,
1129
+ "step": 8000
1130
+ },
1131
+ {
1132
+ "epoch": 1.6099999999999999,
1133
+ "grad_norm": 38.66043472290039,
1134
+ "learning_rate": 5.978e-07,
1135
+ "loss": 1.5629,
1136
+ "step": 8050
1137
+ },
1138
+ {
1139
+ "epoch": 1.62,
1140
+ "grad_norm": 59.15949249267578,
1141
+ "learning_rate": 5.953e-07,
1142
+ "loss": 1.6076,
1143
+ "step": 8100
1144
+ },
1145
+ {
1146
+ "epoch": 1.63,
1147
+ "grad_norm": 37.75910186767578,
1148
+ "learning_rate": 5.928e-07,
1149
+ "loss": 1.5531,
1150
+ "step": 8150
1151
+ },
1152
+ {
1153
+ "epoch": 1.6400000000000001,
1154
+ "grad_norm": 40.374855041503906,
1155
+ "learning_rate": 5.903e-07,
1156
+ "loss": 1.5393,
1157
+ "step": 8200
1158
+ },
1159
+ {
1160
+ "epoch": 1.65,
1161
+ "grad_norm": 47.41810989379883,
1162
+ "learning_rate": 5.878e-07,
1163
+ "loss": 1.5062,
1164
+ "step": 8250
1165
+ },
1166
+ {
1167
+ "epoch": 1.6600000000000001,
1168
+ "grad_norm": 46.51014709472656,
1169
+ "learning_rate": 5.853e-07,
1170
+ "loss": 1.6144,
1171
+ "step": 8300
1172
+ },
1173
+ {
1174
+ "epoch": 1.67,
1175
+ "grad_norm": 49.626834869384766,
1176
+ "learning_rate": 5.828e-07,
1177
+ "loss": 1.6439,
1178
+ "step": 8350
1179
+ },
1180
+ {
1181
+ "epoch": 1.6800000000000002,
1182
+ "grad_norm": 32.59950256347656,
1183
+ "learning_rate": 5.803e-07,
1184
+ "loss": 1.5362,
1185
+ "step": 8400
1186
+ },
1187
+ {
1188
+ "epoch": 1.69,
1189
+ "grad_norm": 49.02924346923828,
1190
+ "learning_rate": 5.778e-07,
1191
+ "loss": 1.7244,
1192
+ "step": 8450
1193
+ },
1194
+ {
1195
+ "epoch": 1.7,
1196
+ "grad_norm": 41.86074447631836,
1197
+ "learning_rate": 5.753e-07,
1198
+ "loss": 1.5701,
1199
+ "step": 8500
1200
+ },
1201
+ {
1202
+ "epoch": 1.71,
1203
+ "grad_norm": 30.711210250854492,
1204
+ "learning_rate": 5.727999999999999e-07,
1205
+ "loss": 1.6219,
1206
+ "step": 8550
1207
+ },
1208
+ {
1209
+ "epoch": 1.72,
1210
+ "grad_norm": 47.8115234375,
1211
+ "learning_rate": 5.703e-07,
1212
+ "loss": 1.5723,
1213
+ "step": 8600
1214
+ },
1215
+ {
1216
+ "epoch": 1.73,
1217
+ "grad_norm": 39.29584884643555,
1218
+ "learning_rate": 5.678e-07,
1219
+ "loss": 1.5869,
1220
+ "step": 8650
1221
+ },
1222
+ {
1223
+ "epoch": 1.74,
1224
+ "grad_norm": 31.378740310668945,
1225
+ "learning_rate": 5.653e-07,
1226
+ "loss": 1.635,
1227
+ "step": 8700
1228
+ },
1229
+ {
1230
+ "epoch": 1.75,
1231
+ "grad_norm": 38.850608825683594,
1232
+ "learning_rate": 5.627999999999999e-07,
1233
+ "loss": 1.6046,
1234
+ "step": 8750
1235
+ },
1236
+ {
1237
+ "epoch": 1.76,
1238
+ "grad_norm": 26.495405197143555,
1239
+ "learning_rate": 5.603e-07,
1240
+ "loss": 1.6276,
1241
+ "step": 8800
1242
+ },
1243
+ {
1244
+ "epoch": 1.77,
1245
+ "grad_norm": 44.871849060058594,
1246
+ "learning_rate": 5.578e-07,
1247
+ "loss": 1.6889,
1248
+ "step": 8850
1249
+ },
1250
+ {
1251
+ "epoch": 1.78,
1252
+ "grad_norm": 47.68773651123047,
1253
+ "learning_rate": 5.553e-07,
1254
+ "loss": 1.7151,
1255
+ "step": 8900
1256
+ },
1257
+ {
1258
+ "epoch": 1.79,
1259
+ "grad_norm": 31.6007080078125,
1260
+ "learning_rate": 5.527999999999999e-07,
1261
+ "loss": 1.5286,
1262
+ "step": 8950
1263
+ },
1264
+ {
1265
+ "epoch": 1.8,
1266
+ "grad_norm": 30.19587516784668,
1267
+ "learning_rate": 5.502999999999999e-07,
1268
+ "loss": 1.6597,
1269
+ "step": 9000
1270
+ },
1271
+ {
1272
+ "epoch": 1.81,
1273
+ "grad_norm": 36.03053665161133,
1274
+ "learning_rate": 5.478e-07,
1275
+ "loss": 1.6422,
1276
+ "step": 9050
1277
+ },
1278
+ {
1279
+ "epoch": 1.8199999999999998,
1280
+ "grad_norm": 41.28793716430664,
1281
+ "learning_rate": 5.453e-07,
1282
+ "loss": 1.6893,
1283
+ "step": 9100
1284
+ },
1285
+ {
1286
+ "epoch": 1.83,
1287
+ "grad_norm": 32.481689453125,
1288
+ "learning_rate": 5.427999999999999e-07,
1289
+ "loss": 1.5683,
1290
+ "step": 9150
1291
+ },
1292
+ {
1293
+ "epoch": 1.8399999999999999,
1294
+ "grad_norm": 40.114654541015625,
1295
+ "learning_rate": 5.402999999999999e-07,
1296
+ "loss": 1.6021,
1297
+ "step": 9200
1298
+ },
1299
+ {
1300
+ "epoch": 1.85,
1301
+ "grad_norm": 44.78034210205078,
1302
+ "learning_rate": 5.378e-07,
1303
+ "loss": 1.6241,
1304
+ "step": 9250
1305
+ },
1306
+ {
1307
+ "epoch": 1.8599999999999999,
1308
+ "grad_norm": 32.96756362915039,
1309
+ "learning_rate": 5.353e-07,
1310
+ "loss": 1.4664,
1311
+ "step": 9300
1312
+ },
1313
+ {
1314
+ "epoch": 1.87,
1315
+ "grad_norm": 35.44109344482422,
1316
+ "learning_rate": 5.328e-07,
1317
+ "loss": 1.6045,
1318
+ "step": 9350
1319
+ },
1320
+ {
1321
+ "epoch": 1.88,
1322
+ "grad_norm": 47.10539245605469,
1323
+ "learning_rate": 5.302999999999999e-07,
1324
+ "loss": 1.6529,
1325
+ "step": 9400
1326
+ },
1327
+ {
1328
+ "epoch": 1.8900000000000001,
1329
+ "grad_norm": 35.667083740234375,
1330
+ "learning_rate": 5.278000000000001e-07,
1331
+ "loss": 1.6878,
1332
+ "step": 9450
1333
+ },
1334
+ {
1335
+ "epoch": 1.9,
1336
+ "grad_norm": 42.152652740478516,
1337
+ "learning_rate": 5.253e-07,
1338
+ "loss": 1.6694,
1339
+ "step": 9500
1340
+ },
1341
+ {
1342
+ "epoch": 1.9100000000000001,
1343
+ "grad_norm": 44.23321533203125,
1344
+ "learning_rate": 5.228e-07,
1345
+ "loss": 1.61,
1346
+ "step": 9550
1347
+ },
1348
+ {
1349
+ "epoch": 1.92,
1350
+ "grad_norm": 37.32196807861328,
1351
+ "learning_rate": 5.202999999999999e-07,
1352
+ "loss": 1.5521,
1353
+ "step": 9600
1354
+ },
1355
+ {
1356
+ "epoch": 1.9300000000000002,
1357
+ "grad_norm": 51.841514587402344,
1358
+ "learning_rate": 5.178e-07,
1359
+ "loss": 1.682,
1360
+ "step": 9650
1361
+ },
1362
+ {
1363
+ "epoch": 1.94,
1364
+ "grad_norm": 41.69253158569336,
1365
+ "learning_rate": 5.153e-07,
1366
+ "loss": 1.6228,
1367
+ "step": 9700
1368
+ },
1369
+ {
1370
+ "epoch": 1.95,
1371
+ "grad_norm": 32.16023635864258,
1372
+ "learning_rate": 5.128e-07,
1373
+ "loss": 1.6022,
1374
+ "step": 9750
1375
+ },
1376
+ {
1377
+ "epoch": 1.96,
1378
+ "grad_norm": 40.430206298828125,
1379
+ "learning_rate": 5.102999999999999e-07,
1380
+ "loss": 1.6262,
1381
+ "step": 9800
1382
+ },
1383
+ {
1384
+ "epoch": 1.97,
1385
+ "grad_norm": 47.571449279785156,
1386
+ "learning_rate": 5.078e-07,
1387
+ "loss": 1.6431,
1388
+ "step": 9850
1389
+ },
1390
+ {
1391
+ "epoch": 1.98,
1392
+ "grad_norm": 40.32009506225586,
1393
+ "learning_rate": 5.053e-07,
1394
+ "loss": 1.4864,
1395
+ "step": 9900
1396
+ },
1397
+ {
1398
+ "epoch": 1.99,
1399
+ "grad_norm": 40.59520721435547,
1400
+ "learning_rate": 5.028e-07,
1401
+ "loss": 1.595,
1402
+ "step": 9950
1403
+ },
1404
+ {
1405
+ "epoch": 2.0,
1406
+ "grad_norm": 44.62666702270508,
1407
+ "learning_rate": 5.002999999999999e-07,
1408
+ "loss": 1.5604,
1409
+ "step": 10000
1410
+ },
1411
+ {
1412
+ "epoch": 2.01,
1413
+ "grad_norm": 37.61200714111328,
1414
+ "learning_rate": 4.9785e-07,
1415
+ "loss": 1.6435,
1416
+ "step": 10050
1417
+ },
1418
+ {
1419
+ "epoch": 2.02,
1420
+ "grad_norm": 40.34099197387695,
1421
+ "learning_rate": 4.953499999999999e-07,
1422
+ "loss": 1.7094,
1423
+ "step": 10100
1424
+ },
1425
+ {
1426
+ "epoch": 2.03,
1427
+ "grad_norm": 47.07732009887695,
1428
+ "learning_rate": 4.9285e-07,
1429
+ "loss": 1.5482,
1430
+ "step": 10150
1431
+ },
1432
+ {
1433
+ "epoch": 2.04,
1434
+ "grad_norm": 23.103185653686523,
1435
+ "learning_rate": 4.9035e-07,
1436
+ "loss": 1.6298,
1437
+ "step": 10200
1438
+ },
1439
+ {
1440
+ "epoch": 2.05,
1441
+ "grad_norm": 44.33336639404297,
1442
+ "learning_rate": 4.8785e-07,
1443
+ "loss": 1.5512,
1444
+ "step": 10250
1445
+ },
1446
+ {
1447
+ "epoch": 2.06,
1448
+ "grad_norm": 41.70211410522461,
1449
+ "learning_rate": 4.853499999999999e-07,
1450
+ "loss": 1.4399,
1451
+ "step": 10300
1452
+ },
1453
+ {
1454
+ "epoch": 2.07,
1455
+ "grad_norm": 36.45768737792969,
1456
+ "learning_rate": 4.8285e-07,
1457
+ "loss": 1.5103,
1458
+ "step": 10350
1459
+ },
1460
+ {
1461
+ "epoch": 2.08,
1462
+ "grad_norm": 30.36566162109375,
1463
+ "learning_rate": 4.8035e-07,
1464
+ "loss": 1.4481,
1465
+ "step": 10400
1466
+ },
1467
+ {
1468
+ "epoch": 2.09,
1469
+ "grad_norm": 35.793094635009766,
1470
+ "learning_rate": 4.7785e-07,
1471
+ "loss": 1.5339,
1472
+ "step": 10450
1473
+ },
1474
+ {
1475
+ "epoch": 2.1,
1476
+ "grad_norm": 52.48720932006836,
1477
+ "learning_rate": 4.7535e-07,
1478
+ "loss": 1.6079,
1479
+ "step": 10500
1480
+ },
1481
+ {
1482
+ "epoch": 2.11,
1483
+ "grad_norm": 41.160335540771484,
1484
+ "learning_rate": 4.7284999999999995e-07,
1485
+ "loss": 1.4992,
1486
+ "step": 10550
1487
+ },
1488
+ {
1489
+ "epoch": 2.12,
1490
+ "grad_norm": 40.25874710083008,
1491
+ "learning_rate": 4.7034999999999997e-07,
1492
+ "loss": 1.601,
1493
+ "step": 10600
1494
+ },
1495
+ {
1496
+ "epoch": 2.13,
1497
+ "grad_norm": 35.40253829956055,
1498
+ "learning_rate": 4.6784999999999994e-07,
1499
+ "loss": 1.5437,
1500
+ "step": 10650
1501
+ },
1502
+ {
1503
+ "epoch": 2.14,
1504
+ "grad_norm": 42.93209457397461,
1505
+ "learning_rate": 4.6534999999999997e-07,
1506
+ "loss": 1.5923,
1507
+ "step": 10700
1508
+ },
1509
+ {
1510
+ "epoch": 2.15,
1511
+ "grad_norm": 43.273956298828125,
1512
+ "learning_rate": 4.6284999999999994e-07,
1513
+ "loss": 1.5925,
1514
+ "step": 10750
1515
+ },
1516
+ {
1517
+ "epoch": 2.16,
1518
+ "grad_norm": 49.30186080932617,
1519
+ "learning_rate": 4.6034999999999996e-07,
1520
+ "loss": 1.538,
1521
+ "step": 10800
1522
+ },
1523
+ {
1524
+ "epoch": 2.17,
1525
+ "grad_norm": 40.10365295410156,
1526
+ "learning_rate": 4.5784999999999993e-07,
1527
+ "loss": 1.545,
1528
+ "step": 10850
1529
+ },
1530
+ {
1531
+ "epoch": 2.18,
1532
+ "grad_norm": 40.76726531982422,
1533
+ "learning_rate": 4.5534999999999996e-07,
1534
+ "loss": 1.4837,
1535
+ "step": 10900
1536
+ },
1537
+ {
1538
+ "epoch": 2.19,
1539
+ "grad_norm": 36.3805046081543,
1540
+ "learning_rate": 4.5284999999999993e-07,
1541
+ "loss": 1.5463,
1542
+ "step": 10950
1543
+ },
1544
+ {
1545
+ "epoch": 2.2,
1546
+ "grad_norm": 25.484167098999023,
1547
+ "learning_rate": 4.5034999999999995e-07,
1548
+ "loss": 1.5916,
1549
+ "step": 11000
1550
+ },
1551
+ {
1552
+ "epoch": 2.21,
1553
+ "grad_norm": 33.473121643066406,
1554
+ "learning_rate": 4.4785000000000003e-07,
1555
+ "loss": 1.5033,
1556
+ "step": 11050
1557
+ },
1558
+ {
1559
+ "epoch": 2.22,
1560
+ "grad_norm": 36.8259391784668,
1561
+ "learning_rate": 4.4535e-07,
1562
+ "loss": 1.552,
1563
+ "step": 11100
1564
+ },
1565
+ {
1566
+ "epoch": 2.23,
1567
+ "grad_norm": 32.61063766479492,
1568
+ "learning_rate": 4.4285e-07,
1569
+ "loss": 1.5759,
1570
+ "step": 11150
1571
+ },
1572
+ {
1573
+ "epoch": 2.24,
1574
+ "grad_norm": 51.49959945678711,
1575
+ "learning_rate": 4.4035e-07,
1576
+ "loss": 1.556,
1577
+ "step": 11200
1578
+ },
1579
+ {
1580
+ "epoch": 2.25,
1581
+ "grad_norm": 44.21879959106445,
1582
+ "learning_rate": 4.3785e-07,
1583
+ "loss": 1.4935,
1584
+ "step": 11250
1585
+ },
1586
+ {
1587
+ "epoch": 2.26,
1588
+ "grad_norm": 36.884010314941406,
1589
+ "learning_rate": 4.3535e-07,
1590
+ "loss": 1.4284,
1591
+ "step": 11300
1592
+ },
1593
+ {
1594
+ "epoch": 2.27,
1595
+ "grad_norm": 38.094329833984375,
1596
+ "learning_rate": 4.3285e-07,
1597
+ "loss": 1.6868,
1598
+ "step": 11350
1599
+ },
1600
+ {
1601
+ "epoch": 2.2800000000000002,
1602
+ "grad_norm": 41.53178024291992,
1603
+ "learning_rate": 4.3035e-07,
1604
+ "loss": 1.6034,
1605
+ "step": 11400
1606
+ },
1607
+ {
1608
+ "epoch": 2.29,
1609
+ "grad_norm": 46.737213134765625,
1610
+ "learning_rate": 4.2785e-07,
1611
+ "loss": 1.5557,
1612
+ "step": 11450
1613
+ },
1614
+ {
1615
+ "epoch": 2.3,
1616
+ "grad_norm": 50.67173385620117,
1617
+ "learning_rate": 4.2535e-07,
1618
+ "loss": 1.6998,
1619
+ "step": 11500
1620
+ },
1621
+ {
1622
+ "epoch": 2.31,
1623
+ "grad_norm": 52.222557067871094,
1624
+ "learning_rate": 4.2285e-07,
1625
+ "loss": 1.5495,
1626
+ "step": 11550
1627
+ },
1628
+ {
1629
+ "epoch": 2.32,
1630
+ "grad_norm": 48.568050384521484,
1631
+ "learning_rate": 4.2035e-07,
1632
+ "loss": 1.5441,
1633
+ "step": 11600
1634
+ },
1635
+ {
1636
+ "epoch": 2.33,
1637
+ "grad_norm": 55.280521392822266,
1638
+ "learning_rate": 4.1785e-07,
1639
+ "loss": 1.68,
1640
+ "step": 11650
1641
+ },
1642
+ {
1643
+ "epoch": 2.34,
1644
+ "grad_norm": 42.30193328857422,
1645
+ "learning_rate": 4.1534999999999997e-07,
1646
+ "loss": 1.6276,
1647
+ "step": 11700
1648
+ },
1649
+ {
1650
+ "epoch": 2.35,
1651
+ "grad_norm": 54.417327880859375,
1652
+ "learning_rate": 4.1285e-07,
1653
+ "loss": 1.5603,
1654
+ "step": 11750
1655
+ },
1656
+ {
1657
+ "epoch": 2.36,
1658
+ "grad_norm": 33.117340087890625,
1659
+ "learning_rate": 4.1034999999999997e-07,
1660
+ "loss": 1.5169,
1661
+ "step": 11800
1662
+ },
1663
+ {
1664
+ "epoch": 2.37,
1665
+ "grad_norm": 43.10250473022461,
1666
+ "learning_rate": 4.0785e-07,
1667
+ "loss": 1.4491,
1668
+ "step": 11850
1669
+ },
1670
+ {
1671
+ "epoch": 2.38,
1672
+ "grad_norm": 48.663917541503906,
1673
+ "learning_rate": 4.0534999999999996e-07,
1674
+ "loss": 1.5617,
1675
+ "step": 11900
1676
+ },
1677
+ {
1678
+ "epoch": 2.39,
1679
+ "grad_norm": 41.02192687988281,
1680
+ "learning_rate": 4.0285e-07,
1681
+ "loss": 1.6111,
1682
+ "step": 11950
1683
+ },
1684
+ {
1685
+ "epoch": 2.4,
1686
+ "grad_norm": 42.699256896972656,
1687
+ "learning_rate": 4.0034999999999996e-07,
1688
+ "loss": 1.4217,
1689
+ "step": 12000
1690
+ },
1691
+ {
1692
+ "epoch": 2.41,
1693
+ "grad_norm": 49.665855407714844,
1694
+ "learning_rate": 3.979e-07,
1695
+ "loss": 1.4223,
1696
+ "step": 12050
1697
+ },
1698
+ {
1699
+ "epoch": 2.42,
1700
+ "grad_norm": 43.366397857666016,
1701
+ "learning_rate": 3.9539999999999995e-07,
1702
+ "loss": 1.5452,
1703
+ "step": 12100
1704
+ },
1705
+ {
1706
+ "epoch": 2.43,
1707
+ "grad_norm": 35.55706024169922,
1708
+ "learning_rate": 3.9290000000000003e-07,
1709
+ "loss": 1.545,
1710
+ "step": 12150
1711
+ },
1712
+ {
1713
+ "epoch": 2.44,
1714
+ "grad_norm": 42.98080062866211,
1715
+ "learning_rate": 3.904e-07,
1716
+ "loss": 1.5648,
1717
+ "step": 12200
1718
+ },
1719
+ {
1720
+ "epoch": 2.45,
1721
+ "grad_norm": 36.39263916015625,
1722
+ "learning_rate": 3.879e-07,
1723
+ "loss": 1.5724,
1724
+ "step": 12250
1725
+ },
1726
+ {
1727
+ "epoch": 2.46,
1728
+ "grad_norm": 42.547401428222656,
1729
+ "learning_rate": 3.854e-07,
1730
+ "loss": 1.7179,
1731
+ "step": 12300
1732
+ },
1733
+ {
1734
+ "epoch": 2.4699999999999998,
1735
+ "grad_norm": 45.591087341308594,
1736
+ "learning_rate": 3.829e-07,
1737
+ "loss": 1.5122,
1738
+ "step": 12350
1739
+ },
1740
+ {
1741
+ "epoch": 2.48,
1742
+ "grad_norm": 43.481258392333984,
1743
+ "learning_rate": 3.804e-07,
1744
+ "loss": 1.4843,
1745
+ "step": 12400
1746
+ },
1747
+ {
1748
+ "epoch": 2.49,
1749
+ "grad_norm": 44.69862747192383,
1750
+ "learning_rate": 3.779e-07,
1751
+ "loss": 1.5978,
1752
+ "step": 12450
1753
+ },
1754
+ {
1755
+ "epoch": 2.5,
1756
+ "grad_norm": 43.04928970336914,
1757
+ "learning_rate": 3.754e-07,
1758
+ "loss": 1.512,
1759
+ "step": 12500
1760
+ },
1761
+ {
1762
+ "epoch": 2.51,
1763
+ "grad_norm": 35.41480255126953,
1764
+ "learning_rate": 3.729e-07,
1765
+ "loss": 1.6957,
1766
+ "step": 12550
1767
+ },
1768
+ {
1769
+ "epoch": 2.52,
1770
+ "grad_norm": 42.25669860839844,
1771
+ "learning_rate": 3.704e-07,
1772
+ "loss": 1.4797,
1773
+ "step": 12600
1774
+ },
1775
+ {
+ "epoch": 2.5300000000000002,
+ "grad_norm": 23.68979835510254,
+ "learning_rate": 3.679e-07,
+ "loss": 1.515,
+ "step": 12650
+ },
+ {
+ "epoch": 2.54,
+ "grad_norm": 39.010196685791016,
+ "learning_rate": 3.654e-07,
+ "loss": 1.5484,
+ "step": 12700
+ },
+ {
+ "epoch": 2.55,
+ "grad_norm": 40.15721130371094,
+ "learning_rate": 3.629e-07,
+ "loss": 1.595,
+ "step": 12750
+ },
+ {
+ "epoch": 2.56,
+ "grad_norm": 32.100643157958984,
+ "learning_rate": 3.6039999999999997e-07,
+ "loss": 1.5125,
+ "step": 12800
+ },
+ {
+ "epoch": 2.57,
+ "grad_norm": 51.687583923339844,
+ "learning_rate": 3.579e-07,
+ "loss": 1.6924,
+ "step": 12850
+ },
+ {
+ "epoch": 2.58,
+ "grad_norm": 40.378807067871094,
+ "learning_rate": 3.5539999999999997e-07,
+ "loss": 1.5349,
+ "step": 12900
+ },
+ {
+ "epoch": 2.59,
+ "grad_norm": 46.35411834716797,
+ "learning_rate": 3.529e-07,
+ "loss": 1.4451,
+ "step": 12950
+ },
+ {
+ "epoch": 2.6,
+ "grad_norm": 39.091644287109375,
+ "learning_rate": 3.5039999999999996e-07,
+ "loss": 1.5871,
+ "step": 13000
+ },
+ {
+ "epoch": 2.61,
+ "grad_norm": 46.588539123535156,
+ "learning_rate": 3.479e-07,
+ "loss": 1.5514,
+ "step": 13050
+ },
+ {
+ "epoch": 2.62,
+ "grad_norm": 40.58433151245117,
+ "learning_rate": 3.4539999999999996e-07,
+ "loss": 1.6805,
+ "step": 13100
+ },
+ {
+ "epoch": 2.63,
+ "grad_norm": 44.80520248413086,
+ "learning_rate": 3.429e-07,
+ "loss": 1.4672,
+ "step": 13150
+ },
+ {
+ "epoch": 2.64,
+ "grad_norm": 42.662078857421875,
+ "learning_rate": 3.4039999999999995e-07,
+ "loss": 1.5712,
+ "step": 13200
+ },
+ {
+ "epoch": 2.65,
+ "grad_norm": 40.33576583862305,
+ "learning_rate": 3.379e-07,
+ "loss": 1.4617,
+ "step": 13250
+ },
+ {
+ "epoch": 2.66,
+ "grad_norm": 44.45260238647461,
+ "learning_rate": 3.3539999999999995e-07,
+ "loss": 1.5336,
+ "step": 13300
+ },
+ {
+ "epoch": 2.67,
+ "grad_norm": 48.2208251953125,
+ "learning_rate": 3.3289999999999997e-07,
+ "loss": 1.5241,
+ "step": 13350
+ },
+ {
+ "epoch": 2.68,
+ "grad_norm": 37.904762268066406,
+ "learning_rate": 3.304e-07,
+ "loss": 1.5679,
+ "step": 13400
+ },
+ {
+ "epoch": 2.69,
+ "grad_norm": 39.32900619506836,
+ "learning_rate": 3.279e-07,
+ "loss": 1.5121,
+ "step": 13450
+ },
+ {
+ "epoch": 2.7,
+ "grad_norm": 36.95504379272461,
+ "learning_rate": 3.254e-07,
+ "loss": 1.4855,
+ "step": 13500
+ },
+ {
+ "epoch": 2.71,
+ "grad_norm": 29.145931243896484,
+ "learning_rate": 3.229e-07,
+ "loss": 1.5645,
+ "step": 13550
+ },
+ {
+ "epoch": 2.7199999999999998,
+ "grad_norm": 41.01043701171875,
+ "learning_rate": 3.204e-07,
+ "loss": 1.4902,
+ "step": 13600
+ },
+ {
+ "epoch": 2.73,
+ "grad_norm": 42.938472747802734,
+ "learning_rate": 3.179e-07,
+ "loss": 1.421,
+ "step": 13650
+ },
+ {
+ "epoch": 2.74,
+ "grad_norm": 42.611812591552734,
+ "learning_rate": 3.154e-07,
+ "loss": 1.6238,
+ "step": 13700
+ },
+ {
+ "epoch": 2.75,
+ "grad_norm": 40.880706787109375,
+ "learning_rate": 3.129e-07,
+ "loss": 1.554,
+ "step": 13750
+ },
+ {
+ "epoch": 2.76,
+ "grad_norm": 44.605411529541016,
+ "learning_rate": 3.104e-07,
+ "loss": 1.631,
+ "step": 13800
+ },
+ {
+ "epoch": 2.77,
+ "grad_norm": 35.732723236083984,
+ "learning_rate": 3.079e-07,
+ "loss": 1.5479,
+ "step": 13850
+ },
+ {
+ "epoch": 2.7800000000000002,
+ "grad_norm": 38.93905258178711,
+ "learning_rate": 3.0539999999999997e-07,
+ "loss": 1.5417,
+ "step": 13900
+ },
+ {
+ "epoch": 2.79,
+ "grad_norm": 50.35736083984375,
+ "learning_rate": 3.029e-07,
+ "loss": 1.6767,
+ "step": 13950
+ },
+ {
+ "epoch": 2.8,
+ "grad_norm": 35.24892807006836,
+ "learning_rate": 3.0039999999999996e-07,
+ "loss": 1.5639,
+ "step": 14000
+ },
+ {
+ "epoch": 2.81,
+ "grad_norm": 41.46996307373047,
+ "learning_rate": 2.9795e-07,
+ "loss": 1.4937,
+ "step": 14050
+ },
+ {
+ "epoch": 2.82,
+ "grad_norm": 39.519866943359375,
+ "learning_rate": 2.9544999999999996e-07,
+ "loss": 1.6308,
+ "step": 14100
+ },
+ {
+ "epoch": 2.83,
+ "grad_norm": 44.51643753051758,
+ "learning_rate": 2.9295e-07,
+ "loss": 1.4379,
+ "step": 14150
+ },
+ {
+ "epoch": 2.84,
+ "grad_norm": 35.87133026123047,
+ "learning_rate": 2.9044999999999996e-07,
+ "loss": 1.4668,
+ "step": 14200
+ },
+ {
+ "epoch": 2.85,
+ "grad_norm": 45.94535446166992,
+ "learning_rate": 2.8795e-07,
+ "loss": 1.6009,
+ "step": 14250
+ },
+ {
+ "epoch": 2.86,
+ "grad_norm": 44.09996032714844,
+ "learning_rate": 2.8544999999999995e-07,
+ "loss": 1.5932,
+ "step": 14300
+ },
+ {
+ "epoch": 2.87,
+ "grad_norm": 48.415313720703125,
+ "learning_rate": 2.8295e-07,
+ "loss": 1.5851,
+ "step": 14350
+ },
+ {
+ "epoch": 2.88,
+ "grad_norm": 33.39976501464844,
+ "learning_rate": 2.8044999999999995e-07,
+ "loss": 1.6999,
+ "step": 14400
+ },
+ {
+ "epoch": 2.89,
+ "grad_norm": 40.66514587402344,
+ "learning_rate": 2.7794999999999997e-07,
+ "loss": 1.4782,
+ "step": 14450
+ },
+ {
+ "epoch": 2.9,
+ "grad_norm": 45.65864944458008,
+ "learning_rate": 2.7544999999999994e-07,
+ "loss": 1.6338,
+ "step": 14500
+ },
+ {
+ "epoch": 2.91,
+ "grad_norm": 39.462677001953125,
+ "learning_rate": 2.7295e-07,
+ "loss": 1.5968,
+ "step": 14550
+ },
+ {
+ "epoch": 2.92,
+ "grad_norm": 41.13777542114258,
+ "learning_rate": 2.7045e-07,
+ "loss": 1.4837,
+ "step": 14600
+ },
+ {
+ "epoch": 2.93,
+ "grad_norm": 38.25054168701172,
+ "learning_rate": 2.6795e-07,
+ "loss": 1.5727,
+ "step": 14650
+ },
+ {
+ "epoch": 2.94,
+ "grad_norm": 37.76020431518555,
+ "learning_rate": 2.6545e-07,
+ "loss": 1.642,
+ "step": 14700
+ },
+ {
+ "epoch": 2.95,
+ "grad_norm": 50.80886459350586,
+ "learning_rate": 2.6295e-07,
+ "loss": 1.605,
+ "step": 14750
+ },
+ {
+ "epoch": 2.96,
+ "grad_norm": 52.12263870239258,
+ "learning_rate": 2.6045e-07,
+ "loss": 1.5843,
+ "step": 14800
+ },
+ {
+ "epoch": 2.9699999999999998,
+ "grad_norm": 40.298282623291016,
+ "learning_rate": 2.5795e-07,
+ "loss": 1.4729,
+ "step": 14850
+ },
+ {
+ "epoch": 2.98,
+ "grad_norm": 30.732044219970703,
+ "learning_rate": 2.5545e-07,
+ "loss": 1.5218,
+ "step": 14900
+ },
+ {
+ "epoch": 2.99,
+ "grad_norm": 43.94182586669922,
+ "learning_rate": 2.5295e-07,
+ "loss": 1.5147,
+ "step": 14950
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 60.4287223815918,
+ "learning_rate": 2.5044999999999997e-07,
+ "loss": 1.446,
+ "step": 15000
+ },
+ {
+ "epoch": 3.01,
+ "grad_norm": 35.2944221496582,
+ "learning_rate": 2.4795e-07,
+ "loss": 1.5543,
+ "step": 15050
+ },
+ {
+ "epoch": 3.02,
+ "grad_norm": 53.06623077392578,
+ "learning_rate": 2.4544999999999996e-07,
+ "loss": 1.5674,
+ "step": 15100
+ },
+ {
+ "epoch": 3.03,
+ "grad_norm": 56.413692474365234,
+ "learning_rate": 2.4295e-07,
+ "loss": 1.4601,
+ "step": 15150
+ },
+ {
+ "epoch": 3.04,
+ "grad_norm": 44.59477233886719,
+ "learning_rate": 2.4045e-07,
+ "loss": 1.5439,
+ "step": 15200
+ },
+ {
+ "epoch": 3.05,
+ "grad_norm": 46.42093276977539,
+ "learning_rate": 2.3794999999999998e-07,
+ "loss": 1.4917,
+ "step": 15250
+ },
+ {
+ "epoch": 3.06,
+ "grad_norm": 31.83967399597168,
+ "learning_rate": 2.3544999999999998e-07,
+ "loss": 1.5199,
+ "step": 15300
+ },
+ {
+ "epoch": 3.07,
+ "grad_norm": 52.99776077270508,
+ "learning_rate": 2.3294999999999998e-07,
+ "loss": 1.509,
+ "step": 15350
+ },
+ {
+ "epoch": 3.08,
+ "grad_norm": 47.901885986328125,
+ "learning_rate": 2.3044999999999998e-07,
+ "loss": 1.4072,
+ "step": 15400
+ },
+ {
+ "epoch": 3.09,
+ "grad_norm": 35.03591537475586,
+ "learning_rate": 2.2795e-07,
+ "loss": 1.4489,
+ "step": 15450
+ },
+ {
+ "epoch": 3.1,
+ "grad_norm": 40.465789794921875,
+ "learning_rate": 2.2545e-07,
+ "loss": 1.4475,
+ "step": 15500
+ },
+ {
+ "epoch": 3.11,
+ "grad_norm": 38.67084503173828,
+ "learning_rate": 2.2295e-07,
+ "loss": 1.4501,
+ "step": 15550
+ },
+ {
+ "epoch": 3.12,
+ "grad_norm": 44.53215026855469,
+ "learning_rate": 2.2045e-07,
+ "loss": 1.5509,
+ "step": 15600
+ },
+ {
+ "epoch": 3.13,
+ "grad_norm": 50.45119857788086,
+ "learning_rate": 2.1795e-07,
+ "loss": 1.5862,
+ "step": 15650
+ },
+ {
+ "epoch": 3.14,
+ "grad_norm": 42.37192153930664,
+ "learning_rate": 2.1545e-07,
+ "loss": 1.6511,
+ "step": 15700
+ },
+ {
+ "epoch": 3.15,
+ "grad_norm": 39.11933898925781,
+ "learning_rate": 2.1294999999999998e-07,
+ "loss": 1.5206,
+ "step": 15750
+ },
+ {
+ "epoch": 3.16,
+ "grad_norm": 39.057823181152344,
+ "learning_rate": 2.1044999999999998e-07,
+ "loss": 1.4265,
+ "step": 15800
+ },
+ {
+ "epoch": 3.17,
+ "grad_norm": 32.509098052978516,
+ "learning_rate": 2.0794999999999998e-07,
+ "loss": 1.5199,
+ "step": 15850
+ },
+ {
+ "epoch": 3.18,
+ "grad_norm": 34.1364860534668,
+ "learning_rate": 2.0544999999999998e-07,
+ "loss": 1.5162,
+ "step": 15900
+ },
+ {
+ "epoch": 3.19,
+ "grad_norm": 48.204994201660156,
+ "learning_rate": 2.0294999999999997e-07,
+ "loss": 1.4383,
+ "step": 15950
+ },
+ {
+ "epoch": 3.2,
+ "grad_norm": 53.505615234375,
+ "learning_rate": 2.0044999999999997e-07,
+ "loss": 1.5337,
+ "step": 16000
+ },
+ {
+ "epoch": 3.21,
+ "grad_norm": 48.69506072998047,
+ "learning_rate": 1.98e-07,
+ "loss": 1.4313,
+ "step": 16050
+ },
+ {
+ "epoch": 3.22,
+ "grad_norm": 45.98126220703125,
+ "learning_rate": 1.955e-07,
+ "loss": 1.5255,
+ "step": 16100
+ },
+ {
+ "epoch": 3.23,
+ "grad_norm": 39.528587341308594,
+ "learning_rate": 1.93e-07,
+ "loss": 1.5407,
+ "step": 16150
+ },
+ {
+ "epoch": 3.24,
+ "grad_norm": 41.41926956176758,
+ "learning_rate": 1.905e-07,
+ "loss": 1.4167,
+ "step": 16200
+ },
+ {
+ "epoch": 3.25,
+ "grad_norm": 50.91017150878906,
+ "learning_rate": 1.88e-07,
+ "loss": 1.5819,
+ "step": 16250
+ },
+ {
+ "epoch": 3.26,
+ "grad_norm": 51.679622650146484,
+ "learning_rate": 1.855e-07,
+ "loss": 1.573,
+ "step": 16300
+ },
+ {
+ "epoch": 3.27,
+ "grad_norm": 39.97464370727539,
+ "learning_rate": 1.8299999999999998e-07,
+ "loss": 1.4209,
+ "step": 16350
+ },
+ {
+ "epoch": 3.2800000000000002,
+ "grad_norm": 37.9398307800293,
+ "learning_rate": 1.8049999999999998e-07,
+ "loss": 1.4962,
+ "step": 16400
+ },
+ {
+ "epoch": 3.29,
+ "grad_norm": 41.62467956542969,
+ "learning_rate": 1.7799999999999998e-07,
+ "loss": 1.5011,
+ "step": 16450
+ },
+ {
+ "epoch": 3.3,
+ "grad_norm": 39.8488655090332,
+ "learning_rate": 1.7549999999999998e-07,
+ "loss": 1.5504,
+ "step": 16500
+ },
+ {
+ "epoch": 3.31,
+ "grad_norm": 39.093360900878906,
+ "learning_rate": 1.7299999999999997e-07,
+ "loss": 1.4504,
+ "step": 16550
+ },
+ {
+ "epoch": 3.32,
+ "grad_norm": 46.60807800292969,
+ "learning_rate": 1.705e-07,
+ "loss": 1.5712,
+ "step": 16600
+ },
+ {
+ "epoch": 3.33,
+ "grad_norm": 37.40671920776367,
+ "learning_rate": 1.68e-07,
+ "loss": 1.5793,
+ "step": 16650
+ },
+ {
+ "epoch": 3.34,
+ "grad_norm": 45.785953521728516,
+ "learning_rate": 1.655e-07,
+ "loss": 1.4975,
+ "step": 16700
+ },
+ {
+ "epoch": 3.35,
+ "grad_norm": 42.13127517700195,
+ "learning_rate": 1.63e-07,
+ "loss": 1.5418,
+ "step": 16750
+ },
+ {
+ "epoch": 3.36,
+ "grad_norm": 51.313777923583984,
+ "learning_rate": 1.605e-07,
+ "loss": 1.6146,
+ "step": 16800
+ },
+ {
+ "epoch": 3.37,
+ "grad_norm": 38.90798568725586,
+ "learning_rate": 1.5799999999999999e-07,
+ "loss": 1.5328,
+ "step": 16850
+ },
+ {
+ "epoch": 3.38,
+ "grad_norm": 42.80254364013672,
+ "learning_rate": 1.5549999999999998e-07,
+ "loss": 1.4382,
+ "step": 16900
+ },
+ {
+ "epoch": 3.39,
+ "grad_norm": 38.583885192871094,
+ "learning_rate": 1.5299999999999998e-07,
+ "loss": 1.421,
+ "step": 16950
+ },
+ {
+ "epoch": 3.4,
+ "grad_norm": 45.506473541259766,
+ "learning_rate": 1.5049999999999998e-07,
+ "loss": 1.4643,
+ "step": 17000
+ },
+ {
+ "epoch": 3.41,
+ "grad_norm": 37.714202880859375,
+ "learning_rate": 1.4799999999999998e-07,
+ "loss": 1.4807,
+ "step": 17050
+ },
+ {
+ "epoch": 3.42,
+ "grad_norm": 52.356605529785156,
+ "learning_rate": 1.4549999999999997e-07,
+ "loss": 1.4663,
+ "step": 17100
+ },
+ {
+ "epoch": 3.43,
+ "grad_norm": 40.869590759277344,
+ "learning_rate": 1.4299999999999997e-07,
+ "loss": 1.542,
+ "step": 17150
+ },
+ {
+ "epoch": 3.44,
+ "grad_norm": 55.49686813354492,
+ "learning_rate": 1.4050000000000002e-07,
+ "loss": 1.5719,
+ "step": 17200
+ },
+ {
+ "epoch": 3.45,
+ "grad_norm": 41.89445114135742,
+ "learning_rate": 1.3800000000000002e-07,
+ "loss": 1.4327,
+ "step": 17250
+ },
+ {
+ "epoch": 3.46,
+ "grad_norm": 41.772361755371094,
+ "learning_rate": 1.3550000000000002e-07,
+ "loss": 1.5065,
+ "step": 17300
+ },
+ {
+ "epoch": 3.4699999999999998,
+ "grad_norm": 45.570396423339844,
+ "learning_rate": 1.33e-07,
+ "loss": 1.4851,
+ "step": 17350
+ },
+ {
+ "epoch": 3.48,
+ "grad_norm": 45.119693756103516,
+ "learning_rate": 1.305e-07,
+ "loss": 1.4926,
+ "step": 17400
+ },
+ {
+ "epoch": 3.49,
+ "grad_norm": 39.28087615966797,
+ "learning_rate": 1.28e-07,
+ "loss": 1.5135,
+ "step": 17450
+ },
+ {
+ "epoch": 3.5,
+ "grad_norm": 49.64841079711914,
+ "learning_rate": 1.255e-07,
+ "loss": 1.5662,
+ "step": 17500
+ },
+ {
+ "epoch": 3.51,
+ "grad_norm": 39.4202995300293,
+ "learning_rate": 1.23e-07,
+ "loss": 1.5854,
+ "step": 17550
+ },
+ {
+ "epoch": 3.52,
+ "grad_norm": 39.55594253540039,
+ "learning_rate": 1.205e-07,
+ "loss": 1.5577,
+ "step": 17600
+ },
+ {
+ "epoch": 3.5300000000000002,
+ "grad_norm": 38.05757522583008,
+ "learning_rate": 1.1799999999999998e-07,
+ "loss": 1.4087,
+ "step": 17650
+ },
+ {
+ "epoch": 3.54,
+ "grad_norm": 35.86211013793945,
+ "learning_rate": 1.155e-07,
+ "loss": 1.4881,
+ "step": 17700
+ },
+ {
+ "epoch": 3.55,
+ "grad_norm": 39.4666862487793,
+ "learning_rate": 1.1299999999999999e-07,
+ "loss": 1.5927,
+ "step": 17750
+ },
+ {
+ "epoch": 3.56,
+ "grad_norm": 42.395320892333984,
+ "learning_rate": 1.1049999999999999e-07,
+ "loss": 1.5176,
+ "step": 17800
+ },
+ {
+ "epoch": 3.57,
+ "grad_norm": 34.1979866027832,
+ "learning_rate": 1.0799999999999999e-07,
+ "loss": 1.545,
+ "step": 17850
+ },
+ {
+ "epoch": 3.58,
+ "grad_norm": 41.41266632080078,
+ "learning_rate": 1.0549999999999999e-07,
+ "loss": 1.5388,
+ "step": 17900
+ },
+ {
+ "epoch": 3.59,
+ "grad_norm": 40.88737869262695,
+ "learning_rate": 1.03e-07,
+ "loss": 1.4851,
+ "step": 17950
+ },
+ {
+ "epoch": 3.6,
+ "grad_norm": 44.86513137817383,
+ "learning_rate": 1.005e-07,
+ "loss": 1.5413,
+ "step": 18000
+ },
+ {
+ "epoch": 3.61,
+ "grad_norm": 38.92195510864258,
+ "learning_rate": 9.805e-08,
+ "loss": 1.5463,
+ "step": 18050
+ },
+ {
+ "epoch": 3.62,
+ "grad_norm": 46.53792953491211,
+ "learning_rate": 9.554999999999999e-08,
+ "loss": 1.5022,
+ "step": 18100
+ },
+ {
+ "epoch": 3.63,
+ "grad_norm": 35.3355598449707,
+ "learning_rate": 9.304999999999999e-08,
+ "loss": 1.5245,
+ "step": 18150
+ },
+ {
+ "epoch": 3.64,
+ "grad_norm": 55.84636306762695,
+ "learning_rate": 9.055e-08,
+ "loss": 1.6236,
+ "step": 18200
+ },
+ {
+ "epoch": 3.65,
+ "grad_norm": 36.84891128540039,
+ "learning_rate": 8.805e-08,
+ "loss": 1.3286,
+ "step": 18250
+ },
+ {
+ "epoch": 3.66,
+ "grad_norm": 51.336795806884766,
+ "learning_rate": 8.555e-08,
+ "loss": 1.5488,
+ "step": 18300
+ },
+ {
+ "epoch": 3.67,
+ "grad_norm": 38.55815887451172,
+ "learning_rate": 8.304999999999999e-08,
+ "loss": 1.4718,
+ "step": 18350
+ },
+ {
+ "epoch": 3.68,
+ "grad_norm": 47.52061462402344,
+ "learning_rate": 8.054999999999999e-08,
+ "loss": 1.5148,
+ "step": 18400
+ },
+ {
+ "epoch": 3.69,
+ "grad_norm": 42.93059539794922,
+ "learning_rate": 7.804999999999999e-08,
+ "loss": 1.5243,
+ "step": 18450
+ },
+ {
+ "epoch": 3.7,
+ "grad_norm": 40.72081756591797,
+ "learning_rate": 7.555e-08,
+ "loss": 1.4389,
+ "step": 18500
+ },
+ {
+ "epoch": 3.71,
+ "grad_norm": 42.075836181640625,
+ "learning_rate": 7.305e-08,
+ "loss": 1.5487,
+ "step": 18550
+ },
+ {
+ "epoch": 3.7199999999999998,
+ "grad_norm": 50.6606559753418,
+ "learning_rate": 7.054999999999999e-08,
+ "loss": 1.6045,
+ "step": 18600
+ },
+ {
+ "epoch": 3.73,
+ "grad_norm": 39.99626541137695,
+ "learning_rate": 6.804999999999999e-08,
+ "loss": 1.5214,
+ "step": 18650
+ },
+ {
+ "epoch": 3.74,
+ "grad_norm": 48.29393005371094,
+ "learning_rate": 6.554999999999999e-08,
+ "loss": 1.471,
+ "step": 18700
+ },
+ {
+ "epoch": 3.75,
+ "grad_norm": 41.546470642089844,
+ "learning_rate": 6.304999999999999e-08,
+ "loss": 1.4242,
+ "step": 18750
+ },
+ {
+ "epoch": 3.76,
+ "grad_norm": 43.29277420043945,
+ "learning_rate": 6.055e-08,
+ "loss": 1.3872,
+ "step": 18800
+ },
+ {
+ "epoch": 3.77,
+ "grad_norm": 60.80516052246094,
+ "learning_rate": 5.8049999999999994e-08,
+ "loss": 1.5309,
+ "step": 18850
+ },
+ {
+ "epoch": 3.7800000000000002,
+ "grad_norm": 37.9664421081543,
+ "learning_rate": 5.555e-08,
+ "loss": 1.4366,
+ "step": 18900
+ },
+ {
+ "epoch": 3.79,
+ "grad_norm": 41.17642593383789,
+ "learning_rate": 5.3049999999999995e-08,
+ "loss": 1.5797,
+ "step": 18950
+ },
+ {
+ "epoch": 3.8,
+ "grad_norm": 36.21015167236328,
+ "learning_rate": 5.054999999999999e-08,
+ "loss": 1.5096,
+ "step": 19000
+ },
+ {
+ "epoch": 3.81,
+ "grad_norm": 47.9669075012207,
+ "learning_rate": 4.8050000000000003e-08,
+ "loss": 1.5302,
+ "step": 19050
+ },
+ {
+ "epoch": 3.82,
+ "grad_norm": 28.955060958862305,
+ "learning_rate": 4.555e-08,
+ "loss": 1.498,
+ "step": 19100
+ },
+ {
+ "epoch": 3.83,
+ "grad_norm": 52.464290618896484,
+ "learning_rate": 4.305e-08,
+ "loss": 1.4575,
+ "step": 19150
+ },
+ {
+ "epoch": 3.84,
+ "grad_norm": 29.458721160888672,
+ "learning_rate": 4.055e-08,
+ "loss": 1.484,
+ "step": 19200
+ },
+ {
+ "epoch": 3.85,
+ "grad_norm": 41.32457733154297,
+ "learning_rate": 3.805e-08,
+ "loss": 1.4677,
+ "step": 19250
+ },
+ {
+ "epoch": 3.86,
+ "grad_norm": 48.74892807006836,
+ "learning_rate": 3.555e-08,
+ "loss": 1.4268,
+ "step": 19300
+ },
+ {
+ "epoch": 3.87,
+ "grad_norm": 30.619657516479492,
+ "learning_rate": 3.305e-08,
+ "loss": 1.5416,
+ "step": 19350
+ },
+ {
+ "epoch": 3.88,
+ "grad_norm": 48.18694305419922,
+ "learning_rate": 3.055e-08,
+ "loss": 1.4068,
+ "step": 19400
+ },
+ {
+ "epoch": 3.89,
+ "grad_norm": 35.75336456298828,
+ "learning_rate": 2.8049999999999996e-08,
+ "loss": 1.3868,
+ "step": 19450
+ },
+ {
+ "epoch": 3.9,
+ "grad_norm": 46.65244674682617,
+ "learning_rate": 2.5549999999999997e-08,
+ "loss": 1.5598,
+ "step": 19500
+ },
+ {
+ "epoch": 3.91,
+ "grad_norm": 48.187679290771484,
+ "learning_rate": 2.305e-08,
+ "loss": 1.5666,
+ "step": 19550
+ },
+ {
+ "epoch": 3.92,
+ "grad_norm": 36.0362663269043,
+ "learning_rate": 2.055e-08,
+ "loss": 1.6636,
+ "step": 19600
+ },
+ {
+ "epoch": 3.93,
+ "grad_norm": 55.22818374633789,
+ "learning_rate": 1.805e-08,
+ "loss": 1.5572,
+ "step": 19650
+ },
+ {
+ "epoch": 3.94,
+ "grad_norm": 32.651451110839844,
+ "learning_rate": 1.555e-08,
+ "loss": 1.5158,
+ "step": 19700
+ },
+ {
+ "epoch": 3.95,
+ "grad_norm": 42.34391403198242,
+ "learning_rate": 1.3050000000000001e-08,
+ "loss": 1.4648,
+ "step": 19750
+ },
+ {
+ "epoch": 3.96,
+ "grad_norm": 48.5484733581543,
+ "learning_rate": 1.055e-08,
+ "loss": 1.5808,
+ "step": 19800
+ },
+ {
+ "epoch": 3.9699999999999998,
+ "grad_norm": 51.64519500732422,
+ "learning_rate": 8.05e-09,
+ "loss": 1.4912,
+ "step": 19850
+ },
+ {
+ "epoch": 3.98,
+ "grad_norm": 44.73149490356445,
+ "learning_rate": 5.55e-09,
+ "loss": 1.694,
+ "step": 19900
+ },
+ {
+ "epoch": 3.99,
+ "grad_norm": 50.322174072265625,
+ "learning_rate": 3.05e-09,
+ "loss": 1.5025,
+ "step": 19950
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 51.53582763671875,
+ "learning_rate": 5.5e-10,
+ "loss": 1.5558,
+ "step": 20000
+ }
+ ],
+ "logging_steps": 50,
+ "max_steps": 20000,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 4,
+ "save_steps": 1000,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 0.0,
+ "train_batch_size": 8,
+ "trial_name": null,
+ "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:821091129660e331b5e9290e0f1e29183ca11670f5b35d0b75b8777e17556111
+ size 5304
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
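For anyone inspecting this checkpoint, the `log_history` entries in the `trainer_state.json` diff above are plain JSON and easy to summarize locally. A minimal sketch, assuming the file has been downloaded from this repo; the local path and the per-epoch summary are illustrative, not part of the upload:

```python
import json
from collections import defaultdict

# Hypothetical local path to the trainer_state.json shown in this commit.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each record carries epoch, grad_norm, learning_rate, loss, and step.
history = state["log_history"]
print(f"logged points: {len(history)}")
print(f"final step:    {history[-1]['step']} / {state['max_steps']}")
print(f"final loss:    {history[-1]['loss']}")

# Mean training loss per epoch, e.g. to see the plateau around 1.5.
per_epoch = defaultdict(list)
for rec in history:
    per_epoch[int(rec["epoch"])].append(rec["loss"])
for epoch, losses in sorted(per_epoch.items()):
    print(f"epoch {epoch}: mean loss {sum(losses) / len(losses):.4f}")
```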