diff --git a/06-10-24_sd2.1_llama7b/checkpoint-1000/config.json b/06-10-24_sd2.1_llama7b/checkpoint-1000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..0639a820712de71ee34e332a2dc678a4b39ba7e1
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/checkpoint-1000/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-2-1",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-2-1",
+ "vocab_size": 32000
+}
diff --git a/06-10-24_sd2.1_llama7b/checkpoint-1000/mm_projector.bin b/06-10-24_sd2.1_llama7b/checkpoint-1000/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..7413b9b133047540f6cd38fd21ef269cd741a577
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/checkpoint-1000/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e607334f96253aa16046f8e3a3dd1f09b2baa03c85079c1686619be4e05540a
+size 44058237
diff --git a/06-10-24_sd2.1_llama7b/checkpoint-2000/config.json b/06-10-24_sd2.1_llama7b/checkpoint-2000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..0639a820712de71ee34e332a2dc678a4b39ba7e1
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/checkpoint-2000/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-2-1",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-2-1",
+ "vocab_size": 32000
+}
diff --git a/06-10-24_sd2.1_llama7b/checkpoint-2000/mm_projector.bin b/06-10-24_sd2.1_llama7b/checkpoint-2000/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..acfbfc578d451990e2ef72707447078e25c83ecf
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/checkpoint-2000/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ec18d6856f59407bcf4f0741053d2aea5b97b9d8afe7672d999ce1fbd7ef536
+size 44058237
diff --git a/06-10-24_sd2.1_llama7b/config.json b/06-10-24_sd2.1_llama7b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..c06e6160eb5d0974169f1f618af78df016c8adb8
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-2-1",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-2-1",
+ "vocab_size": 32000
+}
diff --git a/06-10-24_sd2.1_llama7b/mm_projector.bin b/06-10-24_sd2.1_llama7b/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..492cd2c18997f4fb573a54c613d4809eb8cb6563
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a832413970a6b62c4580d2d0161efcefed83f72caf5fba03dbdb86fe01ab5579
+size 44058237
diff --git a/06-10-24_sd2.1_llama7b/trainer_state.json b/06-10-24_sd2.1_llama7b/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..bef8693e13a5205719a3ae43cf56b5c99a981a38
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b/trainer_state.json
@@ -0,0 +1,15297 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2181,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "grad_norm": 66.15180901715897,
+ "learning_rate": 1.5151515151515153e-05,
+ "loss": 7.45,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 70.58287742918638,
+ "learning_rate": 3.0303030303030306e-05,
+ "loss": 7.4622,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 54.61059606494013,
+ "learning_rate": 4.545454545454546e-05,
+ "loss": 6.9197,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 23.09765584936969,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 5.9264,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 13.816294809592497,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 5.5781,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 18.433654866296106,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 5.2978,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 9.630860407002816,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 4.9536,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 8.949762238359233,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 4.8177,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 8.567216188877417,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 4.5507,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 6.788454874308452,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 4.3135,
+ "step": 10
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 4.879415008721834,
+ "learning_rate": 0.00016666666666666666,
+ "loss": 4.0669,
+ "step": 11
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 3.331042525334384,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 3.9607,
+ "step": 12
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.5700490401273792,
+ "learning_rate": 0.00019696969696969695,
+ "loss": 3.8094,
+ "step": 13
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.541162051021705,
+ "learning_rate": 0.00021212121212121213,
+ "loss": 3.8652,
+ "step": 14
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.5984270840656567,
+ "learning_rate": 0.00022727272727272727,
+ "loss": 3.6997,
+ "step": 15
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.7613744060198775,
+ "learning_rate": 0.00024242424242424245,
+ "loss": 3.6166,
+ "step": 16
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.583064947503314,
+ "learning_rate": 0.00025757575757575756,
+ "loss": 3.5803,
+ "step": 17
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.233894352578882,
+ "learning_rate": 0.00027272727272727274,
+ "loss": 3.6039,
+ "step": 18
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.9204651196091533,
+ "learning_rate": 0.0002878787878787879,
+ "loss": 3.4699,
+ "step": 19
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.7936451791374144,
+ "learning_rate": 0.00030303030303030303,
+ "loss": 3.3764,
+ "step": 20
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.6588765931240139,
+ "learning_rate": 0.0003181818181818182,
+ "loss": 3.4447,
+ "step": 21
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.564264980972431,
+ "learning_rate": 0.0003333333333333333,
+ "loss": 3.4823,
+ "step": 22
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.49465966045214654,
+ "learning_rate": 0.0003484848484848485,
+ "loss": 3.3917,
+ "step": 23
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.5479125246593124,
+ "learning_rate": 0.00036363636363636367,
+ "loss": 3.3514,
+ "step": 24
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.4479260471887312,
+ "learning_rate": 0.0003787878787878788,
+ "loss": 3.4726,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.4183706085619042,
+ "learning_rate": 0.0003939393939393939,
+ "loss": 3.3137,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.34080903495237713,
+ "learning_rate": 0.00040909090909090913,
+ "loss": 3.3513,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.420025479184975,
+ "learning_rate": 0.00042424242424242425,
+ "loss": 3.3705,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.36218992891812585,
+ "learning_rate": 0.0004393939393939394,
+ "loss": 3.4135,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.3096433434196739,
+ "learning_rate": 0.00045454545454545455,
+ "loss": 3.3605,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.2807026693325098,
+ "learning_rate": 0.0004696969696969697,
+ "loss": 3.3288,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 0.3009964909251879,
+ "learning_rate": 0.0004848484848484849,
+ "loss": 3.418,
+ "step": 32
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.31003615955700947,
+ "learning_rate": 0.0005,
+ "loss": 3.3855,
+ "step": 33
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.2783028514708941,
+ "learning_rate": 0.0005151515151515151,
+ "loss": 3.2931,
+ "step": 34
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.35674493138208874,
+ "learning_rate": 0.0005303030303030302,
+ "loss": 3.3635,
+ "step": 35
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.30949912461147805,
+ "learning_rate": 0.0005454545454545455,
+ "loss": 3.3317,
+ "step": 36
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.38908122672278156,
+ "learning_rate": 0.0005606060606060606,
+ "loss": 3.3245,
+ "step": 37
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.26431255935996784,
+ "learning_rate": 0.0005757575757575758,
+ "loss": 3.3801,
+ "step": 38
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.25767181172578363,
+ "learning_rate": 0.0005909090909090909,
+ "loss": 3.2561,
+ "step": 39
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.22586884430058085,
+ "learning_rate": 0.0006060606060606061,
+ "loss": 3.2544,
+ "step": 40
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.24522296246194483,
+ "learning_rate": 0.0006212121212121212,
+ "loss": 3.2865,
+ "step": 41
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.2613397595392716,
+ "learning_rate": 0.0006363636363636364,
+ "loss": 3.2942,
+ "step": 42
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.22287680350326503,
+ "learning_rate": 0.0006515151515151515,
+ "loss": 3.25,
+ "step": 43
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.22692188092074825,
+ "learning_rate": 0.0006666666666666666,
+ "loss": 3.3004,
+ "step": 44
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.21866678728129452,
+ "learning_rate": 0.0006818181818181818,
+ "loss": 3.2689,
+ "step": 45
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.20556698700512469,
+ "learning_rate": 0.000696969696969697,
+ "loss": 3.2188,
+ "step": 46
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.1983753782236358,
+ "learning_rate": 0.0007121212121212122,
+ "loss": 3.2435,
+ "step": 47
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.2634319598836911,
+ "learning_rate": 0.0007272727272727273,
+ "loss": 3.3002,
+ "step": 48
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.25082407215675556,
+ "learning_rate": 0.0007424242424242425,
+ "loss": 3.2474,
+ "step": 49
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.2375967536641897,
+ "learning_rate": 0.0007575757575757576,
+ "loss": 3.2127,
+ "step": 50
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.20696732126498024,
+ "learning_rate": 0.0007727272727272727,
+ "loss": 3.2099,
+ "step": 51
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.24265474741688506,
+ "learning_rate": 0.0007878787878787878,
+ "loss": 3.3539,
+ "step": 52
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.22266772388033193,
+ "learning_rate": 0.000803030303030303,
+ "loss": 3.2449,
+ "step": 53
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.22225045940489616,
+ "learning_rate": 0.0008181818181818183,
+ "loss": 3.2976,
+ "step": 54
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.24153054106814778,
+ "learning_rate": 0.0008333333333333334,
+ "loss": 3.2897,
+ "step": 55
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.20412685491428859,
+ "learning_rate": 0.0008484848484848485,
+ "loss": 3.2287,
+ "step": 56
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.21827449183413722,
+ "learning_rate": 0.0008636363636363636,
+ "loss": 3.3074,
+ "step": 57
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.18622872567362825,
+ "learning_rate": 0.0008787878787878789,
+ "loss": 3.3201,
+ "step": 58
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.20062612683025371,
+ "learning_rate": 0.000893939393939394,
+ "loss": 3.1586,
+ "step": 59
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.21473820905594826,
+ "learning_rate": 0.0009090909090909091,
+ "loss": 3.2013,
+ "step": 60
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.24383808863838155,
+ "learning_rate": 0.0009242424242424242,
+ "loss": 3.3438,
+ "step": 61
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.21601652978421765,
+ "learning_rate": 0.0009393939393939394,
+ "loss": 3.2749,
+ "step": 62
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.29900367447167747,
+ "learning_rate": 0.0009545454545454546,
+ "loss": 3.3167,
+ "step": 63
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.21355653550374018,
+ "learning_rate": 0.0009696969696969698,
+ "loss": 3.2358,
+ "step": 64
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.18945908829901142,
+ "learning_rate": 0.000984848484848485,
+ "loss": 3.2054,
+ "step": 65
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.20355164364360293,
+ "learning_rate": 0.001,
+ "loss": 3.2272,
+ "step": 66
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.2842539645365136,
+ "learning_rate": 0.0009999994484067654,
+ "loss": 3.234,
+ "step": 67
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.20583616474278296,
+ "learning_rate": 0.0009999977936282788,
+ "loss": 3.2786,
+ "step": 68
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.24305835320909328,
+ "learning_rate": 0.0009999950356681913,
+ "loss": 3.3004,
+ "step": 69
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.23357366259014867,
+ "learning_rate": 0.0009999911745325876,
+ "loss": 3.282,
+ "step": 70
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.30361334500985254,
+ "learning_rate": 0.0009999862102299873,
+ "loss": 3.2412,
+ "step": 71
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.286590019990373,
+ "learning_rate": 0.0009999801427713433,
+ "loss": 3.2641,
+ "step": 72
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.18539793496605042,
+ "learning_rate": 0.0009999729721700424,
+ "loss": 3.1712,
+ "step": 73
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.2589926682894037,
+ "learning_rate": 0.000999964698441906,
+ "loss": 3.2085,
+ "step": 74
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.24034664668735461,
+ "learning_rate": 0.0009999553216051892,
+ "loss": 3.1326,
+ "step": 75
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.23180930719109027,
+ "learning_rate": 0.00099994484168058,
+ "loss": 3.2729,
+ "step": 76
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.25347319835741183,
+ "learning_rate": 0.0009999332586912019,
+ "loss": 3.3387,
+ "step": 77
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2462396010922684,
+ "learning_rate": 0.0009999205726626108,
+ "loss": 3.1833,
+ "step": 78
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.24864467180144795,
+ "learning_rate": 0.000999906783622797,
+ "loss": 3.2221,
+ "step": 79
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.25649737552783836,
+ "learning_rate": 0.0009998918916021842,
+ "loss": 3.2308,
+ "step": 80
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.22457621530227787,
+ "learning_rate": 0.0009998758966336297,
+ "loss": 3.2333,
+ "step": 81
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.24769145989843225,
+ "learning_rate": 0.0009998587987524242,
+ "loss": 3.2538,
+ "step": 82
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.28179964167546395,
+ "learning_rate": 0.0009998405979962926,
+ "loss": 3.1712,
+ "step": 83
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2796987913023117,
+ "learning_rate": 0.000999821294405392,
+ "loss": 3.3078,
+ "step": 84
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.24009171626355022,
+ "learning_rate": 0.0009998008880223134,
+ "loss": 3.1914,
+ "step": 85
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.25344643749899454,
+ "learning_rate": 0.000999779378892081,
+ "loss": 3.2863,
+ "step": 86
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2617649279329783,
+ "learning_rate": 0.0009997567670621522,
+ "loss": 3.2152,
+ "step": 87
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.247912380901747,
+ "learning_rate": 0.0009997330525824165,
+ "loss": 3.2351,
+ "step": 88
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2900024120954255,
+ "learning_rate": 0.0009997082355051976,
+ "loss": 3.2128,
+ "step": 89
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.25101322265836973,
+ "learning_rate": 0.000999682315885251,
+ "loss": 3.1657,
+ "step": 90
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2648386867068071,
+ "learning_rate": 0.0009996552937797645,
+ "loss": 3.2439,
+ "step": 91
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2799749394773899,
+ "learning_rate": 0.0009996271692483596,
+ "loss": 3.2151,
+ "step": 92
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2674270391585295,
+ "learning_rate": 0.0009995979423530893,
+ "loss": 3.1977,
+ "step": 93
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3210412220143388,
+ "learning_rate": 0.000999567613158439,
+ "loss": 3.1077,
+ "step": 94
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.2947368262023574,
+ "learning_rate": 0.0009995361817313263,
+ "loss": 3.1676,
+ "step": 95
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3456003427049554,
+ "learning_rate": 0.0009995036481411004,
+ "loss": 3.188,
+ "step": 96
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3664523495245974,
+ "learning_rate": 0.0009994700124595429,
+ "loss": 3.1644,
+ "step": 97
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.42184118371899193,
+ "learning_rate": 0.0009994352747608663,
+ "loss": 3.1927,
+ "step": 98
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.27918338787851127,
+ "learning_rate": 0.0009993994351217151,
+ "loss": 3.1717,
+ "step": 99
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.29793102210825956,
+ "learning_rate": 0.000999362493621165,
+ "loss": 3.1233,
+ "step": 100
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.29087497348410807,
+ "learning_rate": 0.0009993244503407226,
+ "loss": 3.1813,
+ "step": 101
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3571009272430818,
+ "learning_rate": 0.0009992853053643258,
+ "loss": 3.1666,
+ "step": 102
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.31331899199996993,
+ "learning_rate": 0.0009992450587783426,
+ "loss": 3.1657,
+ "step": 103
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.2826655202326434,
+ "learning_rate": 0.000999203710671572,
+ "loss": 3.1029,
+ "step": 104
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.5468975810993932,
+ "learning_rate": 0.0009991612611352438,
+ "loss": 3.1091,
+ "step": 105
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.33256400656790225,
+ "learning_rate": 0.0009991177102630173,
+ "loss": 3.2427,
+ "step": 106
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.31709581977891316,
+ "learning_rate": 0.0009990730581509817,
+ "loss": 3.0717,
+ "step": 107
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.34705697134885305,
+ "learning_rate": 0.0009990273048976566,
+ "loss": 3.1785,
+ "step": 108
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.35981912393352494,
+ "learning_rate": 0.0009989804506039905,
+ "loss": 3.1043,
+ "step": 109
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.346194274795714,
+ "learning_rate": 0.0009989324953733614,
+ "loss": 3.2477,
+ "step": 110
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3401534066242236,
+ "learning_rate": 0.0009988834393115766,
+ "loss": 3.1536,
+ "step": 111
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3554676679140988,
+ "learning_rate": 0.000998833282526872,
+ "loss": 3.1855,
+ "step": 112
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3909977361122383,
+ "learning_rate": 0.0009987820251299122,
+ "loss": 3.2158,
+ "step": 113
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.37122346423058705,
+ "learning_rate": 0.00099872966723379,
+ "loss": 3.1672,
+ "step": 114
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.522554240685808,
+ "learning_rate": 0.0009986762089540266,
+ "loss": 3.1851,
+ "step": 115
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.35985866065735506,
+ "learning_rate": 0.0009986216504085709,
+ "loss": 3.1918,
+ "step": 116
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.30770921688199926,
+ "learning_rate": 0.0009985659917177991,
+ "loss": 3.1577,
+ "step": 117
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.40635750569504187,
+ "learning_rate": 0.0009985092330045155,
+ "loss": 3.1876,
+ "step": 118
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3766732194461309,
+ "learning_rate": 0.0009984513743939508,
+ "loss": 3.0661,
+ "step": 119
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3625203466025057,
+ "learning_rate": 0.0009983924160137626,
+ "loss": 3.1608,
+ "step": 120
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.47003774308771534,
+ "learning_rate": 0.000998332357994035,
+ "loss": 3.2001,
+ "step": 121
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.38478340886961315,
+ "learning_rate": 0.0009982712004672786,
+ "loss": 3.2099,
+ "step": 122
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.45838267585496867,
+ "learning_rate": 0.0009982089435684295,
+ "loss": 3.1826,
+ "step": 123
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.34412697766413863,
+ "learning_rate": 0.0009981455874348499,
+ "loss": 3.0881,
+ "step": 124
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.34807302562739034,
+ "learning_rate": 0.0009980811322063269,
+ "loss": 3.1833,
+ "step": 125
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4516526453740421,
+ "learning_rate": 0.0009980155780250728,
+ "loss": 3.2114,
+ "step": 126
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3508829564012217,
+ "learning_rate": 0.0009979489250357243,
+ "loss": 3.1938,
+ "step": 127
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3582826948602218,
+ "learning_rate": 0.0009978811733853431,
+ "loss": 3.19,
+ "step": 128
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.44321431890876684,
+ "learning_rate": 0.0009978123232234147,
+ "loss": 3.0988,
+ "step": 129
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.35700023792523344,
+ "learning_rate": 0.000997742374701848,
+ "loss": 3.2637,
+ "step": 130
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3665696464621877,
+ "learning_rate": 0.0009976713279749754,
+ "loss": 3.1424,
+ "step": 131
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.44510717812513917,
+ "learning_rate": 0.0009975991831995528,
+ "loss": 3.1873,
+ "step": 132
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3734302321575592,
+ "learning_rate": 0.0009975259405347581,
+ "loss": 3.181,
+ "step": 133
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.42082445812512104,
+ "learning_rate": 0.0009974516001421926,
+ "loss": 3.1357,
+ "step": 134
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3685053006045275,
+ "learning_rate": 0.000997376162185878,
+ "loss": 3.1298,
+ "step": 135
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3594896612472505,
+ "learning_rate": 0.0009972996268322594,
+ "loss": 3.1629,
+ "step": 136
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.39079746834778073,
+ "learning_rate": 0.0009972219942502017,
+ "loss": 3.1533,
+ "step": 137
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4252318434724073,
+ "learning_rate": 0.0009971432646109918,
+ "loss": 3.1193,
+ "step": 138
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.49812831272747765,
+ "learning_rate": 0.0009970634380883365,
+ "loss": 3.1244,
+ "step": 139
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.5110587237874594,
+ "learning_rate": 0.0009969825148583627,
+ "loss": 3.1887,
+ "step": 140
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.48143558413479914,
+ "learning_rate": 0.0009969004950996173,
+ "loss": 3.3034,
+ "step": 141
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.5112762539403898,
+ "learning_rate": 0.0009968173789930668,
+ "loss": 3.1917,
+ "step": 142
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4805434453957813,
+ "learning_rate": 0.0009967331667220958,
+ "loss": 3.1652,
+ "step": 143
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4874021606254128,
+ "learning_rate": 0.0009966478584725086,
+ "loss": 3.2053,
+ "step": 144
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4297742728579911,
+ "learning_rate": 0.0009965614544325263,
+ "loss": 3.1733,
+ "step": 145
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3905967070905647,
+ "learning_rate": 0.000996473954792789,
+ "loss": 3.0817,
+ "step": 146
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4280834937647011,
+ "learning_rate": 0.0009963853597463532,
+ "loss": 3.1281,
+ "step": 147
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3359043836449778,
+ "learning_rate": 0.000996295669488693,
+ "loss": 3.0964,
+ "step": 148
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3534378791820928,
+ "learning_rate": 0.0009962048842176979,
+ "loss": 3.2623,
+ "step": 149
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3066185390227753,
+ "learning_rate": 0.0009961130041336748,
+ "loss": 3.1143,
+ "step": 150
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3803144582921856,
+ "learning_rate": 0.0009960200294393449,
+ "loss": 3.1212,
+ "step": 151
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.32082121352742826,
+ "learning_rate": 0.0009959259603398453,
+ "loss": 3.1619,
+ "step": 152
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.36701924997211677,
+ "learning_rate": 0.0009958307970427275,
+ "loss": 3.1991,
+ "step": 153
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.36686133419615596,
+ "learning_rate": 0.0009957345397579572,
+ "loss": 3.1865,
+ "step": 154
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3564539329012241,
+ "learning_rate": 0.0009956371886979138,
+ "loss": 3.0974,
+ "step": 155
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3418901143027754,
+ "learning_rate": 0.00099553874407739,
+ "loss": 3.0482,
+ "step": 156
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.38015423021009515,
+ "learning_rate": 0.0009954392061135916,
+ "loss": 3.2042,
+ "step": 157
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4386073956196807,
+ "learning_rate": 0.0009953385750261364,
+ "loss": 3.2345,
+ "step": 158
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3169445021310773,
+ "learning_rate": 0.0009952368510370538,
+ "loss": 3.1145,
+ "step": 159
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.35951450264953977,
+ "learning_rate": 0.0009951340343707852,
+ "loss": 3.1045,
+ "step": 160
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.32798069132131075,
+ "learning_rate": 0.0009950301252541823,
+ "loss": 3.1183,
+ "step": 161
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3904989944568574,
+ "learning_rate": 0.0009949251239165075,
+ "loss": 3.1286,
+ "step": 162
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.30512398159770077,
+ "learning_rate": 0.000994819030589433,
+ "loss": 3.1768,
+ "step": 163
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4011624069371094,
+ "learning_rate": 0.00099471184550704,
+ "loss": 3.0485,
+ "step": 164
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.41451481144561814,
+ "learning_rate": 0.0009946035689058189,
+ "loss": 3.1481,
+ "step": 165
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.26381935864972084,
+ "learning_rate": 0.0009944942010246681,
+ "loss": 3.0833,
+ "step": 166
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3419242537979968,
+ "learning_rate": 0.0009943837421048942,
+ "loss": 3.0472,
+ "step": 167
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.34447990233394127,
+ "learning_rate": 0.0009942721923902106,
+ "loss": 3.0917,
+ "step": 168
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3728645733588767,
+ "learning_rate": 0.0009941595521267377,
+ "loss": 3.1017,
+ "step": 169
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.34711049493990437,
+ "learning_rate": 0.0009940458215630017,
+ "loss": 3.0877,
+ "step": 170
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.42929467506027785,
+ "learning_rate": 0.0009939310009499348,
+ "loss": 3.0691,
+ "step": 171
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3814021500241551,
+ "learning_rate": 0.000993815090540874,
+ "loss": 3.1251,
+ "step": 172
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.33545123474958655,
+ "learning_rate": 0.000993698090591561,
+ "loss": 3.0904,
+ "step": 173
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.36248493333958093,
+ "learning_rate": 0.0009935800013601416,
+ "loss": 3.0909,
+ "step": 174
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.407606412033747,
+ "learning_rate": 0.000993460823107164,
+ "loss": 3.015,
+ "step": 175
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.440066954136704,
+ "learning_rate": 0.0009933405560955803,
+ "loss": 3.1716,
+ "step": 176
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3027853347371362,
+ "learning_rate": 0.0009932192005907446,
+ "loss": 3.1006,
+ "step": 177
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.44580073187868,
+ "learning_rate": 0.0009930967568604118,
+ "loss": 3.1437,
+ "step": 178
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.44241266508105725,
+ "learning_rate": 0.000992973225174739,
+ "loss": 3.1017,
+ "step": 179
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.40252865710661473,
+ "learning_rate": 0.0009928486058062827,
+ "loss": 3.0697,
+ "step": 180
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3360873735383713,
+ "learning_rate": 0.0009927228990299999,
+ "loss": 3.1069,
+ "step": 181
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4929878868250257,
+ "learning_rate": 0.0009925961051232468,
+ "loss": 3.0647,
+ "step": 182
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3677316973159758,
+ "learning_rate": 0.000992468224365778,
+ "loss": 3.1592,
+ "step": 183
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.35336923910895757,
+ "learning_rate": 0.000992339257039746,
+ "loss": 3.1018,
+ "step": 184
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4881781072457554,
+ "learning_rate": 0.0009922092034297006,
+ "loss": 3.1429,
+ "step": 185
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4098565089723956,
+ "learning_rate": 0.0009920780638225891,
+ "loss": 3.0504,
+ "step": 186
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3986354522702949,
+ "learning_rate": 0.0009919458385077538,
+ "loss": 3.0307,
+ "step": 187
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.42090433384126896,
+ "learning_rate": 0.0009918125277769336,
+ "loss": 3.0698,
+ "step": 188
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3094627301738968,
+ "learning_rate": 0.0009916781319242614,
+ "loss": 3.0684,
+ "step": 189
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.36934208448043304,
+ "learning_rate": 0.0009915426512462646,
+ "loss": 3.0473,
+ "step": 190
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4241182259162769,
+ "learning_rate": 0.0009914060860418644,
+ "loss": 2.9886,
+ "step": 191
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.360519110911635,
+ "learning_rate": 0.000991268436612374,
+ "loss": 3.0842,
+ "step": 192
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3894310871575452,
+ "learning_rate": 0.0009911297032614997,
+ "loss": 3.0872,
+ "step": 193
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3460496489199759,
+ "learning_rate": 0.000990989886295339,
+ "loss": 3.0611,
+ "step": 194
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.40715178718250794,
+ "learning_rate": 0.0009908489860223804,
+ "loss": 3.1642,
+ "step": 195
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.37299568736473254,
+ "learning_rate": 0.000990707002753502,
+ "loss": 3.1023,
+ "step": 196
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.38294803848331316,
+ "learning_rate": 0.0009905639368019724,
+ "loss": 3.1043,
+ "step": 197
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3503362196219181,
+ "learning_rate": 0.0009904197884834482,
+ "loss": 3.0494,
+ "step": 198
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.33472918027169796,
+ "learning_rate": 0.0009902745581159742,
+ "loss": 3.0736,
+ "step": 199
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3907983585030184,
+ "learning_rate": 0.0009901282460199829,
+ "loss": 3.1758,
+ "step": 200
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4319910256466935,
+ "learning_rate": 0.0009899808525182935,
+ "loss": 3.2207,
+ "step": 201
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.40067230024591877,
+ "learning_rate": 0.0009898323779361107,
+ "loss": 3.104,
+ "step": 202
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.36248685288215027,
+ "learning_rate": 0.000989682822601025,
+ "loss": 3.099,
+ "step": 203
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4385860370736562,
+ "learning_rate": 0.0009895321868430113,
+ "loss": 3.0837,
+ "step": 204
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.41327537411136184,
+ "learning_rate": 0.0009893804709944281,
+ "loss": 3.1774,
+ "step": 205
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3242003626014227,
+ "learning_rate": 0.0009892276753900174,
+ "loss": 3.1147,
+ "step": 206
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.3316290082694402,
+ "learning_rate": 0.0009890738003669028,
+ "loss": 3.142,
+ "step": 207
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3765821945043076,
+ "learning_rate": 0.0009889188462645904,
+ "loss": 3.0915,
+ "step": 208
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.41294071539326416,
+ "learning_rate": 0.0009887628134249667,
+ "loss": 3.0086,
+ "step": 209
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.45568227974494685,
+ "learning_rate": 0.0009886057021922983,
+ "loss": 3.0688,
+ "step": 210
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3641627502224344,
+ "learning_rate": 0.0009884475129132311,
+ "loss": 3.1122,
+ "step": 211
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3637979653745758,
+ "learning_rate": 0.0009882882459367897,
+ "loss": 3.0994,
+ "step": 212
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4042583246319238,
+ "learning_rate": 0.0009881279016143766,
+ "loss": 3.1259,
+ "step": 213
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.43459733198585077,
+ "learning_rate": 0.0009879664802997707,
+ "loss": 3.091,
+ "step": 214
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.33729232123450803,
+ "learning_rate": 0.000987803982349128,
+ "loss": 3.023,
+ "step": 215
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.34830785595832925,
+ "learning_rate": 0.0009876404081209796,
+ "loss": 3.0465,
+ "step": 216
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.30321106455671487,
+ "learning_rate": 0.000987475757976231,
+ "loss": 3.0906,
+ "step": 217
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.32847808320070143,
+ "learning_rate": 0.000987310032278162,
+ "loss": 3.04,
+ "step": 218
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.37128576290028625,
+ "learning_rate": 0.0009871432313924254,
+ "loss": 3.1254,
+ "step": 219
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3904406119810515,
+ "learning_rate": 0.000986975355687046,
+ "loss": 3.1185,
+ "step": 220
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.34449695720225765,
+ "learning_rate": 0.0009868064055324204,
+ "loss": 3.16,
+ "step": 221
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3976340694450251,
+ "learning_rate": 0.0009866363813013153,
+ "loss": 3.0949,
+ "step": 222
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.34084669907604487,
+ "learning_rate": 0.0009864652833688676,
+ "loss": 3.0894,
+ "step": 223
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.33395654198947006,
+ "learning_rate": 0.0009862931121125836,
+ "loss": 3.0474,
+ "step": 224
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.358514166163155,
+ "learning_rate": 0.000986119867912337,
+ "loss": 3.0366,
+ "step": 225
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4028139779170852,
+ "learning_rate": 0.000985945551150369,
+ "loss": 3.0603,
+ "step": 226
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3529760515048113,
+ "learning_rate": 0.0009857701622112876,
+ "loss": 3.0667,
+ "step": 227
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.3170631531362109,
+ "learning_rate": 0.000985593701482066,
+ "loss": 3.0543,
+ "step": 228
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.370398749189497,
+ "learning_rate": 0.0009854161693520424,
+ "loss": 3.0696,
+ "step": 229
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.41262306466320503,
+ "learning_rate": 0.0009852375662129194,
+ "loss": 3.1347,
+ "step": 230
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.35015372867402056,
+ "learning_rate": 0.0009850578924587613,
+ "loss": 3.0721,
+ "step": 231
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5247874313394523,
+ "learning_rate": 0.000984877148485996,
+ "loss": 3.0986,
+ "step": 232
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3438656420808496,
+ "learning_rate": 0.000984695334693412,
+ "loss": 3.0774,
+ "step": 233
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5068113209878243,
+ "learning_rate": 0.000984512451482158,
+ "loss": 3.0759,
+ "step": 234
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3801666278223146,
+ "learning_rate": 0.0009843284992557431,
+ "loss": 3.0166,
+ "step": 235
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4031713135230167,
+ "learning_rate": 0.000984143478420034,
+ "loss": 3.0716,
+ "step": 236
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3919803174621406,
+ "learning_rate": 0.0009839573893832563,
+ "loss": 3.0114,
+ "step": 237
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.412614724427646,
+ "learning_rate": 0.000983770232555991,
+ "loss": 3.0284,
+ "step": 238
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.38861797436155937,
+ "learning_rate": 0.0009835820083511765,
+ "loss": 3.0204,
+ "step": 239
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.420649484317253,
+ "learning_rate": 0.0009833927171841055,
+ "loss": 3.0871,
+ "step": 240
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3436895148234437,
+ "learning_rate": 0.0009832023594724246,
+ "loss": 3.0371,
+ "step": 241
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4977533883161228,
+ "learning_rate": 0.0009830109356361344,
+ "loss": 3.0796,
+ "step": 242
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.46770630615594383,
+ "learning_rate": 0.0009828184460975867,
+ "loss": 3.1241,
+ "step": 243
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3405127568515615,
+ "learning_rate": 0.0009826248912814855,
+ "loss": 3.0704,
+ "step": 244
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.43585885417141285,
+ "learning_rate": 0.0009824302716148847,
+ "loss": 3.0523,
+ "step": 245
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3784397202246994,
+ "learning_rate": 0.0009822345875271884,
+ "loss": 3.0607,
+ "step": 246
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3612886944968433,
+ "learning_rate": 0.0009820378394501481,
+ "loss": 3.0677,
+ "step": 247
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.46323298303665233,
+ "learning_rate": 0.0009818400278178636,
+ "loss": 3.0591,
+ "step": 248
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3961179593677903,
+ "learning_rate": 0.0009816411530667814,
+ "loss": 3.1108,
+ "step": 249
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.3695238344091931,
+ "learning_rate": 0.000981441215635693,
+ "loss": 3.0924,
+ "step": 250
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.36843645332025493,
+ "learning_rate": 0.0009812402159657353,
+ "loss": 3.1609,
+ "step": 251
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.32276352504292505,
+ "learning_rate": 0.000981038154500388,
+ "loss": 3.0904,
+ "step": 252
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.37888253834219504,
+ "learning_rate": 0.0009808350316854746,
+ "loss": 3.0965,
+ "step": 253
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.41369471531397267,
+ "learning_rate": 0.0009806308479691594,
+ "loss": 3.0482,
+ "step": 254
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5050477162622338,
+ "learning_rate": 0.0009804256038019482,
+ "loss": 3.1263,
+ "step": 255
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.34026371543946976,
+ "learning_rate": 0.0009802192996366857,
+ "loss": 3.061,
+ "step": 256
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4245208940016001,
+ "learning_rate": 0.0009800119359285563,
+ "loss": 3.0431,
+ "step": 257
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.37506589166664517,
+ "learning_rate": 0.0009798035131350813,
+ "loss": 3.1463,
+ "step": 258
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.37313255951418756,
+ "learning_rate": 0.0009795940317161194,
+ "loss": 3.07,
+ "step": 259
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.42269961722536287,
+ "learning_rate": 0.0009793834921338646,
+ "loss": 3.1383,
+ "step": 260
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.3389548356810243,
+ "learning_rate": 0.0009791718948528457,
+ "loss": 3.0375,
+ "step": 261
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.32654709381308455,
+ "learning_rate": 0.0009789592403399252,
+ "loss": 3.0144,
+ "step": 262
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.40173434434090866,
+ "learning_rate": 0.0009787455290642985,
+ "loss": 3.1823,
+ "step": 263
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.408982640639924,
+ "learning_rate": 0.000978530761497492,
+ "loss": 3.0832,
+ "step": 264
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.38731718187779257,
+ "learning_rate": 0.0009783149381133633,
+ "loss": 3.0578,
+ "step": 265
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.42259224858776406,
+ "learning_rate": 0.0009780980593880992,
+ "loss": 3.1442,
+ "step": 266
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.3876621429917515,
+ "learning_rate": 0.0009778801258002153,
+ "loss": 3.0876,
+ "step": 267
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4163519181888417,
+ "learning_rate": 0.000977661137830554,
+ "loss": 3.012,
+ "step": 268
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.38985011483720106,
+ "learning_rate": 0.0009774410959622845,
+ "loss": 3.1249,
+ "step": 269
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.46455592845532495,
+ "learning_rate": 0.000977220000680901,
+ "loss": 3.1456,
+ "step": 270
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4027776245562286,
+ "learning_rate": 0.000976997852474223,
+ "loss": 3.0659,
+ "step": 271
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.3816985790047567,
+ "learning_rate": 0.0009767746518323914,
+ "loss": 3.0043,
+ "step": 272
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.41261844785420154,
+ "learning_rate": 0.0009765503992478704,
+ "loss": 3.009,
+ "step": 273
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.36835901123608655,
+ "learning_rate": 0.0009763250952154449,
+ "loss": 2.9904,
+ "step": 274
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.32998175178952166,
+ "learning_rate": 0.0009760987402322195,
+ "loss": 2.9029,
+ "step": 275
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4260106192619869,
+ "learning_rate": 0.0009758713347976178,
+ "loss": 3.0216,
+ "step": 276
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3870961495694908,
+ "learning_rate": 0.000975642879413381,
+ "loss": 2.9878,
+ "step": 277
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4174527132207995,
+ "learning_rate": 0.0009754133745835665,
+ "loss": 3.0643,
+ "step": 278
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4029533582642676,
+ "learning_rate": 0.0009751828208145482,
+ "loss": 3.0785,
+ "step": 279
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3895931852306838,
+ "learning_rate": 0.0009749512186150131,
+ "loss": 3.0815,
+ "step": 280
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5086565728112548,
+ "learning_rate": 0.0009747185684959625,
+ "loss": 3.0875,
+ "step": 281
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.37365257996112927,
+ "learning_rate": 0.000974484870970709,
+ "loss": 3.0746,
+ "step": 282
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.490076308009277,
+ "learning_rate": 0.0009742501265548767,
+ "loss": 3.0152,
+ "step": 283
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.41254506177996875,
+ "learning_rate": 0.0009740143357663993,
+ "loss": 2.9818,
+ "step": 284
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3838775862128416,
+ "learning_rate": 0.000973777499125519,
+ "loss": 3.1868,
+ "step": 285
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.38913201426801886,
+ "learning_rate": 0.0009735396171547859,
+ "loss": 3.0598,
+ "step": 286
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3823012457788521,
+ "learning_rate": 0.0009733006903790564,
+ "loss": 3.0607,
+ "step": 287
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3978502328899154,
+ "learning_rate": 0.0009730607193254922,
+ "loss": 3.0591,
+ "step": 288
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.347077370076456,
+ "learning_rate": 0.0009728197045235585,
+ "loss": 3.0708,
+ "step": 289
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.37783800568719766,
+ "learning_rate": 0.0009725776465050242,
+ "loss": 3.0399,
+ "step": 290
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4217624192785018,
+ "learning_rate": 0.0009723345458039594,
+ "loss": 3.0407,
+ "step": 291
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.32165483395756367,
+ "learning_rate": 0.000972090402956735,
+ "loss": 3.0493,
+ "step": 292
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4360824150328011,
+ "learning_rate": 0.0009718452185020212,
+ "loss": 2.9652,
+ "step": 293
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.43330122037829966,
+ "learning_rate": 0.0009715989929807862,
+ "loss": 2.9801,
+ "step": 294
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.41001610598617005,
+ "learning_rate": 0.0009713517269362955,
+ "loss": 2.9461,
+ "step": 295
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4144637963102469,
+ "learning_rate": 0.0009711034209141101,
+ "loss": 2.9965,
+ "step": 296
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3701930647112967,
+ "learning_rate": 0.0009708540754620856,
+ "loss": 3.0521,
+ "step": 297
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3825439615068257,
+ "learning_rate": 0.0009706036911303713,
+ "loss": 3.0686,
+ "step": 298
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4128835260011084,
+ "learning_rate": 0.0009703522684714083,
+ "loss": 2.9953,
+ "step": 299
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3280555843458318,
+ "learning_rate": 0.0009700998080399286,
+ "loss": 3.0322,
+ "step": 300
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3725217170778832,
+ "learning_rate": 0.0009698463103929542,
+ "loss": 3.0147,
+ "step": 301
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.39515643278691637,
+ "learning_rate": 0.0009695917760897954,
+ "loss": 3.0105,
+ "step": 302
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4232558536424627,
+ "learning_rate": 0.0009693362056920501,
+ "loss": 3.1031,
+ "step": 303
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4202414580287667,
+ "learning_rate": 0.0009690795997636015,
+ "loss": 3.1314,
+ "step": 304
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.39624437333223433,
+ "learning_rate": 0.0009688219588706179,
+ "loss": 3.055,
+ "step": 305
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.41136642314878974,
+ "learning_rate": 0.0009685632835815518,
+ "loss": 2.989,
+ "step": 306
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4177366332284791,
+ "learning_rate": 0.0009683035744671367,
+ "loss": 2.9753,
+ "step": 307
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3970226513729988,
+ "learning_rate": 0.0009680428321003883,
+ "loss": 3.0365,
+ "step": 308
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.38110270918590355,
+ "learning_rate": 0.000967781057056601,
+ "loss": 2.9469,
+ "step": 309
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3697971849456713,
+ "learning_rate": 0.0009675182499133485,
+ "loss": 2.9979,
+ "step": 310
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4331451984129536,
+ "learning_rate": 0.0009672544112504813,
+ "loss": 3.0407,
+ "step": 311
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3616484279728095,
+ "learning_rate": 0.0009669895416501257,
+ "loss": 3.1342,
+ "step": 312
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.37218154897792766,
+ "learning_rate": 0.0009667236416966833,
+ "loss": 3.0381,
+ "step": 313
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.37419327824541976,
+ "learning_rate": 0.0009664567119768281,
+ "loss": 2.8755,
+ "step": 314
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3155059428062107,
+ "learning_rate": 0.0009661887530795067,
+ "loss": 2.9914,
+ "step": 315
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.3949939578745368,
+ "learning_rate": 0.0009659197655959365,
+ "loss": 3.0807,
+ "step": 316
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.31426579913752195,
+ "learning_rate": 0.000965649750119604,
+ "loss": 2.976,
+ "step": 317
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3408230630340435,
+ "learning_rate": 0.0009653787072462643,
+ "loss": 3.0837,
+ "step": 318
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.32482881318206414,
+ "learning_rate": 0.0009651066375739388,
+ "loss": 3.0367,
+ "step": 319
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4406793658735449,
+ "learning_rate": 0.000964833541702915,
+ "loss": 3.1011,
+ "step": 320
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3481328978465737,
+ "learning_rate": 0.0009645594202357438,
+ "loss": 3.0784,
+ "step": 321
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4393307086359459,
+ "learning_rate": 0.0009642842737772397,
+ "loss": 2.8947,
+ "step": 322
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.43752919462490486,
+ "learning_rate": 0.0009640081029344782,
+ "loss": 3.071,
+ "step": 323
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.37736357584126834,
+ "learning_rate": 0.0009637309083167956,
+ "loss": 2.9506,
+ "step": 324
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3602814204911502,
+ "learning_rate": 0.0009634526905357859,
+ "loss": 3.0239,
+ "step": 325
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3596100505994644,
+ "learning_rate": 0.000963173450205302,
+ "loss": 3.0367,
+ "step": 326
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4334493087535296,
+ "learning_rate": 0.0009628931879414517,
+ "loss": 3.0921,
+ "step": 327
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3783923722847131,
+ "learning_rate": 0.0009626119043625983,
+ "loss": 3.11,
+ "step": 328
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3046694614313011,
+ "learning_rate": 0.0009623296000893582,
+ "loss": 3.0126,
+ "step": 329
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.425235501439072,
+ "learning_rate": 0.0009620462757446,
+ "loss": 3.0894,
+ "step": 330
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3216106049676534,
+ "learning_rate": 0.0009617619319534428,
+ "loss": 3.1163,
+ "step": 331
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3472098257471848,
+ "learning_rate": 0.000961476569343255,
+ "loss": 3.0816,
+ "step": 332
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.35173229152698954,
+ "learning_rate": 0.0009611901885436529,
+ "loss": 2.9969,
+ "step": 333
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.40106827096491526,
+ "learning_rate": 0.0009609027901864996,
+ "loss": 3.0212,
+ "step": 334
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3929889679678991,
+ "learning_rate": 0.0009606143749059029,
+ "loss": 3.112,
+ "step": 335
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.39221382910515895,
+ "learning_rate": 0.0009603249433382144,
+ "loss": 3.0363,
+ "step": 336
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.3601887065466546,
+ "learning_rate": 0.0009600344961220282,
+ "loss": 2.9733,
+ "step": 337
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.33364639736779833,
+ "learning_rate": 0.0009597430338981791,
+ "loss": 3.0109,
+ "step": 338
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.37952514149492095,
+ "learning_rate": 0.0009594505573097414,
+ "loss": 3.0996,
+ "step": 339
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.343678600846518,
+ "learning_rate": 0.0009591570670020277,
+ "loss": 3.0483,
+ "step": 340
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.38162315190104007,
+ "learning_rate": 0.0009588625636225871,
+ "loss": 3.0494,
+ "step": 341
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.38900186267495107,
+ "learning_rate": 0.0009585670478212036,
+ "loss": 3.0475,
+ "step": 342
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3547236979507731,
+ "learning_rate": 0.0009582705202498956,
+ "loss": 3.0814,
+ "step": 343
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3541931958597881,
+ "learning_rate": 0.0009579729815629133,
+ "loss": 3.0476,
+ "step": 344
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.35049473194110103,
+ "learning_rate": 0.0009576744324167379,
+ "loss": 2.933,
+ "step": 345
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.34441489282496096,
+ "learning_rate": 0.0009573748734700804,
+ "loss": 3.0244,
+ "step": 346
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4199185465525522,
+ "learning_rate": 0.0009570743053838796,
+ "loss": 3.0113,
+ "step": 347
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.38617070634441353,
+ "learning_rate": 0.0009567727288213005,
+ "loss": 3.1451,
+ "step": 348
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.44819902575061427,
+ "learning_rate": 0.0009564701444477337,
+ "loss": 2.9829,
+ "step": 349
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.34059077559714435,
+ "learning_rate": 0.000956166552930793,
+ "loss": 3.0406,
+ "step": 350
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3645602618649916,
+ "learning_rate": 0.0009558619549403147,
+ "loss": 3.06,
+ "step": 351
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.39208578749562084,
+ "learning_rate": 0.0009555563511483555,
+ "loss": 3.0527,
+ "step": 352
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3770502395520707,
+ "learning_rate": 0.0009552497422291912,
+ "loss": 3.1927,
+ "step": 353
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3983228924369584,
+ "learning_rate": 0.0009549421288593157,
+ "loss": 2.997,
+ "step": 354
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.32744425245185593,
+ "learning_rate": 0.0009546335117174385,
+ "loss": 3.0433,
+ "step": 355
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3835355627882164,
+ "learning_rate": 0.0009543238914844843,
+ "loss": 2.973,
+ "step": 356
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.34138468117166965,
+ "learning_rate": 0.0009540132688435907,
+ "loss": 3.0594,
+ "step": 357
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.359079563584912,
+ "learning_rate": 0.0009537016444801074,
+ "loss": 2.9678,
+ "step": 358
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.3223813307103922,
+ "learning_rate": 0.0009533890190815935,
+ "loss": 3.0013,
+ "step": 359
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.308504217885357,
+ "learning_rate": 0.0009530753933378173,
+ "loss": 3.0314,
+ "step": 360
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4361122624243665,
+ "learning_rate": 0.0009527607679407545,
+ "loss": 2.9537,
+ "step": 361
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4633391785851036,
+ "learning_rate": 0.0009524451435845857,
+ "loss": 3.0154,
+ "step": 362
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5374727955719102,
+ "learning_rate": 0.0009521285209656963,
+ "loss": 3.0789,
+ "step": 363
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.39414308450429925,
+ "learning_rate": 0.0009518109007826734,
+ "loss": 2.9723,
+ "step": 364
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.44630026288084773,
+ "learning_rate": 0.0009514922837363059,
+ "loss": 2.9975,
+ "step": 365
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.3957709360320667,
+ "learning_rate": 0.0009511726705295817,
+ "loss": 2.9573,
+ "step": 366
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.33871651053995877,
+ "learning_rate": 0.000950852061867687,
+ "loss": 3.0267,
+ "step": 367
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.3728807763613917,
+ "learning_rate": 0.0009505304584580038,
+ "loss": 3.1214,
+ "step": 368
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.44764094688692174,
+ "learning_rate": 0.0009502078610101092,
+ "loss": 3.063,
+ "step": 369
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.29086555292606237,
+ "learning_rate": 0.0009498842702357736,
+ "loss": 3.0583,
+ "step": 370
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.3716739239837171,
+ "learning_rate": 0.0009495596868489587,
+ "loss": 3.0653,
+ "step": 371
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.39640398088367923,
+ "learning_rate": 0.0009492341115658167,
+ "loss": 3.0584,
+ "step": 372
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.3766703047691341,
+ "learning_rate": 0.0009489075451046879,
+ "loss": 3.1651,
+ "step": 373
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.35019574306766604,
+ "learning_rate": 0.0009485799881861,
+ "loss": 3.1084,
+ "step": 374
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.35841022172921566,
+ "learning_rate": 0.0009482514415327654,
+ "loss": 3.1133,
+ "step": 375
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.38936474944586286,
+ "learning_rate": 0.000947921905869581,
+ "loss": 3.0046,
+ "step": 376
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.39880468708779415,
+ "learning_rate": 0.0009475913819236248,
+ "loss": 3.0976,
+ "step": 377
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4146888992893563,
+ "learning_rate": 0.0009472598704241561,
+ "loss": 3.0416,
+ "step": 378
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.357782133964611,
+ "learning_rate": 0.0009469273721026131,
+ "loss": 2.9554,
+ "step": 379
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.37755340517028924,
+ "learning_rate": 0.0009465938876926111,
+ "loss": 3.0289,
+ "step": 380
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.35010266246401905,
+ "learning_rate": 0.0009462594179299406,
+ "loss": 3.0015,
+ "step": 381
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.33963111070416246,
+ "learning_rate": 0.0009459239635525672,
+ "loss": 2.9498,
+ "step": 382
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.44878720298048863,
+ "learning_rate": 0.0009455875253006281,
+ "loss": 3.0511,
+ "step": 383
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.36595385625515603,
+ "learning_rate": 0.0009452501039164315,
+ "loss": 3.0609,
+ "step": 384
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.43688707472167565,
+ "learning_rate": 0.0009449117001444549,
+ "loss": 3.0517,
+ "step": 385
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4429753487645164,
+ "learning_rate": 0.0009445723147313433,
+ "loss": 2.9969,
+ "step": 386
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.35202446136402515,
+ "learning_rate": 0.0009442319484259074,
+ "loss": 3.0091,
+ "step": 387
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.3797171070497617,
+ "learning_rate": 0.0009438906019791222,
+ "loss": 3.0492,
+ "step": 388
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.37165201774338413,
+ "learning_rate": 0.0009435482761441251,
+ "loss": 3.0925,
+ "step": 389
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.36595879634515716,
+ "learning_rate": 0.000943204971676215,
+ "loss": 2.9587,
+ "step": 390
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.3779647098052585,
+ "learning_rate": 0.0009428606893328493,
+ "loss": 3.0311,
+ "step": 391
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.35165670810273936,
+ "learning_rate": 0.0009425154298736432,
+ "loss": 2.976,
+ "step": 392
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.36498437355672114,
+ "learning_rate": 0.0009421691940603678,
+ "loss": 3.063,
+ "step": 393
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.35179412427489737,
+ "learning_rate": 0.0009418219826569488,
+ "loss": 3.1012,
+ "step": 394
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.36079967496034876,
+ "learning_rate": 0.0009414737964294635,
+ "loss": 3.0213,
+ "step": 395
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.34256607659527255,
+ "learning_rate": 0.000941124636146141,
+ "loss": 3.0679,
+ "step": 396
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.3549817318620011,
+ "learning_rate": 0.0009407745025773589,
+ "loss": 3.0031,
+ "step": 397
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.330078946370381,
+ "learning_rate": 0.0009404233964956423,
+ "loss": 2.9591,
+ "step": 398
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.40998871363969147,
+ "learning_rate": 0.0009400713186756625,
+ "loss": 2.9835,
+ "step": 399
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.352243618429148,
+ "learning_rate": 0.0009397182698942342,
+ "loss": 3.0615,
+ "step": 400
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.3808616925820115,
+ "learning_rate": 0.0009393642509303149,
+ "loss": 2.9857,
+ "step": 401
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4548595948388096,
+ "learning_rate": 0.0009390092625650023,
+ "loss": 3.053,
+ "step": 402
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.33985124216131535,
+ "learning_rate": 0.0009386533055815332,
+ "loss": 2.9026,
+ "step": 403
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3746595726901059,
+ "learning_rate": 0.0009382963807652813,
+ "loss": 3.0954,
+ "step": 404
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.36284595806226505,
+ "learning_rate": 0.000937938488903756,
+ "loss": 3.0051,
+ "step": 405
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.34181065429432705,
+ "learning_rate": 0.0009375796307866003,
+ "loss": 3.0497,
+ "step": 406
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3968805290556801,
+ "learning_rate": 0.0009372198072055888,
+ "loss": 3.0338,
+ "step": 407
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3767021528018277,
+ "learning_rate": 0.0009368590189546268,
+ "loss": 2.9779,
+ "step": 408
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.36215240217803607,
+ "learning_rate": 0.0009364972668297474,
+ "loss": 3.0198,
+ "step": 409
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.38600578427214505,
+ "learning_rate": 0.0009361345516291111,
+ "loss": 3.0258,
+ "step": 410
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.43476215558882736,
+ "learning_rate": 0.0009357708741530024,
+ "loss": 3.0052,
+ "step": 411
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3370388121396525,
+ "learning_rate": 0.00093540623520383,
+ "loss": 2.8979,
+ "step": 412
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.38243454516908587,
+ "learning_rate": 0.000935040635586123,
+ "loss": 3.0426,
+ "step": 413
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3583872518114288,
+ "learning_rate": 0.0009346740761065305,
+ "loss": 3.0802,
+ "step": 414
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3546379643623189,
+ "learning_rate": 0.0009343065575738197,
+ "loss": 2.9866,
+ "step": 415
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4091988613955461,
+ "learning_rate": 0.0009339380807988733,
+ "loss": 3.0785,
+ "step": 416
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3860208094364181,
+ "learning_rate": 0.0009335686465946887,
+ "loss": 2.9699,
+ "step": 417
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3432326152940613,
+ "learning_rate": 0.0009331982557763754,
+ "loss": 2.9306,
+ "step": 418
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3429300378943377,
+ "learning_rate": 0.0009328269091611537,
+ "loss": 2.9609,
+ "step": 419
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3641224088858899,
+ "learning_rate": 0.0009324546075683524,
+ "loss": 3.0038,
+ "step": 420
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.36519157896745197,
+ "learning_rate": 0.0009320813518194083,
+ "loss": 3.0605,
+ "step": 421
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.3965291718194792,
+ "learning_rate": 0.0009317071427378624,
+ "loss": 2.9831,
+ "step": 422
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4271886417384712,
+ "learning_rate": 0.0009313319811493594,
+ "loss": 3.0276,
+ "step": 423
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.37843904982575016,
+ "learning_rate": 0.000930955867881646,
+ "loss": 2.9988,
+ "step": 424
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.36069240541378456,
+ "learning_rate": 0.0009305788037645681,
+ "loss": 3.0143,
+ "step": 425
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.36703673994094954,
+ "learning_rate": 0.0009302007896300697,
+ "loss": 2.9989,
+ "step": 426
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3976170284027046,
+ "learning_rate": 0.0009298218263121911,
+ "loss": 2.9599,
+ "step": 427
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4034457824631604,
+ "learning_rate": 0.0009294419146470668,
+ "loss": 3.0401,
+ "step": 428
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.418734191424572,
+ "learning_rate": 0.0009290610554729234,
+ "loss": 3.0595,
+ "step": 429
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.37274305578559314,
+ "learning_rate": 0.0009286792496300784,
+ "loss": 2.9852,
+ "step": 430
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4170344028130344,
+ "learning_rate": 0.0009282964979609379,
+ "loss": 3.007,
+ "step": 431
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3607730668793172,
+ "learning_rate": 0.0009279128013099947,
+ "loss": 2.9874,
+ "step": 432
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3462683438292709,
+ "learning_rate": 0.0009275281605238268,
+ "loss": 3.0909,
+ "step": 433
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3612093734564574,
+ "learning_rate": 0.0009271425764510953,
+ "loss": 2.9598,
+ "step": 434
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3576686462551345,
+ "learning_rate": 0.0009267560499425423,
+ "loss": 2.9604,
+ "step": 435
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.35703506027179843,
+ "learning_rate": 0.0009263685818509895,
+ "loss": 3.1084,
+ "step": 436
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3770989547235674,
+ "learning_rate": 0.000925980173031336,
+ "loss": 3.0226,
+ "step": 437
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3932188905020812,
+ "learning_rate": 0.0009255908243405567,
+ "loss": 3.0149,
+ "step": 438
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3876250513058803,
+ "learning_rate": 0.0009252005366376996,
+ "loss": 2.969,
+ "step": 439
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.32608013795696617,
+ "learning_rate": 0.0009248093107838852,
+ "loss": 2.9333,
+ "step": 440
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3449540645912457,
+ "learning_rate": 0.0009244171476423036,
+ "loss": 2.9967,
+ "step": 441
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3794289845839539,
+ "learning_rate": 0.0009240240480782129,
+ "loss": 2.9835,
+ "step": 442
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4063737805808367,
+ "learning_rate": 0.0009236300129589376,
+ "loss": 3.1053,
+ "step": 443
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3644018750247308,
+ "learning_rate": 0.0009232350431538657,
+ "loss": 3.0151,
+ "step": 444
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.42239440812917967,
+ "learning_rate": 0.0009228391395344482,
+ "loss": 2.9478,
+ "step": 445
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.33831732618878846,
+ "learning_rate": 0.000922442302974196,
+ "loss": 3.002,
+ "step": 446
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4064280093515599,
+ "learning_rate": 0.0009220445343486785,
+ "loss": 3.034,
+ "step": 447
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3844164278498118,
+ "learning_rate": 0.0009216458345355217,
+ "loss": 3.0303,
+ "step": 448
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3616373758850986,
+ "learning_rate": 0.0009212462044144061,
+ "loss": 2.9903,
+ "step": 449
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.38399083454144134,
+ "learning_rate": 0.0009208456448670648,
+ "loss": 3.0291,
+ "step": 450
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.41163305173164816,
+ "learning_rate": 0.0009204441567772816,
+ "loss": 3.0085,
+ "step": 451
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.35898792847237554,
+ "learning_rate": 0.0009200417410308888,
+ "loss": 3.0465,
+ "step": 452
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4109831864631326,
+ "learning_rate": 0.0009196383985157656,
+ "loss": 2.9328,
+ "step": 453
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.43211755121393275,
+ "learning_rate": 0.000919234130121836,
+ "loss": 3.032,
+ "step": 454
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3681933198070181,
+ "learning_rate": 0.0009188289367410672,
+ "loss": 3.0247,
+ "step": 455
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3934799673896836,
+ "learning_rate": 0.0009184228192674666,
+ "loss": 3.008,
+ "step": 456
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.42770916649350355,
+ "learning_rate": 0.0009180157785970808,
+ "loss": 2.9727,
+ "step": 457
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4697866775584965,
+ "learning_rate": 0.0009176078156279932,
+ "loss": 3.0412,
+ "step": 458
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.42101208513743466,
+ "learning_rate": 0.0009171989312603226,
+ "loss": 2.9935,
+ "step": 459
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4505515808661039,
+ "learning_rate": 0.0009167891263962202,
+ "loss": 2.9561,
+ "step": 460
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4467306746864722,
+ "learning_rate": 0.0009163784019398685,
+ "loss": 2.8782,
+ "step": 461
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3891637347745176,
+ "learning_rate": 0.0009159667587974785,
+ "loss": 3.0503,
+ "step": 462
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.3730094483881156,
+ "learning_rate": 0.0009155541978772887,
+ "loss": 3.0227,
+ "step": 463
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.48771240337649496,
+ "learning_rate": 0.0009151407200895625,
+ "loss": 3.0235,
+ "step": 464
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.36693303975440605,
+ "learning_rate": 0.000914726326346586,
+ "loss": 3.0011,
+ "step": 465
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.32971616282456623,
+ "learning_rate": 0.0009143110175626661,
+ "loss": 2.9545,
+ "step": 466
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.38150187536192526,
+ "learning_rate": 0.0009138947946541291,
+ "loss": 2.96,
+ "step": 467
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4472040192267348,
+ "learning_rate": 0.0009134776585393181,
+ "loss": 3.0762,
+ "step": 468
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3226878384022191,
+ "learning_rate": 0.0009130596101385906,
+ "loss": 2.9699,
+ "step": 469
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.42409300684786483,
+ "learning_rate": 0.0009126406503743174,
+ "loss": 2.9525,
+ "step": 470
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.45654470862857754,
+ "learning_rate": 0.0009122207801708802,
+ "loss": 2.9441,
+ "step": 471
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.38952346446223823,
+ "learning_rate": 0.0009118000004546689,
+ "loss": 3.0372,
+ "step": 472
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3443706413974396,
+ "learning_rate": 0.0009113783121540807,
+ "loss": 2.9585,
+ "step": 473
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4104338465788305,
+ "learning_rate": 0.0009109557161995172,
+ "loss": 2.9687,
+ "step": 474
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4552101005088764,
+ "learning_rate": 0.0009105322135233828,
+ "loss": 2.987,
+ "step": 475
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.39145863922499496,
+ "learning_rate": 0.0009101078050600821,
+ "loss": 3.0379,
+ "step": 476
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3783872072153069,
+ "learning_rate": 0.0009096824917460186,
+ "loss": 2.969,
+ "step": 477
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4030080088320596,
+ "learning_rate": 0.0009092562745195921,
+ "loss": 2.9767,
+ "step": 478
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.33979560457747027,
+ "learning_rate": 0.0009088291543211967,
+ "loss": 2.9938,
+ "step": 479
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.42730419455295693,
+ "learning_rate": 0.0009084011320932188,
+ "loss": 2.9847,
+ "step": 480
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.410229211091005,
+ "learning_rate": 0.0009079722087800352,
+ "loss": 3.0274,
+ "step": 481
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.38101379180594525,
+ "learning_rate": 0.0009075423853280106,
+ "loss": 3.0676,
+ "step": 482
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3407146841516656,
+ "learning_rate": 0.0009071116626854958,
+ "loss": 3.0172,
+ "step": 483
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.39152409867676435,
+ "learning_rate": 0.0009066800418028256,
+ "loss": 2.9887,
+ "step": 484
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.45181061123541305,
+ "learning_rate": 0.0009062475236323168,
+ "loss": 3.0301,
+ "step": 485
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4652562871269597,
+ "learning_rate": 0.0009058141091282656,
+ "loss": 2.9417,
+ "step": 486
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4448908407619625,
+ "learning_rate": 0.0009053797992469461,
+ "loss": 2.992,
+ "step": 487
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4003611336072728,
+ "learning_rate": 0.0009049445949466078,
+ "loss": 3.0096,
+ "step": 488
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3842463104216409,
+ "learning_rate": 0.0009045084971874737,
+ "loss": 3.0618,
+ "step": 489
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.3609147102251282,
+ "learning_rate": 0.0009040715069317382,
+ "loss": 3.0263,
+ "step": 490
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.47411502256984955,
+ "learning_rate": 0.0009036336251435648,
+ "loss": 2.9462,
+ "step": 491
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.38321533689637727,
+ "learning_rate": 0.0009031948527890839,
+ "loss": 3.0064,
+ "step": 492
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.34528294914377844,
+ "learning_rate": 0.000902755190836391,
+ "loss": 3.0187,
+ "step": 493
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4646759985843907,
+ "learning_rate": 0.0009023146402555442,
+ "loss": 2.9627,
+ "step": 494
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5835338877564443,
+ "learning_rate": 0.0009018732020185624,
+ "loss": 3.0271,
+ "step": 495
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.36377022526342,
+ "learning_rate": 0.0009014308770994235,
+ "loss": 3.0563,
+ "step": 496
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.49565074113720914,
+ "learning_rate": 0.0009009876664740605,
+ "loss": 2.9942,
+ "step": 497
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4751826491897462,
+ "learning_rate": 0.0009005435711203618,
+ "loss": 3.0329,
+ "step": 498
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3670861386945321,
+ "learning_rate": 0.000900098592018167,
+ "loss": 2.9491,
+ "step": 499
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.42485480682569554,
+ "learning_rate": 0.0008996527301492663,
+ "loss": 3.0531,
+ "step": 500
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.35943311972932884,
+ "learning_rate": 0.0008992059864973972,
+ "loss": 3.0024,
+ "step": 501
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.337417912732133,
+ "learning_rate": 0.0008987583620482427,
+ "loss": 3.0454,
+ "step": 502
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3428296650946238,
+ "learning_rate": 0.0008983098577894292,
+ "loss": 2.9289,
+ "step": 503
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.36038286528985136,
+ "learning_rate": 0.0008978604747105246,
+ "loss": 3.0238,
+ "step": 504
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3510666323811725,
+ "learning_rate": 0.0008974102138030354,
+ "loss": 2.9146,
+ "step": 505
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3506505431274821,
+ "learning_rate": 0.000896959076060405,
+ "loss": 3.0443,
+ "step": 506
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3069524001735543,
+ "learning_rate": 0.0008965070624780116,
+ "loss": 2.9504,
+ "step": 507
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3131860083690858,
+ "learning_rate": 0.0008960541740531658,
+ "loss": 2.9002,
+ "step": 508
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.36367655052217773,
+ "learning_rate": 0.0008956004117851083,
+ "loss": 3.0219,
+ "step": 509
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.37444145617583885,
+ "learning_rate": 0.0008951457766750079,
+ "loss": 2.9686,
+ "step": 510
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3619002349361225,
+ "learning_rate": 0.0008946902697259593,
+ "loss": 3.0131,
+ "step": 511
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.3376371471761367,
+ "learning_rate": 0.0008942338919429805,
+ "loss": 3.0174,
+ "step": 512
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3696306909402485,
+ "learning_rate": 0.0008937766443330113,
+ "loss": 2.9895,
+ "step": 513
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.38754563503612915,
+ "learning_rate": 0.0008933185279049103,
+ "loss": 2.8997,
+ "step": 514
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.39290539104720124,
+ "learning_rate": 0.0008928595436694532,
+ "loss": 2.9938,
+ "step": 515
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.31920651405501405,
+ "learning_rate": 0.0008923996926393305,
+ "loss": 3.0605,
+ "step": 516
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3958315899161004,
+ "learning_rate": 0.0008919389758291449,
+ "loss": 3.0501,
+ "step": 517
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.38094534700271443,
+ "learning_rate": 0.0008914773942554098,
+ "loss": 2.977,
+ "step": 518
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3605023325478126,
+ "learning_rate": 0.000891014948936546,
+ "loss": 3.0329,
+ "step": 519
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.35453215931490867,
+ "learning_rate": 0.0008905516408928804,
+ "loss": 2.9367,
+ "step": 520
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4108516131497139,
+ "learning_rate": 0.0008900874711466434,
+ "loss": 3.0332,
+ "step": 521
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.31203369408739334,
+ "learning_rate": 0.0008896224407219666,
+ "loss": 2.9061,
+ "step": 522
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.35855862716741904,
+ "learning_rate": 0.0008891565506448804,
+ "loss": 2.9996,
+ "step": 523
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.33713315410862926,
+ "learning_rate": 0.0008886898019433122,
+ "loss": 2.9601,
+ "step": 524
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.34409825947012346,
+ "learning_rate": 0.0008882221956470836,
+ "loss": 2.8993,
+ "step": 525
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3902067674639701,
+ "learning_rate": 0.0008877537327879086,
+ "loss": 2.9346,
+ "step": 526
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4474466740021483,
+ "learning_rate": 0.0008872844143993908,
+ "loss": 2.9799,
+ "step": 527
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3842942455552743,
+ "learning_rate": 0.0008868142415170218,
+ "loss": 2.9134,
+ "step": 528
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3674315649805593,
+ "learning_rate": 0.0008863432151781781,
+ "loss": 2.9784,
+ "step": 529
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.42064014224610025,
+ "learning_rate": 0.0008858713364221195,
+ "loss": 2.8683,
+ "step": 530
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3713071697433955,
+ "learning_rate": 0.0008853986062899868,
+ "loss": 2.9929,
+ "step": 531
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.45447467935880426,
+ "learning_rate": 0.0008849250258247986,
+ "loss": 2.979,
+ "step": 532
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.37497523170590813,
+ "learning_rate": 0.0008844505960714503,
+ "loss": 2.9813,
+ "step": 533
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.361104019261604,
+ "learning_rate": 0.0008839753180767108,
+ "loss": 2.9985,
+ "step": 534
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.4025698558580232,
+ "learning_rate": 0.0008834991928892204,
+ "loss": 2.9941,
+ "step": 535
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.46463771964197514,
+ "learning_rate": 0.000883022221559489,
+ "loss": 2.9521,
+ "step": 536
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.35837948534073394,
+ "learning_rate": 0.0008825444051398934,
+ "loss": 3.0411,
+ "step": 537
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3304227313989365,
+ "learning_rate": 0.0008820657446846745,
+ "loss": 3.0361,
+ "step": 538
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3790526632003062,
+ "learning_rate": 0.000881586241249936,
+ "loss": 2.9866,
+ "step": 539
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.393477187545182,
+ "learning_rate": 0.0008811058958936411,
+ "loss": 2.9391,
+ "step": 540
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.33872378898612115,
+ "learning_rate": 0.000880624709675611,
+ "loss": 3.0038,
+ "step": 541
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3511784535615359,
+ "learning_rate": 0.000880142683657522,
+ "loss": 2.9586,
+ "step": 542
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.36929385426185596,
+ "learning_rate": 0.0008796598189029029,
+ "loss": 3.0291,
+ "step": 543
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3311190663226015,
+ "learning_rate": 0.0008791761164771338,
+ "loss": 3.0047,
+ "step": 544
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.35900153290900755,
+ "learning_rate": 0.0008786915774474424,
+ "loss": 2.9453,
+ "step": 545
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3741342498523061,
+ "learning_rate": 0.0008782062028829027,
+ "loss": 3.0425,
+ "step": 546
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3707356021270875,
+ "learning_rate": 0.0008777199938544318,
+ "loss": 2.9948,
+ "step": 547
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.36783798252764055,
+ "learning_rate": 0.0008772329514347883,
+ "loss": 2.9191,
+ "step": 548
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.35990151284903615,
+ "learning_rate": 0.0008767450766985694,
+ "loss": 2.9914,
+ "step": 549
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.38890731145346374,
+ "learning_rate": 0.0008762563707222086,
+ "loss": 2.9755,
+ "step": 550
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3664666823024524,
+ "learning_rate": 0.0008757668345839738,
+ "loss": 3.0402,
+ "step": 551
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3708294353615344,
+ "learning_rate": 0.0008752764693639638,
+ "loss": 3.0458,
+ "step": 552
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.42553113847287094,
+ "learning_rate": 0.0008747852761441078,
+ "loss": 2.9099,
+ "step": 553
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3495238956935285,
+ "learning_rate": 0.0008742932560081607,
+ "loss": 2.9578,
+ "step": 554
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.3743044689380776,
+ "learning_rate": 0.0008738004100417025,
+ "loss": 2.9351,
+ "step": 555
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.343726093451688,
+ "learning_rate": 0.0008733067393321355,
+ "loss": 2.9648,
+ "step": 556
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.36468342710872254,
+ "learning_rate": 0.000872812244968681,
+ "loss": 3.0267,
+ "step": 557
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.35699980194865466,
+ "learning_rate": 0.0008723169280423783,
+ "loss": 2.9573,
+ "step": 558
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.383264688216931,
+ "learning_rate": 0.0008718207896460811,
+ "loss": 2.9877,
+ "step": 559
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.37277484651622494,
+ "learning_rate": 0.0008713238308744557,
+ "loss": 3.011,
+ "step": 560
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.39206445745470164,
+ "learning_rate": 0.0008708260528239789,
+ "loss": 2.9365,
+ "step": 561
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.371204491964282,
+ "learning_rate": 0.000870327456592934,
+ "loss": 3.0417,
+ "step": 562
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3921852047996319,
+ "learning_rate": 0.0008698280432814107,
+ "loss": 2.9876,
+ "step": 563
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.43614917310490947,
+ "learning_rate": 0.000869327813991301,
+ "loss": 3.0017,
+ "step": 564
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.35089149751275567,
+ "learning_rate": 0.0008688267698262971,
+ "loss": 2.9324,
+ "step": 565
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3358856617264399,
+ "learning_rate": 0.0008683249118918894,
+ "loss": 2.9521,
+ "step": 566
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.39063461567547386,
+ "learning_rate": 0.0008678222412953637,
+ "loss": 2.9671,
+ "step": 567
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.35525253637149373,
+ "learning_rate": 0.0008673187591457987,
+ "loss": 2.9608,
+ "step": 568
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.35545369780960845,
+ "learning_rate": 0.0008668144665540639,
+ "loss": 2.999,
+ "step": 569
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3527031619396671,
+ "learning_rate": 0.0008663093646328167,
+ "loss": 2.9501,
+ "step": 570
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.4039194092554435,
+ "learning_rate": 0.0008658034544965003,
+ "loss": 2.8983,
+ "step": 571
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.40730708445215014,
+ "learning_rate": 0.0008652967372613412,
+ "loss": 3.0516,
+ "step": 572
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3358476761370905,
+ "learning_rate": 0.0008647892140453466,
+ "loss": 3.0074,
+ "step": 573
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.40576001397109285,
+ "learning_rate": 0.0008642808859683021,
+ "loss": 2.8992,
+ "step": 574
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3898104545419264,
+ "learning_rate": 0.0008637717541517689,
+ "loss": 2.9789,
+ "step": 575
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.3691200088648983,
+ "learning_rate": 0.0008632618197190816,
+ "loss": 3.001,
+ "step": 576
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.37944092326529066,
+ "learning_rate": 0.0008627510837953458,
+ "loss": 2.9984,
+ "step": 577
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.39224031981357727,
+ "learning_rate": 0.0008622395475074355,
+ "loss": 2.8906,
+ "step": 578
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4118229079205374,
+ "learning_rate": 0.0008617272119839903,
+ "loss": 2.9857,
+ "step": 579
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.3325153953096171,
+ "learning_rate": 0.0008612140783554136,
+ "loss": 3.0059,
+ "step": 580
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.38426610462622196,
+ "learning_rate": 0.0008607001477538696,
+ "loss": 2.8733,
+ "step": 581
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.38008333851299325,
+ "learning_rate": 0.0008601854213132807,
+ "loss": 2.9627,
+ "step": 582
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.3793131914366739,
+ "learning_rate": 0.0008596699001693256,
+ "loss": 2.8991,
+ "step": 583
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.363830363664409,
+ "learning_rate": 0.000859153585459436,
+ "loss": 3.0055,
+ "step": 584
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4451437958527428,
+ "learning_rate": 0.0008586364783227949,
+ "loss": 2.9515,
+ "step": 585
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.34040307511551604,
+ "learning_rate": 0.0008581185799003332,
+ "loss": 2.925,
+ "step": 586
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.352249678631388,
+ "learning_rate": 0.0008575998913347283,
+ "loss": 2.9069,
+ "step": 587
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.47017117850406265,
+ "learning_rate": 0.0008570804137704004,
+ "loss": 2.9156,
+ "step": 588
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.36586111848688074,
+ "learning_rate": 0.0008565601483535108,
+ "loss": 2.934,
+ "step": 589
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.33358332466977636,
+ "learning_rate": 0.0008560390962319591,
+ "loss": 2.945,
+ "step": 590
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.38438650609937136,
+ "learning_rate": 0.0008555172585553804,
+ "loss": 2.9552,
+ "step": 591
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4442069186303742,
+ "learning_rate": 0.0008549946364751435,
+ "loss": 2.9533,
+ "step": 592
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.43701093306683925,
+ "learning_rate": 0.0008544712311443475,
+ "loss": 2.9994,
+ "step": 593
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.3612177805376717,
+ "learning_rate": 0.0008539470437178196,
+ "loss": 2.9345,
+ "step": 594
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4115859790357746,
+ "learning_rate": 0.000853422075352113,
+ "loss": 3.0567,
+ "step": 595
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.35858061301333993,
+ "learning_rate": 0.0008528963272055035,
+ "loss": 3.0011,
+ "step": 596
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.35686966697691824,
+ "learning_rate": 0.0008523698004379877,
+ "loss": 3.0192,
+ "step": 597
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.41066475696087945,
+ "learning_rate": 0.00085184249621128,
+ "loss": 2.9431,
+ "step": 598
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.3970389271426842,
+ "learning_rate": 0.0008513144156888101,
+ "loss": 3.0076,
+ "step": 599
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.38302665054595586,
+ "learning_rate": 0.0008507855600357207,
+ "loss": 2.8779,
+ "step": 600
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.36343892264326666,
+ "learning_rate": 0.0008502559304188644,
+ "loss": 2.9326,
+ "step": 601
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.41970070230448897,
+ "learning_rate": 0.0008497255280068019,
+ "loss": 2.965,
+ "step": 602
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4024437210622577,
+ "learning_rate": 0.0008491943539697986,
+ "loss": 2.9958,
+ "step": 603
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3939772643847545,
+ "learning_rate": 0.0008486624094798226,
+ "loss": 2.9777,
+ "step": 604
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.36735871764049577,
+ "learning_rate": 0.0008481296957105417,
+ "loss": 3.0535,
+ "step": 605
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.48713930202570355,
+ "learning_rate": 0.0008475962138373213,
+ "loss": 3.0121,
+ "step": 606
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4012150462336618,
+ "learning_rate": 0.0008470619650372211,
+ "loss": 2.9852,
+ "step": 607
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3429394391596876,
+ "learning_rate": 0.0008465269504889934,
+ "loss": 2.9056,
+ "step": 608
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.37574731722199345,
+ "learning_rate": 0.0008459911713730799,
+ "loss": 3.1098,
+ "step": 609
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3922764322030697,
+ "learning_rate": 0.0008454546288716089,
+ "loss": 2.9534,
+ "step": 610
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3729365456421718,
+ "learning_rate": 0.0008449173241683935,
+ "loss": 2.9399,
+ "step": 611
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3492550807047831,
+ "learning_rate": 0.0008443792584489281,
+ "loss": 2.9232,
+ "step": 612
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.39820617886328336,
+ "learning_rate": 0.0008438404329003863,
+ "loss": 2.9397,
+ "step": 613
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4218483747860042,
+ "learning_rate": 0.0008433008487116183,
+ "loss": 2.9722,
+ "step": 614
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3972233056525467,
+ "learning_rate": 0.0008427605070731481,
+ "loss": 3.0587,
+ "step": 615
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3793363341273652,
+ "learning_rate": 0.0008422194091771708,
+ "loss": 2.8867,
+ "step": 616
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.33524083815787253,
+ "learning_rate": 0.0008416775562175503,
+ "loss": 2.9215,
+ "step": 617
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4184927221646228,
+ "learning_rate": 0.000841134949389816,
+ "loss": 2.9101,
+ "step": 618
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3974688763713095,
+ "learning_rate": 0.0008405915898911611,
+ "loss": 2.9988,
+ "step": 619
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.38480211921976964,
+ "learning_rate": 0.0008400474789204396,
+ "loss": 2.9472,
+ "step": 620
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.3674202341981731,
+ "learning_rate": 0.0008395026176781626,
+ "loss": 2.9722,
+ "step": 621
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36692044533335305,
+ "learning_rate": 0.0008389570073664976,
+ "loss": 2.9596,
+ "step": 622
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.35569263917614125,
+ "learning_rate": 0.0008384106491892642,
+ "loss": 2.9057,
+ "step": 623
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3726968587592252,
+ "learning_rate": 0.0008378635443519327,
+ "loss": 2.9168,
+ "step": 624
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3826543787390288,
+ "learning_rate": 0.0008373156940616199,
+ "loss": 2.9195,
+ "step": 625
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3949923552204002,
+ "learning_rate": 0.0008367670995270882,
+ "loss": 2.9144,
+ "step": 626
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3518306911250362,
+ "learning_rate": 0.0008362177619587416,
+ "loss": 3.0139,
+ "step": 627
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.39492224902191664,
+ "learning_rate": 0.0008356676825686238,
+ "loss": 2.9048,
+ "step": 628
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3800814799471778,
+ "learning_rate": 0.0008351168625704147,
+ "loss": 3.0319,
+ "step": 629
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.37690989158843985,
+ "learning_rate": 0.0008345653031794292,
+ "loss": 2.9879,
+ "step": 630
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36729130976074215,
+ "learning_rate": 0.0008340130056126125,
+ "loss": 3.0222,
+ "step": 631
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36139320084739035,
+ "learning_rate": 0.0008334599710885394,
+ "loss": 3.0363,
+ "step": 632
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4102450641837781,
+ "learning_rate": 0.0008329062008274098,
+ "loss": 3.0164,
+ "step": 633
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4045779059806604,
+ "learning_rate": 0.000832351696051048,
+ "loss": 2.9343,
+ "step": 634
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.33121111864594555,
+ "learning_rate": 0.000831796457982898,
+ "loss": 2.9336,
+ "step": 635
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36488427191703204,
+ "learning_rate": 0.0008312404878480222,
+ "loss": 2.9755,
+ "step": 636
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3502506836473527,
+ "learning_rate": 0.0008306837868730979,
+ "loss": 2.8905,
+ "step": 637
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36376020498369427,
+ "learning_rate": 0.0008301263562864152,
+ "loss": 3.0015,
+ "step": 638
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.35799894898043094,
+ "learning_rate": 0.0008295681973178737,
+ "loss": 2.881,
+ "step": 639
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.40572619411768995,
+ "learning_rate": 0.0008290093111989804,
+ "loss": 2.9824,
+ "step": 640
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.36060378624344663,
+ "learning_rate": 0.0008284496991628465,
+ "loss": 2.8484,
+ "step": 641
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.3475060431672784,
+ "learning_rate": 0.0008278893624441847,
+ "loss": 2.9695,
+ "step": 642
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.39293126436037856,
+ "learning_rate": 0.000827328302279307,
+ "loss": 2.9318,
+ "step": 643
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.37672117309589426,
+ "learning_rate": 0.0008267665199061211,
+ "loss": 2.9086,
+ "step": 644
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.36086697292551706,
+ "learning_rate": 0.0008262040165641288,
+ "loss": 3.0185,
+ "step": 645
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.35504070532402215,
+ "learning_rate": 0.0008256407934944219,
+ "loss": 2.9033,
+ "step": 646
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3286252406866373,
+ "learning_rate": 0.0008250768519396807,
+ "loss": 2.9103,
+ "step": 647
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3669057480468127,
+ "learning_rate": 0.0008245121931441706,
+ "loss": 2.9421,
+ "step": 648
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3435468532535197,
+ "learning_rate": 0.0008239468183537393,
+ "loss": 2.8939,
+ "step": 649
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4456770341680199,
+ "learning_rate": 0.0008233807288158146,
+ "loss": 2.9644,
+ "step": 650
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3228615414821542,
+ "learning_rate": 0.0008228139257794012,
+ "loss": 2.9007,
+ "step": 651
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3615450397860662,
+ "learning_rate": 0.0008222464104950778,
+ "loss": 3.0324,
+ "step": 652
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3227925510335484,
+ "learning_rate": 0.000821678184214995,
+ "loss": 2.9181,
+ "step": 653
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3881725855344593,
+ "learning_rate": 0.0008211092481928716,
+ "loss": 2.9688,
+ "step": 654
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.36851216022867095,
+ "learning_rate": 0.0008205396036839927,
+ "loss": 2.9825,
+ "step": 655
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.32633085393092087,
+ "learning_rate": 0.0008199692519452069,
+ "loss": 2.9432,
+ "step": 656
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3409600553984099,
+ "learning_rate": 0.0008193981942349224,
+ "loss": 2.9531,
+ "step": 657
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3453528490356128,
+ "learning_rate": 0.0008188264318131056,
+ "loss": 2.9162,
+ "step": 658
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3721736620260722,
+ "learning_rate": 0.0008182539659412776,
+ "loss": 2.9616,
+ "step": 659
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.39199320892686534,
+ "learning_rate": 0.0008176807978825118,
+ "loss": 2.9674,
+ "step": 660
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.39501407130892197,
+ "learning_rate": 0.0008171069289014306,
+ "loss": 2.9856,
+ "step": 661
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.37091473334615005,
+ "learning_rate": 0.0008165323602642028,
+ "loss": 2.9099,
+ "step": 662
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3942643833143458,
+ "learning_rate": 0.0008159570932385414,
+ "loss": 2.971,
+ "step": 663
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.38375836556280657,
+ "learning_rate": 0.0008153811290936999,
+ "loss": 2.961,
+ "step": 664
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.3056415187111683,
+ "learning_rate": 0.0008148044691004698,
+ "loss": 2.956,
+ "step": 665
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3686775822360584,
+ "learning_rate": 0.0008142271145311783,
+ "loss": 2.9315,
+ "step": 666
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.34674149110521235,
+ "learning_rate": 0.000813649066659685,
+ "loss": 3.0017,
+ "step": 667
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.38695993984538396,
+ "learning_rate": 0.0008130703267613787,
+ "loss": 3.0165,
+ "step": 668
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.42807013879542766,
+ "learning_rate": 0.0008124908961131759,
+ "loss": 2.9585,
+ "step": 669
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3280284522818405,
+ "learning_rate": 0.0008119107759935163,
+ "loss": 2.9464,
+ "step": 670
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.37287753038870014,
+ "learning_rate": 0.0008113299676823615,
+ "loss": 2.95,
+ "step": 671
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3633165173738787,
+ "learning_rate": 0.0008107484724611911,
+ "loss": 2.9057,
+ "step": 672
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.39529482013078093,
+ "learning_rate": 0.0008101662916130006,
+ "loss": 2.9094,
+ "step": 673
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.34080770488106626,
+ "learning_rate": 0.0008095834264222979,
+ "loss": 2.8651,
+ "step": 674
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3781224526271376,
+ "learning_rate": 0.0008089998781751009,
+ "loss": 2.9671,
+ "step": 675
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.44421497492509276,
+ "learning_rate": 0.0008084156481589349,
+ "loss": 3.0159,
+ "step": 676
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3758320716352735,
+ "learning_rate": 0.0008078307376628291,
+ "loss": 2.9855,
+ "step": 677
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.35815613480205566,
+ "learning_rate": 0.0008072451479773143,
+ "loss": 2.9093,
+ "step": 678
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3604053694228736,
+ "learning_rate": 0.0008066588803944195,
+ "loss": 2.9741,
+ "step": 679
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3576980824221762,
+ "learning_rate": 0.0008060719362076697,
+ "loss": 2.8876,
+ "step": 680
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3363694893181191,
+ "learning_rate": 0.0008054843167120826,
+ "loss": 2.9098,
+ "step": 681
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.35338673792986325,
+ "learning_rate": 0.0008048960232041663,
+ "loss": 2.9775,
+ "step": 682
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3810459731913614,
+ "learning_rate": 0.0008043070569819153,
+ "loss": 2.9146,
+ "step": 683
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.34133588333632825,
+ "learning_rate": 0.0008037174193448089,
+ "loss": 2.8479,
+ "step": 684
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.39205701888492184,
+ "learning_rate": 0.0008031271115938077,
+ "loss": 2.9702,
+ "step": 685
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.36622490311855194,
+ "learning_rate": 0.0008025361350313505,
+ "loss": 2.9487,
+ "step": 686
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.3926186524355684,
+ "learning_rate": 0.0008019444909613523,
+ "loss": 2.9715,
+ "step": 687
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3595884802951641,
+ "learning_rate": 0.0008013521806892003,
+ "loss": 2.9401,
+ "step": 688
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.35439758082665274,
+ "learning_rate": 0.000800759205521752,
+ "loss": 2.8907,
+ "step": 689
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.36999185643919275,
+ "learning_rate": 0.0008001655667673318,
+ "loss": 2.8917,
+ "step": 690
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.4376529603561137,
+ "learning_rate": 0.0007995712657357279,
+ "loss": 3.0097,
+ "step": 691
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3470565001032081,
+ "learning_rate": 0.0007989763037381904,
+ "loss": 2.9403,
+ "step": 692
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.37398930370445505,
+ "learning_rate": 0.0007983806820874271,
+ "loss": 2.9294,
+ "step": 693
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3836438909087964,
+ "learning_rate": 0.0007977844020976016,
+ "loss": 3.0593,
+ "step": 694
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3731549347408354,
+ "learning_rate": 0.00079718746508433,
+ "loss": 2.9294,
+ "step": 695
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3500266288430294,
+ "learning_rate": 0.0007965898723646776,
+ "loss": 2.9354,
+ "step": 696
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.41450393210752684,
+ "learning_rate": 0.0007959916252571573,
+ "loss": 2.8205,
+ "step": 697
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.422905614814251,
+ "learning_rate": 0.000795392725081725,
+ "loss": 2.9446,
+ "step": 698
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3702242362725959,
+ "learning_rate": 0.000794793173159778,
+ "loss": 2.924,
+ "step": 699
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3420316313597123,
+ "learning_rate": 0.0007941929708141513,
+ "loss": 2.9151,
+ "step": 700
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.32604228136090574,
+ "learning_rate": 0.0007935921193691153,
+ "loss": 2.8879,
+ "step": 701
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.329612459912358,
+ "learning_rate": 0.0007929906201503722,
+ "loss": 2.888,
+ "step": 702
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.39898217803099467,
+ "learning_rate": 0.0007923884744850536,
+ "loss": 2.9652,
+ "step": 703
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.4087154592921235,
+ "learning_rate": 0.0007917856837017176,
+ "loss": 2.9195,
+ "step": 704
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3528294783942855,
+ "learning_rate": 0.0007911822491303452,
+ "loss": 2.9571,
+ "step": 705
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3494614052494441,
+ "learning_rate": 0.0007905781721023382,
+ "loss": 2.9358,
+ "step": 706
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.37756732180370467,
+ "learning_rate": 0.000789973453950516,
+ "loss": 2.9783,
+ "step": 707
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.3636299436328679,
+ "learning_rate": 0.000789368096009112,
+ "loss": 2.9363,
+ "step": 708
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.36410957970403396,
+ "learning_rate": 0.0007887620996137721,
+ "loss": 2.8743,
+ "step": 709
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.37753888586839307,
+ "learning_rate": 0.0007881554661015497,
+ "loss": 2.9782,
+ "step": 710
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.3635408796167595,
+ "learning_rate": 0.0007875481968109051,
+ "loss": 2.9454,
+ "step": 711
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.405947328556052,
+ "learning_rate": 0.0007869402930817007,
+ "loss": 2.9312,
+ "step": 712
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.33199147433653214,
+ "learning_rate": 0.0007863317562551987,
+ "loss": 2.9343,
+ "step": 713
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.34865099467737665,
+ "learning_rate": 0.0007857225876740584,
+ "loss": 3.0304,
+ "step": 714
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.38469001745672904,
+ "learning_rate": 0.0007851127886823327,
+ "loss": 2.8475,
+ "step": 715
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.41513133821885073,
+ "learning_rate": 0.0007845023606254658,
+ "loss": 2.8741,
+ "step": 716
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4259437401838044,
+ "learning_rate": 0.0007838913048502894,
+ "loss": 2.9122,
+ "step": 717
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.40728963051524025,
+ "learning_rate": 0.0007832796227050208,
+ "loss": 2.9776,
+ "step": 718
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4336969954721156,
+ "learning_rate": 0.0007826673155392587,
+ "loss": 2.9779,
+ "step": 719
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4414949378457715,
+ "learning_rate": 0.000782054384703981,
+ "loss": 2.9477,
+ "step": 720
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.34367514488340073,
+ "learning_rate": 0.0007814408315515418,
+ "loss": 2.9435,
+ "step": 721
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.35020138993613775,
+ "learning_rate": 0.0007808266574356683,
+ "loss": 2.9562,
+ "step": 722
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.33364013333205084,
+ "learning_rate": 0.0007802118637114573,
+ "loss": 2.9742,
+ "step": 723
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.3637120420276998,
+ "learning_rate": 0.0007795964517353734,
+ "loss": 2.9141,
+ "step": 724
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.36583437947147274,
+ "learning_rate": 0.0007789804228652449,
+ "loss": 2.958,
+ "step": 725
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.36805452881759987,
+ "learning_rate": 0.0007783637784602609,
+ "loss": 2.9239,
+ "step": 726
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4250421753592241,
+ "learning_rate": 0.0007777465198809692,
+ "loss": 2.8796,
+ "step": 727
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4283976024538741,
+ "learning_rate": 0.0007771286484892722,
+ "loss": 2.929,
+ "step": 728
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.38876176439674026,
+ "learning_rate": 0.000776510165648425,
+ "loss": 2.8888,
+ "step": 729
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.40701889288435905,
+ "learning_rate": 0.0007758910727230311,
+ "loss": 2.9696,
+ "step": 730
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4131545197734033,
+ "learning_rate": 0.0007752713710790404,
+ "loss": 2.8213,
+ "step": 731
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3884081880553486,
+ "learning_rate": 0.0007746510620837459,
+ "loss": 2.8654,
+ "step": 732
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4025946091220863,
+ "learning_rate": 0.0007740301471057807,
+ "loss": 2.892,
+ "step": 733
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3668941415540861,
+ "learning_rate": 0.0007734086275151146,
+ "loss": 2.9063,
+ "step": 734
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.34898017439288703,
+ "learning_rate": 0.0007727865046830517,
+ "loss": 2.9198,
+ "step": 735
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3793674271024693,
+ "learning_rate": 0.0007721637799822269,
+ "loss": 2.9007,
+ "step": 736
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3714017104880598,
+ "learning_rate": 0.0007715404547866032,
+ "loss": 2.9481,
+ "step": 737
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.372781542760121,
+ "learning_rate": 0.0007709165304714685,
+ "loss": 2.9645,
+ "step": 738
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3797122050026506,
+ "learning_rate": 0.0007702920084134324,
+ "loss": 2.9496,
+ "step": 739
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3644274507037788,
+ "learning_rate": 0.0007696668899904236,
+ "loss": 2.8727,
+ "step": 740
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3708561858004675,
+ "learning_rate": 0.0007690411765816864,
+ "loss": 2.9345,
+ "step": 741
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3741297436173264,
+ "learning_rate": 0.0007684148695677778,
+ "loss": 2.9013,
+ "step": 742
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3407795801328756,
+ "learning_rate": 0.000767787970330565,
+ "loss": 2.8935,
+ "step": 743
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3265495944000509,
+ "learning_rate": 0.000767160480253221,
+ "loss": 2.9327,
+ "step": 744
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3657551864454065,
+ "learning_rate": 0.0007665324007202235,
+ "loss": 2.8959,
+ "step": 745
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.33690469288668645,
+ "learning_rate": 0.0007659037331173498,
+ "loss": 2.8597,
+ "step": 746
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.35937529071424873,
+ "learning_rate": 0.0007652744788316752,
+ "loss": 3.0084,
+ "step": 747
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.34683560006041414,
+ "learning_rate": 0.0007646446392515692,
+ "loss": 2.8609,
+ "step": 748
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.36836836891348074,
+ "learning_rate": 0.000764014215766693,
+ "loss": 3.0853,
+ "step": 749
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.34989586340996254,
+ "learning_rate": 0.0007633832097679958,
+ "loss": 2.9781,
+ "step": 750
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.3420484644865297,
+ "learning_rate": 0.0007627516226477122,
+ "loss": 2.8819,
+ "step": 751
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.37576717287422395,
+ "learning_rate": 0.0007621194557993589,
+ "loss": 2.9155,
+ "step": 752
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3583445769614946,
+ "learning_rate": 0.0007614867106177319,
+ "loss": 2.923,
+ "step": 753
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.39014389276324085,
+ "learning_rate": 0.0007608533884989029,
+ "loss": 3.0274,
+ "step": 754
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.36810079017133907,
+ "learning_rate": 0.0007602194908402166,
+ "loss": 2.9649,
+ "step": 755
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3613460617873112,
+ "learning_rate": 0.0007595850190402877,
+ "loss": 2.9397,
+ "step": 756
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.41501962251575586,
+ "learning_rate": 0.0007589499744989976,
+ "loss": 2.9039,
+ "step": 757
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3448498169068457,
+ "learning_rate": 0.0007583143586174916,
+ "loss": 2.9167,
+ "step": 758
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3626194370995126,
+ "learning_rate": 0.000757678172798175,
+ "loss": 2.9237,
+ "step": 759
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3996817700775701,
+ "learning_rate": 0.0007570414184447112,
+ "loss": 2.8966,
+ "step": 760
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.36215734381578274,
+ "learning_rate": 0.0007564040969620179,
+ "loss": 2.9148,
+ "step": 761
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3706356707006613,
+ "learning_rate": 0.0007557662097562636,
+ "loss": 2.938,
+ "step": 762
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.37132537686767847,
+ "learning_rate": 0.0007551277582348658,
+ "loss": 2.9116,
+ "step": 763
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3914307975355416,
+ "learning_rate": 0.0007544887438064862,
+ "loss": 2.9633,
+ "step": 764
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3835302100776215,
+ "learning_rate": 0.0007538491678810294,
+ "loss": 2.9903,
+ "step": 765
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.37242146587846997,
+ "learning_rate": 0.0007532090318696381,
+ "loss": 2.9356,
+ "step": 766
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3705929739255261,
+ "learning_rate": 0.0007525683371846913,
+ "loss": 2.9123,
+ "step": 767
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.38073423009589885,
+ "learning_rate": 0.0007519270852398001,
+ "loss": 2.967,
+ "step": 768
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3916634168314619,
+ "learning_rate": 0.000751285277449806,
+ "loss": 3.0479,
+ "step": 769
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.4232179513402212,
+ "learning_rate": 0.0007506429152307756,
+ "loss": 3.0489,
+ "step": 770
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.37818788374724244,
+ "learning_rate": 0.00075,
+ "loss": 2.8857,
+ "step": 771
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.34365352612355976,
+ "learning_rate": 0.00074935653317599,
+ "loss": 2.8951,
+ "step": 772
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3634469118584728,
+ "learning_rate": 0.000748712516178473,
+ "loss": 2.9137,
+ "step": 773
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.3410624188384334,
+ "learning_rate": 0.0007480679504283911,
+ "loss": 2.9121,
+ "step": 774
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3565552026269083,
+ "learning_rate": 0.0007474228373478964,
+ "loss": 2.9651,
+ "step": 775
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.36220851258148923,
+ "learning_rate": 0.0007467771783603492,
+ "loss": 2.9761,
+ "step": 776
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.36168250421193454,
+ "learning_rate": 0.0007461309748903138,
+ "loss": 2.9767,
+ "step": 777
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3987397285291486,
+ "learning_rate": 0.0007454842283635562,
+ "loss": 3.0935,
+ "step": 778
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.42588750943001225,
+ "learning_rate": 0.0007448369402070404,
+ "loss": 2.8373,
+ "step": 779
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.36968553722951925,
+ "learning_rate": 0.0007441891118489254,
+ "loss": 2.9663,
+ "step": 780
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.40638844763434606,
+ "learning_rate": 0.0007435407447185622,
+ "loss": 2.9886,
+ "step": 781
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3905500885912328,
+ "learning_rate": 0.0007428918402464908,
+ "loss": 2.9342,
+ "step": 782
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3606031958796562,
+ "learning_rate": 0.0007422423998644359,
+ "loss": 2.8973,
+ "step": 783
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3936438271699715,
+ "learning_rate": 0.0007415924250053055,
+ "loss": 2.8959,
+ "step": 784
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4107108457370275,
+ "learning_rate": 0.0007409419171031865,
+ "loss": 2.9163,
+ "step": 785
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.35650650649988874,
+ "learning_rate": 0.0007402908775933419,
+ "loss": 2.9027,
+ "step": 786
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.37411110663053326,
+ "learning_rate": 0.0007396393079122077,
+ "loss": 2.8941,
+ "step": 787
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3634950712919395,
+ "learning_rate": 0.0007389872094973896,
+ "loss": 2.9141,
+ "step": 788
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4357791118305008,
+ "learning_rate": 0.00073833458378766,
+ "loss": 3.0661,
+ "step": 789
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.40878151579741306,
+ "learning_rate": 0.0007376814322229544,
+ "loss": 3.0366,
+ "step": 790
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4006491279802683,
+ "learning_rate": 0.0007370277562443688,
+ "loss": 2.9652,
+ "step": 791
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4258891218701904,
+ "learning_rate": 0.0007363735572941564,
+ "loss": 2.8913,
+ "step": 792
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.41219453929549316,
+ "learning_rate": 0.0007357188368157236,
+ "loss": 3.002,
+ "step": 793
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.32797385550696295,
+ "learning_rate": 0.0007350635962536284,
+ "loss": 2.7631,
+ "step": 794
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.38551620419649013,
+ "learning_rate": 0.0007344078370535756,
+ "loss": 2.9291,
+ "step": 795
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.3991591726978952,
+ "learning_rate": 0.0007337515606624148,
+ "loss": 3.0154,
+ "step": 796
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3680578224650872,
+ "learning_rate": 0.0007330947685281362,
+ "loss": 2.9919,
+ "step": 797
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3526079719021457,
+ "learning_rate": 0.0007324374620998682,
+ "loss": 2.9704,
+ "step": 798
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4204183953518501,
+ "learning_rate": 0.000731779642827874,
+ "loss": 2.8389,
+ "step": 799
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3933828550178033,
+ "learning_rate": 0.0007311213121635483,
+ "loss": 2.9266,
+ "step": 800
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.43320102004530026,
+ "learning_rate": 0.0007304624715594139,
+ "loss": 2.8903,
+ "step": 801
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4129007104352534,
+ "learning_rate": 0.0007298031224691193,
+ "loss": 2.9451,
+ "step": 802
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.36667211055162724,
+ "learning_rate": 0.0007291432663474339,
+ "loss": 2.939,
+ "step": 803
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3714228342419672,
+ "learning_rate": 0.0007284829046502467,
+ "loss": 2.8774,
+ "step": 804
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.35894988886307405,
+ "learning_rate": 0.0007278220388345619,
+ "loss": 2.8726,
+ "step": 805
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3739920241828815,
+ "learning_rate": 0.0007271606703584958,
+ "loss": 2.9266,
+ "step": 806
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3822374695344719,
+ "learning_rate": 0.000726498800681274,
+ "loss": 2.9163,
+ "step": 807
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4092270183899971,
+ "learning_rate": 0.0007258364312632279,
+ "loss": 2.9181,
+ "step": 808
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.40092844328854216,
+ "learning_rate": 0.0007251735635657915,
+ "loss": 2.9193,
+ "step": 809
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4166045825038301,
+ "learning_rate": 0.000724510199051498,
+ "loss": 2.8708,
+ "step": 810
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4251039393341536,
+ "learning_rate": 0.0007238463391839769,
+ "loss": 2.9666,
+ "step": 811
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4060284198051544,
+ "learning_rate": 0.0007231819854279508,
+ "loss": 2.9654,
+ "step": 812
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3970754430663875,
+ "learning_rate": 0.0007225171392492316,
+ "loss": 2.8559,
+ "step": 813
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4396030232957301,
+ "learning_rate": 0.0007218518021147182,
+ "loss": 2.9552,
+ "step": 814
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.38256570912726096,
+ "learning_rate": 0.0007211859754923923,
+ "loss": 2.9348,
+ "step": 815
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.35727530326349516,
+ "learning_rate": 0.0007205196608513158,
+ "loss": 2.9613,
+ "step": 816
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.3843414414388155,
+ "learning_rate": 0.0007198528596616272,
+ "loss": 2.8036,
+ "step": 817
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3963365544976552,
+ "learning_rate": 0.0007191855733945387,
+ "loss": 2.8857,
+ "step": 818
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.36531220947603005,
+ "learning_rate": 0.0007185178035223327,
+ "loss": 2.9171,
+ "step": 819
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3410990499252709,
+ "learning_rate": 0.0007178495515183583,
+ "loss": 2.9469,
+ "step": 820
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4240173900797112,
+ "learning_rate": 0.000717180818857029,
+ "loss": 2.9535,
+ "step": 821
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4028067553422141,
+ "learning_rate": 0.0007165116070138182,
+ "loss": 3.0237,
+ "step": 822
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.38656369235077287,
+ "learning_rate": 0.0007158419174652569,
+ "loss": 2.9183,
+ "step": 823
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.36491809759258437,
+ "learning_rate": 0.00071517175168893,
+ "loss": 2.8593,
+ "step": 824
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3925264375499225,
+ "learning_rate": 0.0007145011111634732,
+ "loss": 2.919,
+ "step": 825
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4115380109849583,
+ "learning_rate": 0.0007138299973685694,
+ "loss": 3.0242,
+ "step": 826
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.40392460235613703,
+ "learning_rate": 0.0007131584117849459,
+ "loss": 2.9709,
+ "step": 827
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3697115017368334,
+ "learning_rate": 0.0007124863558943713,
+ "loss": 2.8596,
+ "step": 828
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3711056840304657,
+ "learning_rate": 0.0007118138311796514,
+ "loss": 2.8698,
+ "step": 829
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3985708045617549,
+ "learning_rate": 0.0007111408391246262,
+ "loss": 3.0169,
+ "step": 830
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3857791354797193,
+ "learning_rate": 0.0007104673812141675,
+ "loss": 2.9134,
+ "step": 831
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3720506163395345,
+ "learning_rate": 0.0007097934589341745,
+ "loss": 2.9003,
+ "step": 832
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4327745232584935,
+ "learning_rate": 0.0007091190737715711,
+ "loss": 2.8737,
+ "step": 833
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.37168188704034905,
+ "learning_rate": 0.0007084442272143026,
+ "loss": 2.9439,
+ "step": 834
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.37927954843330586,
+ "learning_rate": 0.000707768920751332,
+ "loss": 3.0149,
+ "step": 835
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4007880246380382,
+ "learning_rate": 0.0007070931558726373,
+ "loss": 2.9574,
+ "step": 836
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4014902880346036,
+ "learning_rate": 0.0007064169340692076,
+ "loss": 2.954,
+ "step": 837
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3485236276175264,
+ "learning_rate": 0.0007057402568330407,
+ "loss": 2.9929,
+ "step": 838
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.3973059600323392,
+ "learning_rate": 0.0007050631256571389,
+ "loss": 2.9999,
+ "step": 839
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3791438692719396,
+ "learning_rate": 0.000704385542035506,
+ "loss": 2.9018,
+ "step": 840
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3921767183615086,
+ "learning_rate": 0.000703707507463144,
+ "loss": 2.98,
+ "step": 841
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3773078314871746,
+ "learning_rate": 0.0007030290234360505,
+ "loss": 2.8878,
+ "step": 842
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3957278596132006,
+ "learning_rate": 0.0007023500914512139,
+ "loss": 2.885,
+ "step": 843
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.40535634001520243,
+ "learning_rate": 0.0007016707130066116,
+ "loss": 2.9321,
+ "step": 844
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.37312228644549855,
+ "learning_rate": 0.0007009908896012055,
+ "loss": 2.9586,
+ "step": 845
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3833052552168628,
+ "learning_rate": 0.0007003106227349399,
+ "loss": 2.9125,
+ "step": 846
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3730197673595869,
+ "learning_rate": 0.000699629913908737,
+ "loss": 2.8948,
+ "step": 847
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.4154762355775169,
+ "learning_rate": 0.0006989487646244943,
+ "loss": 2.8563,
+ "step": 848
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.37521003924656016,
+ "learning_rate": 0.0006982671763850814,
+ "loss": 2.8883,
+ "step": 849
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3596186911619955,
+ "learning_rate": 0.0006975851506943359,
+ "loss": 2.8794,
+ "step": 850
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3764926711329026,
+ "learning_rate": 0.0006969026890570611,
+ "loss": 2.9739,
+ "step": 851
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3734558074950923,
+ "learning_rate": 0.0006962197929790216,
+ "loss": 2.904,
+ "step": 852
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.34434419154221113,
+ "learning_rate": 0.0006955364639669409,
+ "loss": 2.9265,
+ "step": 853
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.35871619892045215,
+ "learning_rate": 0.0006948527035284978,
+ "loss": 2.9235,
+ "step": 854
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3796001078576814,
+ "learning_rate": 0.0006941685131723225,
+ "loss": 2.918,
+ "step": 855
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.39200014893138657,
+ "learning_rate": 0.0006934838944079943,
+ "loss": 2.9586,
+ "step": 856
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.36188090593854577,
+ "learning_rate": 0.0006927988487460378,
+ "loss": 2.8315,
+ "step": 857
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.4045546191222693,
+ "learning_rate": 0.0006921133776979186,
+ "loss": 2.9228,
+ "step": 858
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.36023931468090276,
+ "learning_rate": 0.0006914274827760418,
+ "loss": 2.8813,
+ "step": 859
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3630191952721033,
+ "learning_rate": 0.0006907411654937475,
+ "loss": 2.9995,
+ "step": 860
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.3563196720780499,
+ "learning_rate": 0.0006900544273653075,
+ "loss": 2.9292,
+ "step": 861
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3366544920875894,
+ "learning_rate": 0.000689367269905922,
+ "loss": 2.9276,
+ "step": 862
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.36704474234823614,
+ "learning_rate": 0.0006886796946317168,
+ "loss": 2.9312,
+ "step": 863
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3646739603293136,
+ "learning_rate": 0.0006879917030597397,
+ "loss": 2.9272,
+ "step": 864
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.38219428237099007,
+ "learning_rate": 0.0006873032967079561,
+ "loss": 2.9716,
+ "step": 865
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3762064090717164,
+ "learning_rate": 0.0006866144770952474,
+ "loss": 2.8979,
+ "step": 866
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.37025708577514327,
+ "learning_rate": 0.0006859252457414067,
+ "loss": 2.9594,
+ "step": 867
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.37409400278170696,
+ "learning_rate": 0.0006852356041671351,
+ "loss": 2.9408,
+ "step": 868
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3709509261386778,
+ "learning_rate": 0.0006845455538940394,
+ "loss": 2.9548,
+ "step": 869
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3551271912328962,
+ "learning_rate": 0.0006838550964446276,
+ "loss": 2.8374,
+ "step": 870
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3735359048719011,
+ "learning_rate": 0.0006831642333423067,
+ "loss": 2.9056,
+ "step": 871
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.36452265369342735,
+ "learning_rate": 0.000682472966111378,
+ "loss": 2.8651,
+ "step": 872
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.4067441557354666,
+ "learning_rate": 0.0006817812962770348,
+ "loss": 2.984,
+ "step": 873
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.4178553893276221,
+ "learning_rate": 0.0006810892253653589,
+ "loss": 2.891,
+ "step": 874
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3729176873567436,
+ "learning_rate": 0.0006803967549033167,
+ "loss": 2.9533,
+ "step": 875
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.36567993361759743,
+ "learning_rate": 0.0006797038864187564,
+ "loss": 2.9406,
+ "step": 876
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3966900082019893,
+ "learning_rate": 0.0006790106214404043,
+ "loss": 2.9829,
+ "step": 877
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.371411293269752,
+ "learning_rate": 0.0006783169614978614,
+ "loss": 2.9303,
+ "step": 878
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.39045936496338873,
+ "learning_rate": 0.0006776229081216001,
+ "loss": 2.9051,
+ "step": 879
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3889221534518561,
+ "learning_rate": 0.0006769284628429611,
+ "loss": 2.9784,
+ "step": 880
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.3835581221227254,
+ "learning_rate": 0.0006762336271941498,
+ "loss": 2.9831,
+ "step": 881
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.4043671923597656,
+ "learning_rate": 0.0006755384027082326,
+ "loss": 2.8433,
+ "step": 882
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.41006010843196183,
+ "learning_rate": 0.0006748427909191342,
+ "loss": 2.9115,
+ "step": 883
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.4085941456754986,
+ "learning_rate": 0.0006741467933616335,
+ "loss": 2.9666,
+ "step": 884
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.35376132198564636,
+ "learning_rate": 0.0006734504115713604,
+ "loss": 2.8006,
+ "step": 885
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.36466974725064066,
+ "learning_rate": 0.0006727536470847932,
+ "loss": 2.8824,
+ "step": 886
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.34881152140232036,
+ "learning_rate": 0.000672056501439254,
+ "loss": 2.9315,
+ "step": 887
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3578781550331035,
+ "learning_rate": 0.0006713589761729063,
+ "loss": 2.89,
+ "step": 888
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.42570738379991735,
+ "learning_rate": 0.0006706610728247508,
+ "loss": 2.8563,
+ "step": 889
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.34673302336294726,
+ "learning_rate": 0.0006699627929346227,
+ "loss": 2.895,
+ "step": 890
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.39088163469064047,
+ "learning_rate": 0.0006692641380431879,
+ "loss": 3.0072,
+ "step": 891
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.378818407765038,
+ "learning_rate": 0.0006685651096919393,
+ "loss": 2.8655,
+ "step": 892
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3596838533477915,
+ "learning_rate": 0.0006678657094231944,
+ "loss": 2.9202,
+ "step": 893
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.49360419219769053,
+ "learning_rate": 0.0006671659387800909,
+ "loss": 2.9255,
+ "step": 894
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3970986851486971,
+ "learning_rate": 0.000666465799306584,
+ "loss": 2.9144,
+ "step": 895
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3980494980973069,
+ "learning_rate": 0.0006657652925474423,
+ "loss": 2.943,
+ "step": 896
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.4128414587039792,
+ "learning_rate": 0.000665064420048245,
+ "loss": 2.9228,
+ "step": 897
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.4176646142980371,
+ "learning_rate": 0.0006643631833553785,
+ "loss": 2.9406,
+ "step": 898
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3771209874038338,
+ "learning_rate": 0.000663661584016032,
+ "loss": 2.9381,
+ "step": 899
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3783353395964346,
+ "learning_rate": 0.0006629596235781957,
+ "loss": 2.924,
+ "step": 900
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.41183516299890327,
+ "learning_rate": 0.0006622573035906556,
+ "loss": 2.9857,
+ "step": 901
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3952514280666571,
+ "learning_rate": 0.0006615546256029921,
+ "loss": 2.8099,
+ "step": 902
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.38046708912962407,
+ "learning_rate": 0.0006608515911655743,
+ "loss": 2.932,
+ "step": 903
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3443707921265205,
+ "learning_rate": 0.0006601482018295591,
+ "loss": 2.9111,
+ "step": 904
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.3439385788134237,
+ "learning_rate": 0.0006594444591468851,
+ "loss": 2.9387,
+ "step": 905
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3974999140609994,
+ "learning_rate": 0.0006587403646702713,
+ "loss": 2.9505,
+ "step": 906
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3587690313294762,
+ "learning_rate": 0.0006580359199532126,
+ "loss": 2.896,
+ "step": 907
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3839270915154719,
+ "learning_rate": 0.000657331126549977,
+ "loss": 2.86,
+ "step": 908
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.36288335206652844,
+ "learning_rate": 0.0006566259860156014,
+ "loss": 2.8312,
+ "step": 909
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.43518171664748695,
+ "learning_rate": 0.0006559204999058888,
+ "loss": 2.8962,
+ "step": 910
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.34532140561387625,
+ "learning_rate": 0.0006552146697774049,
+ "loss": 2.8777,
+ "step": 911
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.37261754481742704,
+ "learning_rate": 0.0006545084971874737,
+ "loss": 2.8486,
+ "step": 912
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3919230959570363,
+ "learning_rate": 0.0006538019836941758,
+ "loss": 2.8955,
+ "step": 913
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4234699141364421,
+ "learning_rate": 0.0006530951308563431,
+ "loss": 2.9468,
+ "step": 914
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.39496970494231204,
+ "learning_rate": 0.0006523879402335567,
+ "loss": 2.9315,
+ "step": 915
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.40052153884738834,
+ "learning_rate": 0.0006516804133861429,
+ "loss": 3.0455,
+ "step": 916
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3941038027624462,
+ "learning_rate": 0.0006509725518751698,
+ "loss": 2.8514,
+ "step": 917
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.36836984212219037,
+ "learning_rate": 0.0006502643572624438,
+ "loss": 2.8633,
+ "step": 918
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3416298917120285,
+ "learning_rate": 0.0006495558311105064,
+ "loss": 2.876,
+ "step": 919
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.35827478913328453,
+ "learning_rate": 0.0006488469749826305,
+ "loss": 2.9165,
+ "step": 920
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3566278416506374,
+ "learning_rate": 0.000648137790442817,
+ "loss": 2.87,
+ "step": 921
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.36175257820259993,
+ "learning_rate": 0.0006474282790557916,
+ "loss": 2.9917,
+ "step": 922
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.38047256716495886,
+ "learning_rate": 0.000646718442387001,
+ "loss": 2.967,
+ "step": 923
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.38390708299192844,
+ "learning_rate": 0.0006460082820026094,
+ "loss": 2.9026,
+ "step": 924
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.3486793745385199,
+ "learning_rate": 0.0006452977994694959,
+ "loss": 2.947,
+ "step": 925
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.40709408324258994,
+ "learning_rate": 0.0006445869963552496,
+ "loss": 2.9484,
+ "step": 926
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4204771928736857,
+ "learning_rate": 0.0006438758742281672,
+ "loss": 2.9578,
+ "step": 927
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.37322357722768207,
+ "learning_rate": 0.0006431644346572495,
+ "loss": 2.9297,
+ "step": 928
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.33511528057753515,
+ "learning_rate": 0.0006424526792121974,
+ "loss": 2.8631,
+ "step": 929
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.39306132279242384,
+ "learning_rate": 0.0006417406094634089,
+ "loss": 2.8743,
+ "step": 930
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.3868823542256048,
+ "learning_rate": 0.0006410282269819756,
+ "loss": 2.988,
+ "step": 931
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4125424134005421,
+ "learning_rate": 0.0006403155333396787,
+ "loss": 2.9591,
+ "step": 932
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.36972041208784096,
+ "learning_rate": 0.0006396025301089863,
+ "loss": 2.9736,
+ "step": 933
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.3684781886634976,
+ "learning_rate": 0.0006388892188630493,
+ "loss": 2.9927,
+ "step": 934
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.3757447405278929,
+ "learning_rate": 0.0006381756011756982,
+ "loss": 2.9964,
+ "step": 935
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4609886018677078,
+ "learning_rate": 0.0006374616786214403,
+ "loss": 2.9025,
+ "step": 936
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.37849533711046196,
+ "learning_rate": 0.0006367474527754544,
+ "loss": 2.925,
+ "step": 937
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.36750896525777965,
+ "learning_rate": 0.0006360329252135894,
+ "loss": 2.8841,
+ "step": 938
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.37133025854604923,
+ "learning_rate": 0.0006353180975123595,
+ "loss": 2.9342,
+ "step": 939
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.34727790053730084,
+ "learning_rate": 0.0006346029712489413,
+ "loss": 2.9284,
+ "step": 940
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.37838360907321306,
+ "learning_rate": 0.0006338875480011698,
+ "loss": 2.9437,
+ "step": 941
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.364494347025872,
+ "learning_rate": 0.0006331718293475357,
+ "loss": 2.8689,
+ "step": 942
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.34251547866347043,
+ "learning_rate": 0.0006324558168671811,
+ "loss": 3.0432,
+ "step": 943
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.364587472085189,
+ "learning_rate": 0.0006317395121398968,
+ "loss": 2.9739,
+ "step": 944
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.3843043244698266,
+ "learning_rate": 0.0006310229167461179,
+ "loss": 2.8975,
+ "step": 945
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4115720193387321,
+ "learning_rate": 0.0006303060322669214,
+ "loss": 2.9048,
+ "step": 946
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.39577246089357326,
+ "learning_rate": 0.0006295888602840214,
+ "loss": 2.8083,
+ "step": 947
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.38295910638552877,
+ "learning_rate": 0.0006288714023797671,
+ "loss": 2.912,
+ "step": 948
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3618177419224692,
+ "learning_rate": 0.000628153660137138,
+ "loss": 2.9012,
+ "step": 949
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3645498477599135,
+ "learning_rate": 0.0006274356351397413,
+ "loss": 2.8838,
+ "step": 950
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3452285826423024,
+ "learning_rate": 0.0006267173289718079,
+ "loss": 2.8527,
+ "step": 951
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3509424301958051,
+ "learning_rate": 0.000625998743218189,
+ "loss": 2.9469,
+ "step": 952
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3547497938631686,
+ "learning_rate": 0.000625279879464353,
+ "loss": 2.8532,
+ "step": 953
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3925136728542854,
+ "learning_rate": 0.000624560739296381,
+ "loss": 2.9544,
+ "step": 954
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.35260453315488643,
+ "learning_rate": 0.0006238413243009648,
+ "loss": 2.8466,
+ "step": 955
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.37697677498738713,
+ "learning_rate": 0.000623121636065402,
+ "loss": 2.8691,
+ "step": 956
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3753244798614963,
+ "learning_rate": 0.0006224016761775933,
+ "loss": 2.9145,
+ "step": 957
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4035962211502982,
+ "learning_rate": 0.0006216814462260386,
+ "loss": 2.9384,
+ "step": 958
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.37192550119552326,
+ "learning_rate": 0.0006209609477998338,
+ "loss": 2.9376,
+ "step": 959
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.36672734439689136,
+ "learning_rate": 0.0006202401824886674,
+ "loss": 2.9194,
+ "step": 960
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.40793531975291203,
+ "learning_rate": 0.0006195191518828162,
+ "loss": 2.9214,
+ "step": 961
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3783963177240007,
+ "learning_rate": 0.0006187978575731427,
+ "loss": 2.9291,
+ "step": 962
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3411398259417625,
+ "learning_rate": 0.0006180763011510911,
+ "loss": 2.9165,
+ "step": 963
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.37383748960387536,
+ "learning_rate": 0.000617354484208684,
+ "loss": 2.8408,
+ "step": 964
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3650749339308918,
+ "learning_rate": 0.0006166324083385189,
+ "loss": 2.936,
+ "step": 965
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.33263989069699923,
+ "learning_rate": 0.0006159100751337642,
+ "loss": 2.8197,
+ "step": 966
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4358012981230188,
+ "learning_rate": 0.0006151874861881565,
+ "loss": 2.8867,
+ "step": 967
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.36358236826021756,
+ "learning_rate": 0.0006144646430959964,
+ "loss": 2.9413,
+ "step": 968
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.3807432204073136,
+ "learning_rate": 0.0006137415474521454,
+ "loss": 2.937,
+ "step": 969
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4184026301641971,
+ "learning_rate": 0.0006130182008520222,
+ "loss": 2.9123,
+ "step": 970
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.3736954217046527,
+ "learning_rate": 0.000612294604891599,
+ "loss": 2.9185,
+ "step": 971
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.3919157182154508,
+ "learning_rate": 0.0006115707611673986,
+ "loss": 2.9478,
+ "step": 972
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.3968850483014488,
+ "learning_rate": 0.0006108466712764902,
+ "loss": 2.8914,
+ "step": 973
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.38005570346956646,
+ "learning_rate": 0.0006101223368164858,
+ "loss": 2.9482,
+ "step": 974
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4136585965619559,
+ "learning_rate": 0.0006093977593855375,
+ "loss": 2.9132,
+ "step": 975
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.44336205352486646,
+ "learning_rate": 0.0006086729405823335,
+ "loss": 2.9232,
+ "step": 976
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.38395414638324976,
+ "learning_rate": 0.0006079478820060943,
+ "loss": 2.922,
+ "step": 977
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.38794855664301814,
+ "learning_rate": 0.0006072225852565695,
+ "loss": 2.8724,
+ "step": 978
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.35505385508128123,
+ "learning_rate": 0.0006064970519340341,
+ "loss": 2.9158,
+ "step": 979
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.39174967982638775,
+ "learning_rate": 0.0006057712836392856,
+ "loss": 2.8997,
+ "step": 980
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.38629380148000786,
+ "learning_rate": 0.0006050452819736389,
+ "loss": 2.9137,
+ "step": 981
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.40248895420431763,
+ "learning_rate": 0.000604319048538925,
+ "loss": 2.8497,
+ "step": 982
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.431078008223497,
+ "learning_rate": 0.0006035925849374855,
+ "loss": 2.9354,
+ "step": 983
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.3944503317530124,
+ "learning_rate": 0.0006028658927721697,
+ "loss": 2.947,
+ "step": 984
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.38804299660043384,
+ "learning_rate": 0.0006021389736463321,
+ "loss": 2.9314,
+ "step": 985
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4163965042930912,
+ "learning_rate": 0.0006014118291638271,
+ "loss": 2.8353,
+ "step": 986
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.41521316323279395,
+ "learning_rate": 0.0006006844609290065,
+ "loss": 3.0005,
+ "step": 987
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.39657589111386665,
+ "learning_rate": 0.0005999568705467161,
+ "loss": 3.0282,
+ "step": 988
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.3706583962314621,
+ "learning_rate": 0.0005992290596222915,
+ "loss": 2.8694,
+ "step": 989
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.39824232982031466,
+ "learning_rate": 0.0005985010297615551,
+ "loss": 2.9443,
+ "step": 990
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.37853952705037447,
+ "learning_rate": 0.0005977727825708123,
+ "loss": 2.9477,
+ "step": 991
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.42581460839445323,
+ "learning_rate": 0.0005970443196568478,
+ "loss": 2.8755,
+ "step": 992
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3819853738028648,
+ "learning_rate": 0.0005963156426269227,
+ "loss": 2.8382,
+ "step": 993
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.38515531113955787,
+ "learning_rate": 0.0005955867530887702,
+ "loss": 2.9202,
+ "step": 994
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.37489785135563064,
+ "learning_rate": 0.0005948576526505923,
+ "loss": 2.8807,
+ "step": 995
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.38565433205651506,
+ "learning_rate": 0.0005941283429210568,
+ "loss": 2.8849,
+ "step": 996
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3812519285435577,
+ "learning_rate": 0.0005933988255092926,
+ "loss": 2.949,
+ "step": 997
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3644703400135544,
+ "learning_rate": 0.0005926691020248874,
+ "loss": 2.8561,
+ "step": 998
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.4185768782286003,
+ "learning_rate": 0.0005919391740778833,
+ "loss": 2.9214,
+ "step": 999
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3818423535202459,
+ "learning_rate": 0.0005912090432787736,
+ "loss": 2.9226,
+ "step": 1000
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.38018027605669547,
+ "learning_rate": 0.000590478711238499,
+ "loss": 2.8237,
+ "step": 1001
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.38853278232916244,
+ "learning_rate": 0.0005897481795684446,
+ "loss": 2.903,
+ "step": 1002
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.36012599864824646,
+ "learning_rate": 0.0005890174498804355,
+ "loss": 2.8763,
+ "step": 1003
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.4144782851671913,
+ "learning_rate": 0.0005882865237867339,
+ "loss": 2.932,
+ "step": 1004
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.411671223153121,
+ "learning_rate": 0.0005875554029000353,
+ "loss": 2.891,
+ "step": 1005
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.4114799986662115,
+ "learning_rate": 0.0005868240888334653,
+ "loss": 2.8002,
+ "step": 1006
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.36230295980464405,
+ "learning_rate": 0.0005860925832005753,
+ "loss": 2.8496,
+ "step": 1007
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.40053081780283,
+ "learning_rate": 0.0005853608876153395,
+ "loss": 2.9829,
+ "step": 1008
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3733233829158717,
+ "learning_rate": 0.0005846290036921512,
+ "loss": 2.9174,
+ "step": 1009
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.39136985889485293,
+ "learning_rate": 0.0005838969330458195,
+ "loss": 2.8152,
+ "step": 1010
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.41980223881697165,
+ "learning_rate": 0.0005831646772915651,
+ "loss": 2.9502,
+ "step": 1011
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.3660023043909497,
+ "learning_rate": 0.0005824322380450173,
+ "loss": 2.8269,
+ "step": 1012
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.36275443767586885,
+ "learning_rate": 0.0005816996169222102,
+ "loss": 2.9735,
+ "step": 1013
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.38844212767471664,
+ "learning_rate": 0.0005809668155395793,
+ "loss": 2.8572,
+ "step": 1014
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.37372299334165904,
+ "learning_rate": 0.0005802338355139578,
+ "loss": 2.8786,
+ "step": 1015
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3945819727264801,
+ "learning_rate": 0.0005795006784625728,
+ "loss": 2.8826,
+ "step": 1016
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.35702214470868304,
+ "learning_rate": 0.0005787673460030423,
+ "loss": 2.9448,
+ "step": 1017
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.36216962292085586,
+ "learning_rate": 0.000578033839753371,
+ "loss": 2.8308,
+ "step": 1018
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.37845059694477257,
+ "learning_rate": 0.0005773001613319476,
+ "loss": 2.8567,
+ "step": 1019
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3707699108528708,
+ "learning_rate": 0.00057656631235754,
+ "loss": 2.8503,
+ "step": 1020
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.40209933440460177,
+ "learning_rate": 0.0005758322944492929,
+ "loss": 2.8596,
+ "step": 1021
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3680508734491853,
+ "learning_rate": 0.0005750981092267237,
+ "loss": 2.997,
+ "step": 1022
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3961539830895662,
+ "learning_rate": 0.0005743637583097183,
+ "loss": 2.8592,
+ "step": 1023
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3500828646334298,
+ "learning_rate": 0.0005736292433185291,
+ "loss": 2.9122,
+ "step": 1024
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.36702705957556325,
+ "learning_rate": 0.0005728945658737699,
+ "loss": 2.9778,
+ "step": 1025
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4131318836088528,
+ "learning_rate": 0.0005721597275964133,
+ "loss": 2.876,
+ "step": 1026
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.42931732666572225,
+ "learning_rate": 0.0005714247301077865,
+ "loss": 2.8732,
+ "step": 1027
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.389671088511301,
+ "learning_rate": 0.0005706895750295682,
+ "loss": 2.878,
+ "step": 1028
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.39711416621577034,
+ "learning_rate": 0.0005699542639837844,
+ "loss": 2.8982,
+ "step": 1029
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4271983141068169,
+ "learning_rate": 0.0005692187985928055,
+ "loss": 2.888,
+ "step": 1030
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.426750436941153,
+ "learning_rate": 0.0005684831804793427,
+ "loss": 2.903,
+ "step": 1031
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4628207442192803,
+ "learning_rate": 0.0005677474112664438,
+ "loss": 2.9667,
+ "step": 1032
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4160028608402869,
+ "learning_rate": 0.0005670114925774899,
+ "loss": 2.8516,
+ "step": 1033
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4061757879462875,
+ "learning_rate": 0.0005662754260361924,
+ "loss": 2.976,
+ "step": 1034
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.3515168770105341,
+ "learning_rate": 0.0005655392132665884,
+ "loss": 2.8902,
+ "step": 1035
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.37854224446637447,
+ "learning_rate": 0.000564802855893038,
+ "loss": 2.9027,
+ "step": 1036
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.40302704021247393,
+ "learning_rate": 0.0005640663555402198,
+ "loss": 2.8703,
+ "step": 1037
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4390167685902175,
+ "learning_rate": 0.0005633297138331285,
+ "loss": 2.8302,
+ "step": 1038
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.37814723412937457,
+ "learning_rate": 0.0005625929323970705,
+ "loss": 2.8603,
+ "step": 1039
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4456190183491553,
+ "learning_rate": 0.0005618560128576603,
+ "loss": 2.8885,
+ "step": 1040
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.3616937093941743,
+ "learning_rate": 0.0005611189568408173,
+ "loss": 2.8674,
+ "step": 1041
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.42868491402149095,
+ "learning_rate": 0.0005603817659727619,
+ "loss": 2.8978,
+ "step": 1042
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4688031696672413,
+ "learning_rate": 0.0005596444418800121,
+ "loss": 2.9352,
+ "step": 1043
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.3837674126065026,
+ "learning_rate": 0.0005589069861893798,
+ "loss": 2.8324,
+ "step": 1044
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.40930621094674635,
+ "learning_rate": 0.0005581694005279673,
+ "loss": 2.8973,
+ "step": 1045
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.41665660247935443,
+ "learning_rate": 0.0005574316865231637,
+ "loss": 2.9959,
+ "step": 1046
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.46275655351164435,
+ "learning_rate": 0.0005566938458026411,
+ "loss": 2.9139,
+ "step": 1047
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4248438458783021,
+ "learning_rate": 0.0005559558799943514,
+ "loss": 2.9248,
+ "step": 1048
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4309105443823323,
+ "learning_rate": 0.0005552177907265223,
+ "loss": 2.937,
+ "step": 1049
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.42258095906341886,
+ "learning_rate": 0.000554479579627654,
+ "loss": 2.8673,
+ "step": 1050
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.3771337811816016,
+ "learning_rate": 0.0005537412483265157,
+ "loss": 2.9282,
+ "step": 1051
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.40636033015854744,
+ "learning_rate": 0.0005530027984521413,
+ "loss": 2.9575,
+ "step": 1052
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.460869495207544,
+ "learning_rate": 0.0005522642316338268,
+ "loss": 2.9539,
+ "step": 1053
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.38260112785978917,
+ "learning_rate": 0.0005515255495011259,
+ "loss": 2.8479,
+ "step": 1054
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.40180844821852474,
+ "learning_rate": 0.0005507867536838472,
+ "loss": 2.8835,
+ "step": 1055
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.3850691833180964,
+ "learning_rate": 0.0005500478458120492,
+ "loss": 2.9389,
+ "step": 1056
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.3886854416340128,
+ "learning_rate": 0.0005493088275160387,
+ "loss": 2.978,
+ "step": 1057
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3732901505380103,
+ "learning_rate": 0.0005485697004263657,
+ "loss": 2.906,
+ "step": 1058
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3865193420865317,
+ "learning_rate": 0.0005478304661738199,
+ "loss": 2.9456,
+ "step": 1059
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.45901313207205163,
+ "learning_rate": 0.0005470911263894279,
+ "loss": 2.8022,
+ "step": 1060
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4045426946955099,
+ "learning_rate": 0.0005463516827044491,
+ "loss": 2.8156,
+ "step": 1061
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3741117087020425,
+ "learning_rate": 0.000545612136750372,
+ "loss": 2.9438,
+ "step": 1062
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3893792406678757,
+ "learning_rate": 0.0005448724901589107,
+ "loss": 2.9244,
+ "step": 1063
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4498775799610652,
+ "learning_rate": 0.0005441327445620014,
+ "loss": 2.8905,
+ "step": 1064
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.38424805052914174,
+ "learning_rate": 0.0005433929015917988,
+ "loss": 2.8831,
+ "step": 1065
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3423281281825885,
+ "learning_rate": 0.0005426529628806724,
+ "loss": 2.8234,
+ "step": 1066
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.44294983010537076,
+ "learning_rate": 0.0005419129300612029,
+ "loss": 2.8617,
+ "step": 1067
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.40736397111906714,
+ "learning_rate": 0.000541172804766179,
+ "loss": 2.926,
+ "step": 1068
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4297429856325485,
+ "learning_rate": 0.0005404325886285927,
+ "loss": 2.8355,
+ "step": 1069
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3828131654008914,
+ "learning_rate": 0.000539692283281637,
+ "loss": 2.9441,
+ "step": 1070
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3534263150318813,
+ "learning_rate": 0.0005389518903587017,
+ "loss": 2.9181,
+ "step": 1071
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.39794930874203327,
+ "learning_rate": 0.0005382114114933695,
+ "loss": 2.8496,
+ "step": 1072
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3987504215180094,
+ "learning_rate": 0.0005374708483194132,
+ "loss": 2.8291,
+ "step": 1073
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.379060533747186,
+ "learning_rate": 0.000536730202470791,
+ "loss": 2.8782,
+ "step": 1074
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.3699497319858817,
+ "learning_rate": 0.0005359894755816443,
+ "loss": 2.8874,
+ "step": 1075
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4606638710942983,
+ "learning_rate": 0.0005352486692862926,
+ "loss": 2.8768,
+ "step": 1076
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4182201469839071,
+ "learning_rate": 0.0005345077852192307,
+ "loss": 2.9011,
+ "step": 1077
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.41974912159584804,
+ "learning_rate": 0.0005337668250151254,
+ "loss": 2.8551,
+ "step": 1078
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.400096687905397,
+ "learning_rate": 0.0005330257903088111,
+ "loss": 2.7974,
+ "step": 1079
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.37671803803018905,
+ "learning_rate": 0.000532284682735287,
+ "loss": 2.9114,
+ "step": 1080
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4187263951058271,
+ "learning_rate": 0.0005315435039297124,
+ "loss": 2.8507,
+ "step": 1081
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.37489667292912565,
+ "learning_rate": 0.0005308022555274046,
+ "loss": 2.8884,
+ "step": 1082
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3935064516667747,
+ "learning_rate": 0.0005300609391638336,
+ "loss": 2.9711,
+ "step": 1083
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3580046482622386,
+ "learning_rate": 0.0005293195564746201,
+ "loss": 2.879,
+ "step": 1084
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.36245170909609103,
+ "learning_rate": 0.0005285781090955304,
+ "loss": 2.9017,
+ "step": 1085
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.33305593566585034,
+ "learning_rate": 0.0005278365986624743,
+ "loss": 2.8423,
+ "step": 1086
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3955848002266755,
+ "learning_rate": 0.0005270950268115001,
+ "loss": 2.9528,
+ "step": 1087
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3744551046486652,
+ "learning_rate": 0.0005263533951787919,
+ "loss": 2.8752,
+ "step": 1088
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.38743571319400644,
+ "learning_rate": 0.000525611705400666,
+ "loss": 2.9431,
+ "step": 1089
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3865803236914739,
+ "learning_rate": 0.0005248699591135664,
+ "loss": 2.9564,
+ "step": 1090
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.38387999465186234,
+ "learning_rate": 0.0005241281579540618,
+ "loss": 2.8469,
+ "step": 1091
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4411278290061962,
+ "learning_rate": 0.0005233863035588427,
+ "loss": 2.9538,
+ "step": 1092
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.38992205100402105,
+ "learning_rate": 0.0005226443975647161,
+ "loss": 2.9438,
+ "step": 1093
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.41572550414653225,
+ "learning_rate": 0.0005219024416086036,
+ "loss": 2.8715,
+ "step": 1094
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4007360674562819,
+ "learning_rate": 0.0005211604373275366,
+ "loss": 2.9455,
+ "step": 1095
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.36129838212535387,
+ "learning_rate": 0.0005204183863586533,
+ "loss": 2.9261,
+ "step": 1096
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.388508715608231,
+ "learning_rate": 0.0005196762903391951,
+ "loss": 2.9196,
+ "step": 1097
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4409949184634728,
+ "learning_rate": 0.0005189341509065023,
+ "loss": 2.8521,
+ "step": 1098
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.3853393825953106,
+ "learning_rate": 0.0005181919696980112,
+ "loss": 2.8761,
+ "step": 1099
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.36376631566520995,
+ "learning_rate": 0.0005174497483512506,
+ "loss": 2.8533,
+ "step": 1100
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.36877022306860263,
+ "learning_rate": 0.0005167074885038374,
+ "loss": 2.8557,
+ "step": 1101
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4151164469032429,
+ "learning_rate": 0.0005159651917934735,
+ "loss": 2.8098,
+ "step": 1102
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.43852808921450015,
+ "learning_rate": 0.0005152228598579428,
+ "loss": 2.8972,
+ "step": 1103
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.3895649665579918,
+ "learning_rate": 0.000514480494335106,
+ "loss": 2.8373,
+ "step": 1104
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.3856895192019478,
+ "learning_rate": 0.0005137380968628983,
+ "loss": 2.9505,
+ "step": 1105
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.42101043587231474,
+ "learning_rate": 0.0005129956690793255,
+ "loss": 2.8832,
+ "step": 1106
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4620324997651997,
+ "learning_rate": 0.0005122532126224601,
+ "loss": 2.8509,
+ "step": 1107
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.39070557415224283,
+ "learning_rate": 0.0005115107291304378,
+ "loss": 2.847,
+ "step": 1108
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.38482915717153543,
+ "learning_rate": 0.0005107682202414544,
+ "loss": 2.8848,
+ "step": 1109
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.41856399292905094,
+ "learning_rate": 0.0005100256875937613,
+ "loss": 2.9004,
+ "step": 1110
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.38886069184472255,
+ "learning_rate": 0.0005092831328256625,
+ "loss": 2.9122,
+ "step": 1111
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4404149985758266,
+ "learning_rate": 0.0005085405575755105,
+ "loss": 2.9421,
+ "step": 1112
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.36087861886365685,
+ "learning_rate": 0.0005077979634817034,
+ "loss": 2.9058,
+ "step": 1113
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.3963009469518461,
+ "learning_rate": 0.0005070553521826808,
+ "loss": 2.9403,
+ "step": 1114
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.40703747217683733,
+ "learning_rate": 0.00050631272531692,
+ "loss": 2.8371,
+ "step": 1115
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.41511438632901515,
+ "learning_rate": 0.0005055700845229327,
+ "loss": 2.934,
+ "step": 1116
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4060123165890798,
+ "learning_rate": 0.000504827431439262,
+ "loss": 2.907,
+ "step": 1117
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.416146882509125,
+ "learning_rate": 0.000504084767704477,
+ "loss": 2.8399,
+ "step": 1118
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.48154359711623634,
+ "learning_rate": 0.0005033420949571712,
+ "loss": 2.9848,
+ "step": 1119
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.43126855819115867,
+ "learning_rate": 0.0005025994148359574,
+ "loss": 2.7974,
+ "step": 1120
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4093703029962342,
+ "learning_rate": 0.0005018567289794651,
+ "loss": 2.8336,
+ "step": 1121
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.37504777525996863,
+ "learning_rate": 0.0005011140390263362,
+ "loss": 2.8996,
+ "step": 1122
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.41854595127174715,
+ "learning_rate": 0.0005003713466152218,
+ "loss": 2.8083,
+ "step": 1123
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.34869858388239916,
+ "learning_rate": 0.0004996286533847783,
+ "loss": 2.8556,
+ "step": 1124
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.3591909963585215,
+ "learning_rate": 0.000498885960973664,
+ "loss": 2.8456,
+ "step": 1125
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.3719485619632497,
+ "learning_rate": 0.000498143271020535,
+ "loss": 2.9032,
+ "step": 1126
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.3859262528634551,
+ "learning_rate": 0.0004974005851640428,
+ "loss": 2.9277,
+ "step": 1127
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.371641216650933,
+ "learning_rate": 0.000496657905042829,
+ "loss": 2.9696,
+ "step": 1128
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.41250329201178965,
+ "learning_rate": 0.0004959152322955232,
+ "loss": 2.9439,
+ "step": 1129
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.43270996883069135,
+ "learning_rate": 0.0004951725685607382,
+ "loss": 2.8226,
+ "step": 1130
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.35885595004052423,
+ "learning_rate": 0.0004944299154770673,
+ "loss": 2.7837,
+ "step": 1131
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4108245675861117,
+ "learning_rate": 0.0004936872746830802,
+ "loss": 2.8543,
+ "step": 1132
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4216056573155104,
+ "learning_rate": 0.0004929446478173195,
+ "loss": 2.829,
+ "step": 1133
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.39140333710511443,
+ "learning_rate": 0.0004922020365182968,
+ "loss": 2.9034,
+ "step": 1134
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4154951239840649,
+ "learning_rate": 0.0004914594424244897,
+ "loss": 2.8889,
+ "step": 1135
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.419137662341494,
+ "learning_rate": 0.0004907168671743376,
+ "loss": 2.8747,
+ "step": 1136
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.37143460281050755,
+ "learning_rate": 0.0004899743124062387,
+ "loss": 2.9571,
+ "step": 1137
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4058358193496128,
+ "learning_rate": 0.0004892317797585456,
+ "loss": 2.898,
+ "step": 1138
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.42397779563801347,
+ "learning_rate": 0.0004884892708695623,
+ "loss": 2.8464,
+ "step": 1139
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.41089968038313734,
+ "learning_rate": 0.0004877467873775402,
+ "loss": 2.85,
+ "step": 1140
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4137296894606441,
+ "learning_rate": 0.00048700433092067473,
+ "loss": 2.8786,
+ "step": 1141
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.43084525889335645,
+ "learning_rate": 0.0004862619031371019,
+ "loss": 2.9459,
+ "step": 1142
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.39377000543453844,
+ "learning_rate": 0.0004855195056648942,
+ "loss": 2.894,
+ "step": 1143
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.40649747560754007,
+ "learning_rate": 0.00048477714014205734,
+ "loss": 2.8298,
+ "step": 1144
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.40564832985169447,
+ "learning_rate": 0.00048403480820652644,
+ "loss": 2.8606,
+ "step": 1145
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3869324256610511,
+ "learning_rate": 0.0004832925114961629,
+ "loss": 2.9276,
+ "step": 1146
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.40018482454424825,
+ "learning_rate": 0.0004825502516487497,
+ "loss": 2.7854,
+ "step": 1147
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3681770390680882,
+ "learning_rate": 0.00048180803030198896,
+ "loss": 2.9494,
+ "step": 1148
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4091741057501164,
+ "learning_rate": 0.0004810658490934979,
+ "loss": 2.9523,
+ "step": 1149
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3881333886412877,
+ "learning_rate": 0.000480323709660805,
+ "loss": 2.8278,
+ "step": 1150
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3623986444493903,
+ "learning_rate": 0.0004795816136413467,
+ "loss": 2.8864,
+ "step": 1151
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3832137297909502,
+ "learning_rate": 0.00047883956267246353,
+ "loss": 2.837,
+ "step": 1152
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.36372243409129634,
+ "learning_rate": 0.00047809755839139657,
+ "loss": 2.8101,
+ "step": 1153
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.38359440118095023,
+ "learning_rate": 0.0004773556024352841,
+ "loss": 2.925,
+ "step": 1154
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4127139454884936,
+ "learning_rate": 0.00047661369644115754,
+ "loss": 2.9798,
+ "step": 1155
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.41553685212294883,
+ "learning_rate": 0.0004758718420459383,
+ "loss": 2.8386,
+ "step": 1156
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3721245901859144,
+ "learning_rate": 0.0004751300408864339,
+ "loss": 2.9505,
+ "step": 1157
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.3866810138655728,
+ "learning_rate": 0.00047438829459933414,
+ "loss": 2.8576,
+ "step": 1158
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4104936377567144,
+ "learning_rate": 0.0004736466048212082,
+ "loss": 2.8862,
+ "step": 1159
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4079077952556695,
+ "learning_rate": 0.0004729049731885002,
+ "loss": 2.8638,
+ "step": 1160
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4276568526107006,
+ "learning_rate": 0.000472163401337526,
+ "loss": 2.7613,
+ "step": 1161
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4149806073340441,
+ "learning_rate": 0.00047142189090446985,
+ "loss": 2.8906,
+ "step": 1162
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.36600931172272105,
+ "learning_rate": 0.0004706804435253802,
+ "loss": 2.7529,
+ "step": 1163
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5153788709308498,
+ "learning_rate": 0.0004699390608361665,
+ "loss": 2.8861,
+ "step": 1164
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.43055577238508813,
+ "learning_rate": 0.0004691977444725955,
+ "loss": 2.8205,
+ "step": 1165
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.380814151130713,
+ "learning_rate": 0.0004684564960702877,
+ "loss": 2.9209,
+ "step": 1166
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4459947429359946,
+ "learning_rate": 0.0004677153172647131,
+ "loss": 2.895,
+ "step": 1167
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4238971459190582,
+ "learning_rate": 0.00046697420969118894,
+ "loss": 2.8461,
+ "step": 1168
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.41030478090668826,
+ "learning_rate": 0.00046623317498487466,
+ "loss": 2.8013,
+ "step": 1169
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.3923346167883022,
+ "learning_rate": 0.0004654922147807694,
+ "loss": 2.8495,
+ "step": 1170
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4212759857757913,
+ "learning_rate": 0.00046475133071370757,
+ "loss": 2.7816,
+ "step": 1171
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4069095092091079,
+ "learning_rate": 0.00046401052441835574,
+ "loss": 2.9129,
+ "step": 1172
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.39443953718849933,
+ "learning_rate": 0.000463269797529209,
+ "loss": 2.8277,
+ "step": 1173
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.36163519774313646,
+ "learning_rate": 0.00046252915168058697,
+ "loss": 2.8383,
+ "step": 1174
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.3728628396234798,
+ "learning_rate": 0.0004617885885066305,
+ "loss": 2.8827,
+ "step": 1175
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.43603424666081314,
+ "learning_rate": 0.0004610481096412984,
+ "loss": 2.9089,
+ "step": 1176
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.40492229736228313,
+ "learning_rate": 0.000460307716718363,
+ "loss": 2.9022,
+ "step": 1177
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.42860256500215205,
+ "learning_rate": 0.0004595674113714074,
+ "loss": 2.8703,
+ "step": 1178
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4168242256825474,
+ "learning_rate": 0.0004588271952338212,
+ "loss": 2.8098,
+ "step": 1179
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4646122986014842,
+ "learning_rate": 0.00045808706993879714,
+ "loss": 2.8154,
+ "step": 1180
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4195464771120652,
+ "learning_rate": 0.00045734703711932767,
+ "loss": 2.8856,
+ "step": 1181
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.44070515224102186,
+ "learning_rate": 0.0004566070984082013,
+ "loss": 2.9588,
+ "step": 1182
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.39086899269301945,
+ "learning_rate": 0.00045586725543799865,
+ "loss": 2.9289,
+ "step": 1183
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4273573920513972,
+ "learning_rate": 0.00045512750984108937,
+ "loss": 2.9445,
+ "step": 1184
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.398509531837817,
+ "learning_rate": 0.000454387863249628,
+ "loss": 2.9052,
+ "step": 1185
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.3973653069282452,
+ "learning_rate": 0.00045364831729555096,
+ "loss": 2.8822,
+ "step": 1186
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4485964441652958,
+ "learning_rate": 0.0004529088736105721,
+ "loss": 2.9346,
+ "step": 1187
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.3678393875813182,
+ "learning_rate": 0.0004521695338261802,
+ "loss": 2.8932,
+ "step": 1188
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4397017710643942,
+ "learning_rate": 0.0004514302995736344,
+ "loss": 2.8708,
+ "step": 1189
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.41789305572656177,
+ "learning_rate": 0.0004506911724839613,
+ "loss": 2.893,
+ "step": 1190
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.3654122204678373,
+ "learning_rate": 0.0004499521541879508,
+ "loss": 2.8892,
+ "step": 1191
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.38247464524920555,
+ "learning_rate": 0.00044921324631615303,
+ "loss": 2.8602,
+ "step": 1192
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4480269844664441,
+ "learning_rate": 0.0004484744504988742,
+ "loss": 2.8985,
+ "step": 1193
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.37353822805818315,
+ "learning_rate": 0.00044773576836617336,
+ "loss": 2.8773,
+ "step": 1194
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.3813154018854077,
+ "learning_rate": 0.0004469972015478588,
+ "loss": 2.8679,
+ "step": 1195
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.48797383749248235,
+ "learning_rate": 0.0004462587516734844,
+ "loss": 2.8244,
+ "step": 1196
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.3689416653214051,
+ "learning_rate": 0.00044552042037234596,
+ "loss": 2.8975,
+ "step": 1197
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.418665441310574,
+ "learning_rate": 0.00044478220927347774,
+ "loss": 2.9374,
+ "step": 1198
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.40353400676772533,
+ "learning_rate": 0.00044404412000564875,
+ "loss": 2.8206,
+ "step": 1199
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.41611719360719146,
+ "learning_rate": 0.000443306154197359,
+ "loss": 2.9053,
+ "step": 1200
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.39164000219053585,
+ "learning_rate": 0.00044256831347683646,
+ "loss": 2.9513,
+ "step": 1201
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4844440129961595,
+ "learning_rate": 0.0004418305994720328,
+ "loss": 2.9116,
+ "step": 1202
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4835485880228465,
+ "learning_rate": 0.0004410930138106203,
+ "loss": 2.8234,
+ "step": 1203
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.3895251478960923,
+ "learning_rate": 0.000440355558119988,
+ "loss": 2.9059,
+ "step": 1204
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4608145139066173,
+ "learning_rate": 0.00043961823402723814,
+ "loss": 2.8198,
+ "step": 1205
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.554936418993156,
+ "learning_rate": 0.0004388810431591829,
+ "loss": 2.8904,
+ "step": 1206
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.39180759199289183,
+ "learning_rate": 0.0004381439871423398,
+ "loss": 2.891,
+ "step": 1207
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.38833387096894223,
+ "learning_rate": 0.00043740706760292966,
+ "loss": 2.9011,
+ "step": 1208
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.42022430073427636,
+ "learning_rate": 0.0004366702861668716,
+ "loss": 2.8597,
+ "step": 1209
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4536172590887174,
+ "learning_rate": 0.00043593364445978036,
+ "loss": 2.8599,
+ "step": 1210
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.42380521319301256,
+ "learning_rate": 0.0004351971441069622,
+ "loss": 2.8775,
+ "step": 1211
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.38192281497268593,
+ "learning_rate": 0.0004344607867334116,
+ "loss": 2.8862,
+ "step": 1212
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4362349783538025,
+ "learning_rate": 0.00043372457396380766,
+ "loss": 2.8983,
+ "step": 1213
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5038201977741879,
+ "learning_rate": 0.00043298850742251013,
+ "loss": 2.9159,
+ "step": 1214
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.467790936020045,
+ "learning_rate": 0.0004322525887335563,
+ "loss": 2.8515,
+ "step": 1215
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4064224017335376,
+ "learning_rate": 0.00043151681952065734,
+ "loss": 2.8632,
+ "step": 1216
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4094015227271524,
+ "learning_rate": 0.00043078120140719456,
+ "loss": 2.9162,
+ "step": 1217
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4115516249672138,
+ "learning_rate": 0.0004300457360162158,
+ "loss": 2.8189,
+ "step": 1218
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.3957204146499627,
+ "learning_rate": 0.0004293104249704319,
+ "loss": 2.8577,
+ "step": 1219
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.41298710564994906,
+ "learning_rate": 0.00042857526989221355,
+ "loss": 2.7539,
+ "step": 1220
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.41388533016550827,
+ "learning_rate": 0.00042784027240358674,
+ "loss": 2.9146,
+ "step": 1221
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.45881916500702685,
+ "learning_rate": 0.0004271054341262301,
+ "loss": 2.8748,
+ "step": 1222
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.43635846083798685,
+ "learning_rate": 0.000426370756681471,
+ "loss": 2.9047,
+ "step": 1223
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.3686359372261838,
+ "learning_rate": 0.0004256362416902817,
+ "loss": 2.9073,
+ "step": 1224
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4084198249407971,
+ "learning_rate": 0.00042490189077327637,
+ "loss": 2.8318,
+ "step": 1225
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.37526142914146754,
+ "learning_rate": 0.00042416770555070703,
+ "loss": 2.8786,
+ "step": 1226
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.39635100354448255,
+ "learning_rate": 0.00042343368764246,
+ "loss": 2.9044,
+ "step": 1227
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4243714166051996,
+ "learning_rate": 0.0004226998386680524,
+ "loss": 2.9257,
+ "step": 1228
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.3893252791455776,
+ "learning_rate": 0.000421966160246629,
+ "loss": 2.9097,
+ "step": 1229
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.43515285845173024,
+ "learning_rate": 0.00042123265399695783,
+ "loss": 2.7978,
+ "step": 1230
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4005941985241359,
+ "learning_rate": 0.0004204993215374273,
+ "loss": 2.8581,
+ "step": 1231
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4929084808978321,
+ "learning_rate": 0.00041976616448604226,
+ "loss": 2.9279,
+ "step": 1232
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.47573515760348134,
+ "learning_rate": 0.00041903318446042076,
+ "loss": 2.8879,
+ "step": 1233
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.44596690053165094,
+ "learning_rate": 0.00041830038307778984,
+ "loss": 2.8784,
+ "step": 1234
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.3994733451574598,
+ "learning_rate": 0.0004175677619549828,
+ "loss": 2.9049,
+ "step": 1235
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.41960436476872376,
+ "learning_rate": 0.000416835322708435,
+ "loss": 2.7931,
+ "step": 1236
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.43982871628180353,
+ "learning_rate": 0.00041610306695418056,
+ "loss": 2.8072,
+ "step": 1237
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.41053325067202734,
+ "learning_rate": 0.0004153709963078488,
+ "loss": 2.8364,
+ "step": 1238
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.37340239383348794,
+ "learning_rate": 0.0004146391123846606,
+ "loss": 2.9643,
+ "step": 1239
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.3749757111161589,
+ "learning_rate": 0.0004139074167994249,
+ "loss": 2.8324,
+ "step": 1240
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.37745330877446925,
+ "learning_rate": 0.00041317591116653486,
+ "loss": 2.847,
+ "step": 1241
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.3980163039373717,
+ "learning_rate": 0.0004124445970999648,
+ "loss": 2.839,
+ "step": 1242
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.4472144668108464,
+ "learning_rate": 0.00041171347621326627,
+ "loss": 2.9366,
+ "step": 1243
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.41809134441537793,
+ "learning_rate": 0.00041098255011956465,
+ "loss": 2.7723,
+ "step": 1244
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.39473517730035795,
+ "learning_rate": 0.00041025182043155547,
+ "loss": 2.9419,
+ "step": 1245
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.4310181751622193,
+ "learning_rate": 0.000409521288761501,
+ "loss": 2.7644,
+ "step": 1246
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.47553240342699926,
+ "learning_rate": 0.00040879095672122646,
+ "loss": 2.7773,
+ "step": 1247
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.43507628372781953,
+ "learning_rate": 0.0004080608259221167,
+ "loss": 2.858,
+ "step": 1248
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.438838578808969,
+ "learning_rate": 0.0004073308979751126,
+ "loss": 2.9197,
+ "step": 1249
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.40971759442571265,
+ "learning_rate": 0.0004066011744907074,
+ "loss": 2.8057,
+ "step": 1250
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.4275852325340933,
+ "learning_rate": 0.00040587165707894326,
+ "loss": 2.9033,
+ "step": 1251
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.43075229800247966,
+ "learning_rate": 0.0004051423473494076,
+ "loss": 2.8467,
+ "step": 1252
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.3737806409553765,
+ "learning_rate": 0.0004044132469112299,
+ "loss": 2.7773,
+ "step": 1253
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.3838569991277892,
+ "learning_rate": 0.00040368435737307733,
+ "loss": 2.9055,
+ "step": 1254
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4129138643379668,
+ "learning_rate": 0.00040295568034315224,
+ "loss": 2.9757,
+ "step": 1255
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.36658829344347627,
+ "learning_rate": 0.0004022272174291878,
+ "loss": 2.8796,
+ "step": 1256
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4302930210058821,
+ "learning_rate": 0.0004014989702384449,
+ "loss": 2.7681,
+ "step": 1257
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.40627039122585595,
+ "learning_rate": 0.00040077094037770843,
+ "loss": 2.8479,
+ "step": 1258
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.42477222721453956,
+ "learning_rate": 0.0004000431294532838,
+ "loss": 2.8965,
+ "step": 1259
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.398222091322044,
+ "learning_rate": 0.0003993155390709935,
+ "loss": 2.8767,
+ "step": 1260
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.398986250357761,
+ "learning_rate": 0.0003985881708361729,
+ "loss": 2.8722,
+ "step": 1261
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4401526940350003,
+ "learning_rate": 0.00039786102635366784,
+ "loss": 2.8865,
+ "step": 1262
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.410297890315299,
+ "learning_rate": 0.0003971341072278302,
+ "loss": 2.7957,
+ "step": 1263
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.38625401763133144,
+ "learning_rate": 0.00039640741506251457,
+ "loss": 2.7956,
+ "step": 1264
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4026726412803934,
+ "learning_rate": 0.00039568095146107495,
+ "loss": 2.9823,
+ "step": 1265
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.42475417226625595,
+ "learning_rate": 0.00039495471802636096,
+ "loss": 2.8916,
+ "step": 1266
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.45292198384515203,
+ "learning_rate": 0.0003942287163607145,
+ "loss": 2.8289,
+ "step": 1267
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4366466512470455,
+ "learning_rate": 0.0003935029480659658,
+ "loss": 2.8152,
+ "step": 1268
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4219694069194543,
+ "learning_rate": 0.00039277741474343054,
+ "loss": 2.8287,
+ "step": 1269
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.42077741606885666,
+ "learning_rate": 0.0003920521179939057,
+ "loss": 2.841,
+ "step": 1270
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.39202045587380474,
+ "learning_rate": 0.00039132705941766644,
+ "loss": 2.8883,
+ "step": 1271
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.3866556326743097,
+ "learning_rate": 0.0003906022406144624,
+ "loss": 2.7761,
+ "step": 1272
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.3829527490886568,
+ "learning_rate": 0.0003898776631835143,
+ "loss": 2.8656,
+ "step": 1273
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.42701725106974375,
+ "learning_rate": 0.00038915332872350994,
+ "loss": 2.7722,
+ "step": 1274
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4395988921298699,
+ "learning_rate": 0.00038842923883260135,
+ "loss": 2.8402,
+ "step": 1275
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.3857025657295869,
+ "learning_rate": 0.00038770539510840093,
+ "loss": 2.8637,
+ "step": 1276
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.40677023888881897,
+ "learning_rate": 0.00038698179914797783,
+ "loss": 2.8031,
+ "step": 1277
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.39828668370290216,
+ "learning_rate": 0.0003862584525478545,
+ "loss": 2.8637,
+ "step": 1278
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4191596941460243,
+ "learning_rate": 0.00038553535690400353,
+ "loss": 2.7918,
+ "step": 1279
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.3979374115529204,
+ "learning_rate": 0.00038481251381184355,
+ "loss": 2.7871,
+ "step": 1280
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4206012265745624,
+ "learning_rate": 0.00038408992486623584,
+ "loss": 2.8266,
+ "step": 1281
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.40402407739036394,
+ "learning_rate": 0.00038336759166148117,
+ "loss": 2.8587,
+ "step": 1282
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.44451271532146797,
+ "learning_rate": 0.0003826455157913159,
+ "loss": 2.8343,
+ "step": 1283
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.42517342793952295,
+ "learning_rate": 0.00038192369884890886,
+ "loss": 2.8611,
+ "step": 1284
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.42450195523648176,
+ "learning_rate": 0.00038120214242685723,
+ "loss": 2.9286,
+ "step": 1285
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.39373314963698314,
+ "learning_rate": 0.00038048084811718373,
+ "loss": 2.831,
+ "step": 1286
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.44564166910303843,
+ "learning_rate": 0.0003797598175113327,
+ "loss": 2.8863,
+ "step": 1287
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.43742296066643827,
+ "learning_rate": 0.0003790390522001662,
+ "loss": 2.8938,
+ "step": 1288
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4019467516591125,
+ "learning_rate": 0.0003783185537739615,
+ "loss": 2.8876,
+ "step": 1289
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4099479827724102,
+ "learning_rate": 0.00037759832382240697,
+ "loss": 2.8687,
+ "step": 1290
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4098825427718808,
+ "learning_rate": 0.00037687836393459826,
+ "loss": 2.8579,
+ "step": 1291
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.39923137387170976,
+ "learning_rate": 0.0003761586756990354,
+ "loss": 2.9153,
+ "step": 1292
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.38927744290991445,
+ "learning_rate": 0.0003754392607036191,
+ "loss": 2.8617,
+ "step": 1293
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.43240762985023135,
+ "learning_rate": 0.0003747201205356472,
+ "loss": 2.8841,
+ "step": 1294
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.3999127441522171,
+ "learning_rate": 0.0003740012567818111,
+ "loss": 2.8387,
+ "step": 1295
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.40878700871613505,
+ "learning_rate": 0.0003732826710281922,
+ "loss": 2.8785,
+ "step": 1296
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4186720782020169,
+ "learning_rate": 0.0003725643648602588,
+ "loss": 2.8679,
+ "step": 1297
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.4123983368700397,
+ "learning_rate": 0.0003718463398628621,
+ "loss": 2.8162,
+ "step": 1298
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.40325859676584835,
+ "learning_rate": 0.0003711285976202331,
+ "loss": 2.8984,
+ "step": 1299
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.4584152802492568,
+ "learning_rate": 0.0003704111397159787,
+ "loss": 2.862,
+ "step": 1300
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.42501258281305815,
+ "learning_rate": 0.0003696939677330788,
+ "loss": 2.8206,
+ "step": 1301
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5672441279607406,
+ "learning_rate": 0.00036897708325388213,
+ "loss": 2.8467,
+ "step": 1302
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.40222644296583954,
+ "learning_rate": 0.0003682604878601034,
+ "loss": 2.7933,
+ "step": 1303
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.3956858484430941,
+ "learning_rate": 0.000367544183132819,
+ "loss": 2.8328,
+ "step": 1304
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5226882077059875,
+ "learning_rate": 0.0003668281706524645,
+ "loss": 2.796,
+ "step": 1305
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.43092179601156916,
+ "learning_rate": 0.0003661124519988304,
+ "loss": 2.8219,
+ "step": 1306
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.4308307213663028,
+ "learning_rate": 0.00036539702875105893,
+ "loss": 2.8694,
+ "step": 1307
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.40906815353792997,
+ "learning_rate": 0.0003646819024876406,
+ "loss": 2.8039,
+ "step": 1308
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.40498842930829,
+ "learning_rate": 0.0003639670747864107,
+ "loss": 2.8348,
+ "step": 1309
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.39896560479510457,
+ "learning_rate": 0.00036325254722454584,
+ "loss": 2.8687,
+ "step": 1310
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.51098067587612,
+ "learning_rate": 0.00036253832137855997,
+ "loss": 2.843,
+ "step": 1311
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.43167292519366846,
+ "learning_rate": 0.00036182439882430183,
+ "loss": 2.8804,
+ "step": 1312
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.43489385522571117,
+ "learning_rate": 0.00036111078113695096,
+ "loss": 2.8895,
+ "step": 1313
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.3949117803483212,
+ "learning_rate": 0.0003603974698910139,
+ "loss": 2.8902,
+ "step": 1314
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.4318264898137873,
+ "learning_rate": 0.0003596844666603214,
+ "loss": 2.8678,
+ "step": 1315
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.43362424462389704,
+ "learning_rate": 0.0003589717730180245,
+ "loss": 2.8481,
+ "step": 1316
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.39467086706525173,
+ "learning_rate": 0.00035825939053659117,
+ "loss": 2.843,
+ "step": 1317
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.3904557933751639,
+ "learning_rate": 0.00035754732078780273,
+ "loss": 2.9037,
+ "step": 1318
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.41392550361381714,
+ "learning_rate": 0.00035683556534275076,
+ "loss": 2.898,
+ "step": 1319
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4092698720497119,
+ "learning_rate": 0.00035612412577183303,
+ "loss": 2.786,
+ "step": 1320
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.40765598401915487,
+ "learning_rate": 0.00035541300364475063,
+ "loss": 2.8647,
+ "step": 1321
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4466584706719818,
+ "learning_rate": 0.0003547022005305043,
+ "loss": 2.8362,
+ "step": 1322
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.40893134912072876,
+ "learning_rate": 0.0003539917179973907,
+ "loss": 2.8483,
+ "step": 1323
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4723698814934822,
+ "learning_rate": 0.00035328155761299917,
+ "loss": 2.8614,
+ "step": 1324
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.45908176338150053,
+ "learning_rate": 0.0003525717209442085,
+ "loss": 2.9046,
+ "step": 1325
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4341214041653119,
+ "learning_rate": 0.00035186220955718306,
+ "loss": 2.8322,
+ "step": 1326
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.45051115878334164,
+ "learning_rate": 0.0003511530250173696,
+ "loss": 2.8808,
+ "step": 1327
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4753597363712411,
+ "learning_rate": 0.00035044416888949364,
+ "loss": 2.8945,
+ "step": 1328
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.43288617024484705,
+ "learning_rate": 0.0003497356427375562,
+ "loss": 2.9336,
+ "step": 1329
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4252505176541065,
+ "learning_rate": 0.00034902744812483034,
+ "loss": 2.9383,
+ "step": 1330
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.47295197564693475,
+ "learning_rate": 0.00034831958661385714,
+ "loss": 2.8953,
+ "step": 1331
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5431524095190592,
+ "learning_rate": 0.0003476120597664434,
+ "loss": 2.8215,
+ "step": 1332
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.42934548437528786,
+ "learning_rate": 0.00034690486914365704,
+ "loss": 2.8921,
+ "step": 1333
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4618710841340178,
+ "learning_rate": 0.00034619801630582435,
+ "loss": 2.7901,
+ "step": 1334
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5725696134905678,
+ "learning_rate": 0.00034549150281252633,
+ "loss": 2.94,
+ "step": 1335
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.42564011332943413,
+ "learning_rate": 0.0003447853302225952,
+ "loss": 2.8457,
+ "step": 1336
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.42947565134072657,
+ "learning_rate": 0.00034407950009411126,
+ "loss": 2.9252,
+ "step": 1337
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.37748928675478943,
+ "learning_rate": 0.00034337401398439873,
+ "loss": 2.8342,
+ "step": 1338
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.47299303000167975,
+ "learning_rate": 0.00034266887345002305,
+ "loss": 2.8523,
+ "step": 1339
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.3728400628132185,
+ "learning_rate": 0.0003419640800467874,
+ "loss": 2.8454,
+ "step": 1340
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.432759651906034,
+ "learning_rate": 0.0003412596353297288,
+ "loss": 2.8104,
+ "step": 1341
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.49929673534119623,
+ "learning_rate": 0.00034055554085311493,
+ "loss": 2.9609,
+ "step": 1342
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.3861650413461158,
+ "learning_rate": 0.00033985179817044105,
+ "loss": 2.8897,
+ "step": 1343
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.3979244336720921,
+ "learning_rate": 0.0003391484088344257,
+ "loss": 2.8507,
+ "step": 1344
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.40667380012020937,
+ "learning_rate": 0.00033844537439700807,
+ "loss": 2.7677,
+ "step": 1345
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.40040043456865676,
+ "learning_rate": 0.00033774269640934445,
+ "loss": 2.8759,
+ "step": 1346
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4033092009154451,
+ "learning_rate": 0.0003370403764218045,
+ "loss": 2.8164,
+ "step": 1347
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.39505749835773485,
+ "learning_rate": 0.000336338415983968,
+ "loss": 2.836,
+ "step": 1348
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4669764200066612,
+ "learning_rate": 0.00033563681664462155,
+ "loss": 2.7758,
+ "step": 1349
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.386455573675286,
+ "learning_rate": 0.000334935579951755,
+ "loss": 2.928,
+ "step": 1350
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4719892063456551,
+ "learning_rate": 0.0003342347074525578,
+ "loss": 2.7923,
+ "step": 1351
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.39833204480699974,
+ "learning_rate": 0.0003335342006934161,
+ "loss": 2.8617,
+ "step": 1352
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.40246649098622445,
+ "learning_rate": 0.00033283406121990914,
+ "loss": 2.9585,
+ "step": 1353
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4004327295425212,
+ "learning_rate": 0.0003321342905768057,
+ "loss": 2.8671,
+ "step": 1354
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.38345632274930724,
+ "learning_rate": 0.00033143489030806086,
+ "loss": 2.8173,
+ "step": 1355
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.3734399393682334,
+ "learning_rate": 0.00033073586195681227,
+ "loss": 2.7896,
+ "step": 1356
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4176151192687323,
+ "learning_rate": 0.00033003720706537736,
+ "loss": 2.8016,
+ "step": 1357
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4806161139081573,
+ "learning_rate": 0.0003293389271752492,
+ "loss": 2.7485,
+ "step": 1358
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.3788205190672334,
+ "learning_rate": 0.00032864102382709374,
+ "loss": 2.776,
+ "step": 1359
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5405653240352252,
+ "learning_rate": 0.000327943498560746,
+ "loss": 2.8776,
+ "step": 1360
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4586316269628954,
+ "learning_rate": 0.00032724635291520694,
+ "loss": 2.8161,
+ "step": 1361
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.45757740484475795,
+ "learning_rate": 0.00032654958842863967,
+ "loss": 2.826,
+ "step": 1362
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.41480031779682386,
+ "learning_rate": 0.0003258532066383667,
+ "loss": 2.7777,
+ "step": 1363
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.3870499822387609,
+ "learning_rate": 0.000325157209080866,
+ "loss": 2.8366,
+ "step": 1364
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4867557609042337,
+ "learning_rate": 0.00032446159729176743,
+ "loss": 2.9178,
+ "step": 1365
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.508289674653372,
+ "learning_rate": 0.0003237663728058502,
+ "loss": 2.87,
+ "step": 1366
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.39965771536427386,
+ "learning_rate": 0.0003230715371570389,
+ "loss": 2.7451,
+ "step": 1367
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4166274911778635,
+ "learning_rate": 0.00032237709187839996,
+ "loss": 2.8655,
+ "step": 1368
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.44434144788249147,
+ "learning_rate": 0.0003216830385021388,
+ "loss": 2.971,
+ "step": 1369
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.422532740504434,
+ "learning_rate": 0.0003209893785595959,
+ "loss": 2.861,
+ "step": 1370
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.38718820064757564,
+ "learning_rate": 0.00032029611358124366,
+ "loss": 2.8735,
+ "step": 1371
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4320672149790635,
+ "learning_rate": 0.00031960324509668336,
+ "loss": 2.8454,
+ "step": 1372
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.3954883541586397,
+ "learning_rate": 0.0003189107746346412,
+ "loss": 2.8932,
+ "step": 1373
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.41659381546306623,
+ "learning_rate": 0.0003182187037229653,
+ "loss": 2.8636,
+ "step": 1374
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.39489151362048697,
+ "learning_rate": 0.0003175270338886221,
+ "loss": 2.8924,
+ "step": 1375
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.490354655003526,
+ "learning_rate": 0.00031683576665769345,
+ "loss": 2.8031,
+ "step": 1376
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.38569249858457105,
+ "learning_rate": 0.0003161449035553724,
+ "loss": 2.8581,
+ "step": 1377
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.39960014216299583,
+ "learning_rate": 0.00031545444610596077,
+ "loss": 2.8176,
+ "step": 1378
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.505547989014189,
+ "learning_rate": 0.000314764395832865,
+ "loss": 2.8852,
+ "step": 1379
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.41269847093888684,
+ "learning_rate": 0.0003140747542585934,
+ "loss": 2.806,
+ "step": 1380
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.43439634013895967,
+ "learning_rate": 0.00031338552290475266,
+ "loss": 2.8296,
+ "step": 1381
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.38429508685546665,
+ "learning_rate": 0.00031269670329204396,
+ "loss": 2.8459,
+ "step": 1382
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4688947078304626,
+ "learning_rate": 0.0003120082969402604,
+ "loss": 2.8387,
+ "step": 1383
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4531077909837109,
+ "learning_rate": 0.00031132030536828314,
+ "loss": 2.9406,
+ "step": 1384
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4296973212769677,
+ "learning_rate": 0.00031063273009407805,
+ "loss": 2.8741,
+ "step": 1385
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.42991735097780975,
+ "learning_rate": 0.00030994557263469265,
+ "loss": 2.8899,
+ "step": 1386
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.42140765075284814,
+ "learning_rate": 0.0003092588345062526,
+ "loss": 2.8638,
+ "step": 1387
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.42564027640763136,
+ "learning_rate": 0.0003085725172239582,
+ "loss": 2.8512,
+ "step": 1388
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.38041119917267907,
+ "learning_rate": 0.0003078866223020815,
+ "loss": 2.9406,
+ "step": 1389
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.39724719191947355,
+ "learning_rate": 0.0003072011512539624,
+ "loss": 2.842,
+ "step": 1390
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.46213300072498303,
+ "learning_rate": 0.00030651610559200574,
+ "loss": 2.8141,
+ "step": 1391
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4269601020702076,
+ "learning_rate": 0.00030583148682767757,
+ "loss": 2.8632,
+ "step": 1392
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.42721356898213075,
+ "learning_rate": 0.00030514729647150243,
+ "loss": 2.8487,
+ "step": 1393
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.39075610985469733,
+ "learning_rate": 0.0003044635360330592,
+ "loss": 2.8271,
+ "step": 1394
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4068375097347185,
+ "learning_rate": 0.00030378020702097845,
+ "loss": 2.8382,
+ "step": 1395
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.3856224411767291,
+ "learning_rate": 0.000303097310942939,
+ "loss": 2.7415,
+ "step": 1396
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.42568823205453826,
+ "learning_rate": 0.0003024148493056641,
+ "loss": 2.8698,
+ "step": 1397
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4107661181357771,
+ "learning_rate": 0.00030173282361491865,
+ "loss": 2.9014,
+ "step": 1398
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.43989697641767794,
+ "learning_rate": 0.0003010512353755057,
+ "loss": 2.9073,
+ "step": 1399
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.43033116005428784,
+ "learning_rate": 0.00030037008609126313,
+ "loss": 2.8812,
+ "step": 1400
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4268261668699936,
+ "learning_rate": 0.0002996893772650602,
+ "loss": 2.8557,
+ "step": 1401
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.40155348551455605,
+ "learning_rate": 0.0002990091103987945,
+ "loss": 2.8763,
+ "step": 1402
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5227263064686254,
+ "learning_rate": 0.0002983292869933886,
+ "loss": 2.9703,
+ "step": 1403
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.4063658303440325,
+ "learning_rate": 0.0002976499085487862,
+ "loss": 2.8599,
+ "step": 1404
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.38923623560077386,
+ "learning_rate": 0.00029697097656394963,
+ "loss": 2.8616,
+ "step": 1405
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.46671775849817987,
+ "learning_rate": 0.00029629249253685595,
+ "loss": 2.8186,
+ "step": 1406
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.44194043172028435,
+ "learning_rate": 0.00029561445796449416,
+ "loss": 2.8597,
+ "step": 1407
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.42230665087039493,
+ "learning_rate": 0.0002949368743428612,
+ "loss": 2.7372,
+ "step": 1408
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.41662814558611877,
+ "learning_rate": 0.0002942597431669593,
+ "loss": 2.8812,
+ "step": 1409
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.43204116604041587,
+ "learning_rate": 0.0002935830659307924,
+ "loss": 2.8672,
+ "step": 1410
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4613382449135805,
+ "learning_rate": 0.0002929068441273629,
+ "loss": 2.8182,
+ "step": 1411
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.46921148444460486,
+ "learning_rate": 0.0002922310792486681,
+ "loss": 2.8232,
+ "step": 1412
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4166530061214115,
+ "learning_rate": 0.00029155577278569745,
+ "loss": 2.8029,
+ "step": 1413
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5199847261496212,
+ "learning_rate": 0.00029088092622842895,
+ "loss": 2.7903,
+ "step": 1414
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4540580906235668,
+ "learning_rate": 0.00029020654106582544,
+ "loss": 2.8631,
+ "step": 1415
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.398617503021683,
+ "learning_rate": 0.0002895326187858326,
+ "loss": 2.8537,
+ "step": 1416
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.42837287159998044,
+ "learning_rate": 0.00028885916087537377,
+ "loss": 2.7813,
+ "step": 1417
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.43623456094526947,
+ "learning_rate": 0.00028818616882034877,
+ "loss": 2.8579,
+ "step": 1418
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.49640853495662185,
+ "learning_rate": 0.0002875136441056286,
+ "loss": 2.7958,
+ "step": 1419
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.413602325255384,
+ "learning_rate": 0.000286841588215054,
+ "loss": 2.8097,
+ "step": 1420
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.43697148166458005,
+ "learning_rate": 0.0002861700026314308,
+ "loss": 2.8738,
+ "step": 1421
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4738988260201328,
+ "learning_rate": 0.00028549888883652686,
+ "loss": 2.8756,
+ "step": 1422
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.44073896946993113,
+ "learning_rate": 0.00028482824831107,
+ "loss": 2.757,
+ "step": 1423
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4918229259462307,
+ "learning_rate": 0.000284158082534743,
+ "loss": 2.8785,
+ "step": 1424
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4919377655877921,
+ "learning_rate": 0.00028348839298618177,
+ "loss": 2.8308,
+ "step": 1425
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4400655962672087,
+ "learning_rate": 0.0002828191811429709,
+ "loss": 2.8156,
+ "step": 1426
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4716063266487306,
+ "learning_rate": 0.00028215044848164164,
+ "loss": 2.9037,
+ "step": 1427
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.471936739831525,
+ "learning_rate": 0.00028148219647766747,
+ "loss": 2.8375,
+ "step": 1428
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4371603736900249,
+ "learning_rate": 0.00028081442660546124,
+ "loss": 2.8972,
+ "step": 1429
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.3834907424533424,
+ "learning_rate": 0.0002801471403383728,
+ "loss": 2.816,
+ "step": 1430
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.40710421298387783,
+ "learning_rate": 0.00027948033914868415,
+ "loss": 2.7918,
+ "step": 1431
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4271142691852784,
+ "learning_rate": 0.00027881402450760775,
+ "loss": 2.8332,
+ "step": 1432
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.38296389552988713,
+ "learning_rate": 0.00027814819788528165,
+ "loss": 2.7961,
+ "step": 1433
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.43920697626706573,
+ "learning_rate": 0.00027748286075076836,
+ "loss": 2.8827,
+ "step": 1434
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.42225567216556265,
+ "learning_rate": 0.00027681801457204937,
+ "loss": 2.8096,
+ "step": 1435
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4767178793075064,
+ "learning_rate": 0.00027615366081602306,
+ "loss": 2.8481,
+ "step": 1436
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.476530859967564,
+ "learning_rate": 0.0002754898009485021,
+ "loss": 2.908,
+ "step": 1437
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4475715733569199,
+ "learning_rate": 0.0002748264364342085,
+ "loss": 2.8318,
+ "step": 1438
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4819609379487271,
+ "learning_rate": 0.00027416356873677204,
+ "loss": 2.8805,
+ "step": 1439
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5216746852516032,
+ "learning_rate": 0.0002735011993187258,
+ "loss": 2.85,
+ "step": 1440
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4251283429039105,
+ "learning_rate": 0.0002728393296415042,
+ "loss": 2.7471,
+ "step": 1441
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.45951821914482954,
+ "learning_rate": 0.00027217796116543817,
+ "loss": 2.8168,
+ "step": 1442
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.42951255838420643,
+ "learning_rate": 0.0002715170953497532,
+ "loss": 2.8411,
+ "step": 1443
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4808684045625494,
+ "learning_rate": 0.00027085673365256614,
+ "loss": 2.8063,
+ "step": 1444
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4496370251290002,
+ "learning_rate": 0.00027019687753088075,
+ "loss": 2.7853,
+ "step": 1445
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.39229747199363785,
+ "learning_rate": 0.00026953752844058597,
+ "loss": 2.9101,
+ "step": 1446
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.47770895957354437,
+ "learning_rate": 0.0002688786878364516,
+ "loss": 2.7956,
+ "step": 1447
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4618788598588267,
+ "learning_rate": 0.00026822035717212597,
+ "loss": 2.7435,
+ "step": 1448
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.41637538170572225,
+ "learning_rate": 0.00026756253790013193,
+ "loss": 2.8563,
+ "step": 1449
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4068260750336872,
+ "learning_rate": 0.0002669052314718641,
+ "loss": 2.7667,
+ "step": 1450
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4785129519370895,
+ "learning_rate": 0.0002662484393375855,
+ "loss": 2.8727,
+ "step": 1451
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.44032862240764553,
+ "learning_rate": 0.00026559216294642446,
+ "loss": 2.8685,
+ "step": 1452
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.44151260184130114,
+ "learning_rate": 0.0002649364037463718,
+ "loss": 2.8865,
+ "step": 1453
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.41572330727586726,
+ "learning_rate": 0.0002642811631842764,
+ "loss": 2.8561,
+ "step": 1454
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4202519727310426,
+ "learning_rate": 0.0002636264427058439,
+ "loss": 2.8416,
+ "step": 1455
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4384973735519534,
+ "learning_rate": 0.00026297224375563123,
+ "loss": 2.9354,
+ "step": 1456
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.45861878231947134,
+ "learning_rate": 0.00026231856777704575,
+ "loss": 2.7897,
+ "step": 1457
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.3835256946837243,
+ "learning_rate": 0.00026166541621234026,
+ "loss": 2.7988,
+ "step": 1458
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4506775901403342,
+ "learning_rate": 0.00026101279050261045,
+ "loss": 2.8568,
+ "step": 1459
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.48890854220382163,
+ "learning_rate": 0.00026036069208779247,
+ "loss": 2.8419,
+ "step": 1460
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.39727902857277486,
+ "learning_rate": 0.0002597091224066581,
+ "loss": 2.7941,
+ "step": 1461
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.49562123357546695,
+ "learning_rate": 0.00025905808289681365,
+ "loss": 2.824,
+ "step": 1462
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5146051078777808,
+ "learning_rate": 0.0002584075749946946,
+ "loss": 2.7678,
+ "step": 1463
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.44136101510848025,
+ "learning_rate": 0.00025775760013556424,
+ "loss": 2.8725,
+ "step": 1464
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.43514473433726836,
+ "learning_rate": 0.0002571081597535095,
+ "loss": 2.7764,
+ "step": 1465
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.40243598604249126,
+ "learning_rate": 0.00025645925528143776,
+ "loss": 2.8101,
+ "step": 1466
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.39849344825214716,
+ "learning_rate": 0.0002558108881510747,
+ "loss": 2.9069,
+ "step": 1467
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4626291624062401,
+ "learning_rate": 0.00025516305979295963,
+ "loss": 2.849,
+ "step": 1468
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.3977577037516438,
+ "learning_rate": 0.0002545157716364439,
+ "loss": 2.8416,
+ "step": 1469
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.4105138707247631,
+ "learning_rate": 0.00025386902510968624,
+ "loss": 2.7966,
+ "step": 1470
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.45367342784470743,
+ "learning_rate": 0.00025322282163965095,
+ "loss": 2.866,
+ "step": 1471
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.38161719391982174,
+ "learning_rate": 0.00025257716265210384,
+ "loss": 2.8293,
+ "step": 1472
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.43707250663138786,
+ "learning_rate": 0.0002519320495716091,
+ "loss": 2.7776,
+ "step": 1473
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4675949291831829,
+ "learning_rate": 0.00025128748382152716,
+ "loss": 2.8682,
+ "step": 1474
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.41030051821256924,
+ "learning_rate": 0.00025064346682401016,
+ "loss": 2.7459,
+ "step": 1475
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4543518896350469,
+ "learning_rate": 0.0002500000000000001,
+ "loss": 2.912,
+ "step": 1476
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5135507139019255,
+ "learning_rate": 0.0002493570847692246,
+ "loss": 2.8012,
+ "step": 1477
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4140802840133955,
+ "learning_rate": 0.00024871472255019424,
+ "loss": 2.8235,
+ "step": 1478
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.446673372683363,
+ "learning_rate": 0.00024807291476019994,
+ "loss": 2.8646,
+ "step": 1479
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.49841903985559716,
+ "learning_rate": 0.00024743166281530877,
+ "loss": 2.8319,
+ "step": 1480
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.45225171472909886,
+ "learning_rate": 0.000246790968130362,
+ "loss": 2.8378,
+ "step": 1481
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.42176910195486855,
+ "learning_rate": 0.0002461508321189706,
+ "loss": 2.8348,
+ "step": 1482
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.3906736069478268,
+ "learning_rate": 0.00024551125619351385,
+ "loss": 2.8002,
+ "step": 1483
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4934530980673722,
+ "learning_rate": 0.00024487224176513453,
+ "loss": 2.9077,
+ "step": 1484
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4467760395137845,
+ "learning_rate": 0.0002442337902437365,
+ "loss": 2.776,
+ "step": 1485
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.39316263435175924,
+ "learning_rate": 0.0002435959030379824,
+ "loss": 2.8338,
+ "step": 1486
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4200212856171051,
+ "learning_rate": 0.00024295858155528888,
+ "loss": 2.8391,
+ "step": 1487
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5199830937100344,
+ "learning_rate": 0.00024232182720182523,
+ "loss": 2.8436,
+ "step": 1488
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5371367301802311,
+ "learning_rate": 0.00024168564138250855,
+ "loss": 2.8797,
+ "step": 1489
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4636634621159753,
+ "learning_rate": 0.00024105002550100246,
+ "loss": 2.858,
+ "step": 1490
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.42950571841807966,
+ "learning_rate": 0.00024041498095971254,
+ "loss": 2.8027,
+ "step": 1491
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.49024170673205253,
+ "learning_rate": 0.0002397805091597835,
+ "loss": 2.8464,
+ "step": 1492
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.4612469485240091,
+ "learning_rate": 0.0002391466115010973,
+ "loss": 2.8593,
+ "step": 1493
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.42285342507221957,
+ "learning_rate": 0.00023851328938226808,
+ "loss": 2.7818,
+ "step": 1494
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4452892615556217,
+ "learning_rate": 0.00023788054420064109,
+ "loss": 2.8751,
+ "step": 1495
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4375655465619132,
+ "learning_rate": 0.00023724837735228773,
+ "loss": 2.8519,
+ "step": 1496
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.45268586107642655,
+ "learning_rate": 0.00023661679023200422,
+ "loss": 2.8665,
+ "step": 1497
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4100122367479021,
+ "learning_rate": 0.00023598578423330714,
+ "loss": 2.7833,
+ "step": 1498
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.415894593525007,
+ "learning_rate": 0.00023535536074843083,
+ "loss": 2.8633,
+ "step": 1499
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.44911451367820737,
+ "learning_rate": 0.00023472552116832502,
+ "loss": 2.8396,
+ "step": 1500
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4048526598918118,
+ "learning_rate": 0.0002340962668826503,
+ "loss": 2.7572,
+ "step": 1501
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.44482654597488075,
+ "learning_rate": 0.00023346759927977663,
+ "loss": 2.7903,
+ "step": 1502
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.43738033981787744,
+ "learning_rate": 0.0002328395197467789,
+ "loss": 2.8115,
+ "step": 1503
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4373710004221427,
+ "learning_rate": 0.00023221202966943515,
+ "loss": 2.8994,
+ "step": 1504
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.49380386963728856,
+ "learning_rate": 0.0002315851304322223,
+ "loss": 2.9155,
+ "step": 1505
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.42362469256303953,
+ "learning_rate": 0.0002309588234183137,
+ "loss": 2.8235,
+ "step": 1506
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.47696638034816363,
+ "learning_rate": 0.00023033311000957653,
+ "loss": 2.8058,
+ "step": 1507
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4924779787954233,
+ "learning_rate": 0.00022970799158656758,
+ "loss": 2.8008,
+ "step": 1508
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4511821400093407,
+ "learning_rate": 0.0002290834695285316,
+ "loss": 2.8819,
+ "step": 1509
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4151014541965032,
+ "learning_rate": 0.00022845954521339678,
+ "loss": 2.8009,
+ "step": 1510
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.44150614956352513,
+ "learning_rate": 0.0002278362200177732,
+ "loss": 2.8229,
+ "step": 1511
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5284680687197258,
+ "learning_rate": 0.00022721349531694852,
+ "loss": 2.7925,
+ "step": 1512
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.45470669512985973,
+ "learning_rate": 0.0002265913724848855,
+ "loss": 2.8367,
+ "step": 1513
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.43122842388539895,
+ "learning_rate": 0.00022596985289421946,
+ "loss": 2.9102,
+ "step": 1514
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.4508526407918431,
+ "learning_rate": 0.00022534893791625405,
+ "loss": 2.7356,
+ "step": 1515
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4191787332817626,
+ "learning_rate": 0.00022472862892095968,
+ "loss": 2.729,
+ "step": 1516
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4270405900453616,
+ "learning_rate": 0.00022410892727696896,
+ "loss": 2.8606,
+ "step": 1517
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5224003538039955,
+ "learning_rate": 0.0002234898343515751,
+ "loss": 2.8547,
+ "step": 1518
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.45371515647572447,
+ "learning_rate": 0.00022287135151072792,
+ "loss": 2.7276,
+ "step": 1519
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.42289348038274605,
+ "learning_rate": 0.00022225348011903096,
+ "loss": 2.7945,
+ "step": 1520
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4353152147000041,
+ "learning_rate": 0.0002216362215397393,
+ "loss": 2.7592,
+ "step": 1521
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.49681411858589575,
+ "learning_rate": 0.00022101957713475522,
+ "loss": 2.9466,
+ "step": 1522
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4548406810979048,
+ "learning_rate": 0.00022040354826462666,
+ "loss": 2.8781,
+ "step": 1523
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.435985058905358,
+ "learning_rate": 0.0002197881362885426,
+ "loss": 2.7369,
+ "step": 1524
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4260823406107798,
+ "learning_rate": 0.0002191733425643318,
+ "loss": 2.803,
+ "step": 1525
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.44252913487503076,
+ "learning_rate": 0.00021855916844845826,
+ "loss": 2.8544,
+ "step": 1526
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4233134243183541,
+ "learning_rate": 0.00021794561529601898,
+ "loss": 2.8537,
+ "step": 1527
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4630873707205013,
+ "learning_rate": 0.00021733268446074138,
+ "loss": 2.8531,
+ "step": 1528
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4844155657813618,
+ "learning_rate": 0.00021672037729497917,
+ "loss": 2.8798,
+ "step": 1529
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.44221319299145534,
+ "learning_rate": 0.0002161086951497106,
+ "loss": 2.8112,
+ "step": 1530
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.4897899397688967,
+ "learning_rate": 0.00021549763937453442,
+ "loss": 2.7799,
+ "step": 1531
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.39735224290387444,
+ "learning_rate": 0.00021488721131766736,
+ "loss": 2.8019,
+ "step": 1532
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.459572437146095,
+ "learning_rate": 0.00021427741232594183,
+ "loss": 2.8545,
+ "step": 1533
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.49385232493470166,
+ "learning_rate": 0.0002136682437448013,
+ "loss": 2.9065,
+ "step": 1534
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.39989003214468966,
+ "learning_rate": 0.0002130597069182994,
+ "loss": 2.7646,
+ "step": 1535
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.44383061119095485,
+ "learning_rate": 0.0002124518031890948,
+ "loss": 2.7784,
+ "step": 1536
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.41104243275716257,
+ "learning_rate": 0.0002118445338984502,
+ "loss": 2.8613,
+ "step": 1537
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.41272313802844207,
+ "learning_rate": 0.00021123790038622808,
+ "loss": 2.7501,
+ "step": 1538
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5063537515210857,
+ "learning_rate": 0.0002106319039908879,
+ "loss": 2.8675,
+ "step": 1539
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.43212692706818656,
+ "learning_rate": 0.00021002654604948412,
+ "loss": 2.8748,
+ "step": 1540
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4178740626929161,
+ "learning_rate": 0.00020942182789766172,
+ "loss": 2.7987,
+ "step": 1541
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4302780249945131,
+ "learning_rate": 0.00020881775086965492,
+ "loss": 2.7788,
+ "step": 1542
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4953069460510728,
+ "learning_rate": 0.00020821431629828246,
+ "loss": 2.7585,
+ "step": 1543
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.49846499060322086,
+ "learning_rate": 0.00020761152551494643,
+ "loss": 2.9739,
+ "step": 1544
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.43668240721068313,
+ "learning_rate": 0.00020700937984962798,
+ "loss": 2.7768,
+ "step": 1545
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.3918309029936311,
+ "learning_rate": 0.0002064078806308848,
+ "loss": 2.7656,
+ "step": 1546
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.45905054533593914,
+ "learning_rate": 0.00020580702918584882,
+ "loss": 2.8824,
+ "step": 1547
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4314286175365896,
+ "learning_rate": 0.000205206826840222,
+ "loss": 2.8113,
+ "step": 1548
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.48726337934214575,
+ "learning_rate": 0.0002046072749182751,
+ "loss": 2.8023,
+ "step": 1549
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4635806199546038,
+ "learning_rate": 0.00020400837474284273,
+ "loss": 2.7463,
+ "step": 1550
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4202955207505032,
+ "learning_rate": 0.0002034101276353224,
+ "loss": 2.7783,
+ "step": 1551
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4477981806596739,
+ "learning_rate": 0.00020281253491567027,
+ "loss": 2.8264,
+ "step": 1552
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.44926886188322424,
+ "learning_rate": 0.0002022155979023984,
+ "loss": 2.9089,
+ "step": 1553
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4063415627617428,
+ "learning_rate": 0.000201619317912573,
+ "loss": 2.8626,
+ "step": 1554
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.3870196196154553,
+ "learning_rate": 0.00020102369626180962,
+ "loss": 2.8577,
+ "step": 1555
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4192907923775115,
+ "learning_rate": 0.0002004287342642721,
+ "loss": 2.8281,
+ "step": 1556
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.42612932363156625,
+ "learning_rate": 0.00019983443323266824,
+ "loss": 2.7577,
+ "step": 1557
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.44299732076482246,
+ "learning_rate": 0.00019924079447824805,
+ "loss": 2.8363,
+ "step": 1558
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.45901705782141033,
+ "learning_rate": 0.00019864781931079977,
+ "loss": 2.8667,
+ "step": 1559
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4802865963867156,
+ "learning_rate": 0.00019805550903864773,
+ "loss": 2.8887,
+ "step": 1560
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.46707678704358596,
+ "learning_rate": 0.00019746386496864948,
+ "loss": 2.8483,
+ "step": 1561
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4357030879334335,
+ "learning_rate": 0.00019687288840619226,
+ "loss": 2.8279,
+ "step": 1562
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.482521040391164,
+ "learning_rate": 0.0001962825806551911,
+ "loss": 2.8291,
+ "step": 1563
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.506258857315303,
+ "learning_rate": 0.0001956929430180846,
+ "loss": 2.8352,
+ "step": 1564
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4334437086466586,
+ "learning_rate": 0.00019510397679583374,
+ "loss": 2.7681,
+ "step": 1565
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5004398229737487,
+ "learning_rate": 0.0001945156832879174,
+ "loss": 2.7727,
+ "step": 1566
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4554659692418587,
+ "learning_rate": 0.00019392806379233036,
+ "loss": 2.9161,
+ "step": 1567
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.44164555102422776,
+ "learning_rate": 0.00019334111960558065,
+ "loss": 2.8027,
+ "step": 1568
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.44457395886908085,
+ "learning_rate": 0.00019275485202268573,
+ "loss": 2.8297,
+ "step": 1569
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4552575372831752,
+ "learning_rate": 0.00019216926233717085,
+ "loss": 2.7667,
+ "step": 1570
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.45928708208884417,
+ "learning_rate": 0.00019158435184106498,
+ "loss": 2.8685,
+ "step": 1571
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4125533504793555,
+ "learning_rate": 0.00019100012182489905,
+ "loss": 2.7901,
+ "step": 1572
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.43278167732587713,
+ "learning_rate": 0.00019041657357770226,
+ "loss": 2.7577,
+ "step": 1573
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4202335499141103,
+ "learning_rate": 0.00018983370838699943,
+ "loss": 2.8456,
+ "step": 1574
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4934621448278314,
+ "learning_rate": 0.00018925152753880892,
+ "loss": 2.9006,
+ "step": 1575
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.41834261951638096,
+ "learning_rate": 0.00018867003231763847,
+ "loss": 2.7953,
+ "step": 1576
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4701317180347522,
+ "learning_rate": 0.00018808922400648375,
+ "loss": 2.8816,
+ "step": 1577
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4796048776275349,
+ "learning_rate": 0.00018750910388682428,
+ "loss": 2.8688,
+ "step": 1578
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.44152726769684386,
+ "learning_rate": 0.00018692967323862125,
+ "loss": 2.7641,
+ "step": 1579
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4180808107194215,
+ "learning_rate": 0.00018635093334031517,
+ "loss": 2.7514,
+ "step": 1580
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4676220151718303,
+ "learning_rate": 0.00018577288546882165,
+ "loss": 2.8132,
+ "step": 1581
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4763888529498976,
+ "learning_rate": 0.00018519553089953023,
+ "loss": 2.8464,
+ "step": 1582
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4283110883481533,
+ "learning_rate": 0.0001846188709063001,
+ "loss": 2.8538,
+ "step": 1583
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.43574896069904634,
+ "learning_rate": 0.00018404290676145857,
+ "loss": 2.7339,
+ "step": 1584
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4383862741374429,
+ "learning_rate": 0.00018346763973579722,
+ "loss": 2.8307,
+ "step": 1585
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.41610288936026485,
+ "learning_rate": 0.00018289307109856939,
+ "loss": 2.9015,
+ "step": 1586
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.45728636920415994,
+ "learning_rate": 0.0001823192021174882,
+ "loss": 2.77,
+ "step": 1587
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.41015777332159264,
+ "learning_rate": 0.0001817460340587223,
+ "loss": 2.8129,
+ "step": 1588
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4700771653051499,
+ "learning_rate": 0.00018117356818689445,
+ "loss": 2.8377,
+ "step": 1589
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.3969480158562193,
+ "learning_rate": 0.00018060180576507756,
+ "loss": 2.7744,
+ "step": 1590
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4215125718776959,
+ "learning_rate": 0.00018003074805479313,
+ "loss": 2.7411,
+ "step": 1591
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.41170555957205435,
+ "learning_rate": 0.00017946039631600724,
+ "loss": 2.8282,
+ "step": 1592
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4476460297125828,
+ "learning_rate": 0.00017889075180712837,
+ "loss": 2.848,
+ "step": 1593
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4609664017586798,
+ "learning_rate": 0.00017832181578500512,
+ "loss": 2.6379,
+ "step": 1594
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5100574353575766,
+ "learning_rate": 0.0001777535895049221,
+ "loss": 2.917,
+ "step": 1595
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5212223462535124,
+ "learning_rate": 0.0001771860742205988,
+ "loss": 2.8791,
+ "step": 1596
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4990550594741003,
+ "learning_rate": 0.00017661927118418525,
+ "loss": 2.8238,
+ "step": 1597
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.453098815783682,
+ "learning_rate": 0.00017605318164626066,
+ "loss": 2.7915,
+ "step": 1598
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4576865375064053,
+ "learning_rate": 0.00017548780685582949,
+ "loss": 2.8384,
+ "step": 1599
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.44159885200539023,
+ "learning_rate": 0.00017492314806031922,
+ "loss": 2.8581,
+ "step": 1600
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4920325213521335,
+ "learning_rate": 0.00017435920650557806,
+ "loss": 2.7771,
+ "step": 1601
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4407115774814077,
+ "learning_rate": 0.00017379598343587112,
+ "loss": 2.7799,
+ "step": 1602
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.44994919977516595,
+ "learning_rate": 0.00017323348009387878,
+ "loss": 2.7912,
+ "step": 1603
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.39850967581979685,
+ "learning_rate": 0.0001726716977206929,
+ "loss": 2.8078,
+ "step": 1604
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.49110026121730044,
+ "learning_rate": 0.00017211063755581525,
+ "loss": 2.9171,
+ "step": 1605
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4806203599631984,
+ "learning_rate": 0.0001715503008371536,
+ "loss": 2.7964,
+ "step": 1606
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5222063200901961,
+ "learning_rate": 0.0001709906888010196,
+ "loss": 2.8944,
+ "step": 1607
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4601006542127793,
+ "learning_rate": 0.00017043180268212638,
+ "loss": 2.8248,
+ "step": 1608
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.46525413628258583,
+ "learning_rate": 0.00016987364371358481,
+ "loss": 2.7717,
+ "step": 1609
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4457321134344966,
+ "learning_rate": 0.00016931621312690214,
+ "loss": 2.9579,
+ "step": 1610
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4812487897895445,
+ "learning_rate": 0.00016875951215197777,
+ "loss": 2.7783,
+ "step": 1611
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.45089824053523875,
+ "learning_rate": 0.00016820354201710214,
+ "loss": 2.8838,
+ "step": 1612
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4549424985949396,
+ "learning_rate": 0.00016764830394895203,
+ "loss": 2.7704,
+ "step": 1613
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.45767507369640725,
+ "learning_rate": 0.00016709379917259027,
+ "loss": 2.8077,
+ "step": 1614
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4958870476023741,
+ "learning_rate": 0.00016654002891146091,
+ "loss": 2.805,
+ "step": 1615
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4477627022686802,
+ "learning_rate": 0.00016598699438738764,
+ "loss": 2.8301,
+ "step": 1616
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4873190520972597,
+ "learning_rate": 0.00016543469682057105,
+ "loss": 2.8935,
+ "step": 1617
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.44566791761536084,
+ "learning_rate": 0.00016488313742958526,
+ "loss": 2.784,
+ "step": 1618
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4784282115787143,
+ "learning_rate": 0.00016433231743137646,
+ "loss": 2.8267,
+ "step": 1619
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4927562327997018,
+ "learning_rate": 0.0001637822380412584,
+ "loss": 2.8019,
+ "step": 1620
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4959128970168383,
+ "learning_rate": 0.00016323290047291195,
+ "loss": 2.892,
+ "step": 1621
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4321885418017388,
+ "learning_rate": 0.0001626843059383803,
+ "loss": 2.847,
+ "step": 1622
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5113852262920446,
+ "learning_rate": 0.00016213645564806752,
+ "loss": 2.8327,
+ "step": 1623
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.4011880919991537,
+ "learning_rate": 0.0001615893508107359,
+ "loss": 2.846,
+ "step": 1624
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.46889484683399063,
+ "learning_rate": 0.00016104299263350252,
+ "loss": 2.7714,
+ "step": 1625
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4500796591063575,
+ "learning_rate": 0.00016049738232183758,
+ "loss": 2.8126,
+ "step": 1626
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5021922601428124,
+ "learning_rate": 0.0001599525210795606,
+ "loss": 2.7702,
+ "step": 1627
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4861784552027496,
+ "learning_rate": 0.00015940841010883889,
+ "loss": 2.8101,
+ "step": 1628
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4944379748156068,
+ "learning_rate": 0.00015886505061018413,
+ "loss": 2.8636,
+ "step": 1629
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4824283622704169,
+ "learning_rate": 0.0001583224437824498,
+ "loss": 2.8404,
+ "step": 1630
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.39538892048797597,
+ "learning_rate": 0.0001577805908228293,
+ "loss": 2.7958,
+ "step": 1631
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.43348128688965387,
+ "learning_rate": 0.00015723949292685191,
+ "loss": 2.8297,
+ "step": 1632
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4603016293236658,
+ "learning_rate": 0.0001566991512883818,
+ "loss": 2.7597,
+ "step": 1633
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4677621603270386,
+ "learning_rate": 0.00015615956709961378,
+ "loss": 2.8311,
+ "step": 1634
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4280568442610595,
+ "learning_rate": 0.00015562074155107215,
+ "loss": 2.8824,
+ "step": 1635
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4589596583491055,
+ "learning_rate": 0.0001550826758316068,
+ "loss": 2.8603,
+ "step": 1636
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4401931819555108,
+ "learning_rate": 0.00015454537112839122,
+ "loss": 2.8764,
+ "step": 1637
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.46698260281646314,
+ "learning_rate": 0.00015400882862692033,
+ "loss": 2.7629,
+ "step": 1638
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4531177768614569,
+ "learning_rate": 0.00015347304951100665,
+ "loss": 2.7922,
+ "step": 1639
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5201678165941565,
+ "learning_rate": 0.00015293803496277907,
+ "loss": 2.7698,
+ "step": 1640
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.45198714862381784,
+ "learning_rate": 0.00015240378616267886,
+ "loss": 2.8414,
+ "step": 1641
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4692806831081012,
+ "learning_rate": 0.00015187030428945843,
+ "loss": 2.8549,
+ "step": 1642
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.46646010676700933,
+ "learning_rate": 0.0001513375905201776,
+ "loss": 2.7534,
+ "step": 1643
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4843158147099916,
+ "learning_rate": 0.00015080564603020142,
+ "loss": 2.8969,
+ "step": 1644
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.4198665897506918,
+ "learning_rate": 0.0001502744719931982,
+ "loss": 2.8027,
+ "step": 1645
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.47024720263573994,
+ "learning_rate": 0.00014974406958113558,
+ "loss": 2.8837,
+ "step": 1646
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5652724478858238,
+ "learning_rate": 0.00014921443996427947,
+ "loss": 2.8876,
+ "step": 1647
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4878765755653741,
+ "learning_rate": 0.0001486855843111901,
+ "loss": 2.7716,
+ "step": 1648
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5012532800838254,
+ "learning_rate": 0.0001481575037887201,
+ "loss": 2.8843,
+ "step": 1649
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4889950679384071,
+ "learning_rate": 0.00014763019956201253,
+ "loss": 2.7881,
+ "step": 1650
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4458245372461846,
+ "learning_rate": 0.0001471036727944966,
+ "loss": 2.8408,
+ "step": 1651
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4236078735521666,
+ "learning_rate": 0.0001465779246478872,
+ "loss": 2.7527,
+ "step": 1652
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5310306585758239,
+ "learning_rate": 0.00014605295628218045,
+ "loss": 2.8812,
+ "step": 1653
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4494672125075514,
+ "learning_rate": 0.0001455287688556527,
+ "loss": 2.8037,
+ "step": 1654
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.41975622465331525,
+ "learning_rate": 0.00014500536352485673,
+ "loss": 2.7997,
+ "step": 1655
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4853431242018824,
+ "learning_rate": 0.00014448274144461965,
+ "loss": 2.901,
+ "step": 1656
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5033543404706295,
+ "learning_rate": 0.00014396090376804112,
+ "loss": 2.8626,
+ "step": 1657
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.43778286883318807,
+ "learning_rate": 0.00014343985164648926,
+ "loss": 2.7306,
+ "step": 1658
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.48827213851056933,
+ "learning_rate": 0.00014291958622959973,
+ "loss": 2.7868,
+ "step": 1659
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.48602698013174744,
+ "learning_rate": 0.00014240010866527176,
+ "loss": 2.8027,
+ "step": 1660
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.43721240416498997,
+ "learning_rate": 0.00014188142009966686,
+ "loss": 2.7997,
+ "step": 1661
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4635408215203086,
+ "learning_rate": 0.0001413635216772053,
+ "loss": 2.7495,
+ "step": 1662
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4639530091200529,
+ "learning_rate": 0.000140846414540564,
+ "loss": 2.7354,
+ "step": 1663
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.44089178143403496,
+ "learning_rate": 0.00014033009983067452,
+ "loss": 2.8641,
+ "step": 1664
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.42913940369426196,
+ "learning_rate": 0.00013981457868671927,
+ "loss": 2.8609,
+ "step": 1665
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4482482950515447,
+ "learning_rate": 0.0001392998522461305,
+ "loss": 2.8366,
+ "step": 1666
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.46214491582123696,
+ "learning_rate": 0.00013878592164458635,
+ "loss": 2.8034,
+ "step": 1667
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.4593625858446614,
+ "learning_rate": 0.00013827278801600978,
+ "loss": 2.7695,
+ "step": 1668
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5609400344763316,
+ "learning_rate": 0.0001377604524925647,
+ "loss": 2.8973,
+ "step": 1669
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.46593006802096193,
+ "learning_rate": 0.00013724891620465424,
+ "loss": 2.7716,
+ "step": 1670
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5021359042985811,
+ "learning_rate": 0.0001367381802809185,
+ "loss": 2.7291,
+ "step": 1671
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4432888288658072,
+ "learning_rate": 0.00013622824584823113,
+ "loss": 2.7789,
+ "step": 1672
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.49382076529196034,
+ "learning_rate": 0.00013571911403169795,
+ "loss": 2.8012,
+ "step": 1673
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4387504966078737,
+ "learning_rate": 0.0001352107859546533,
+ "loss": 2.7485,
+ "step": 1674
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.47298915010495457,
+ "learning_rate": 0.00013470326273865886,
+ "loss": 2.8563,
+ "step": 1675
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4761813652679637,
+ "learning_rate": 0.00013419654550349985,
+ "loss": 2.8246,
+ "step": 1676
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4342911153570488,
+ "learning_rate": 0.00013369063536718346,
+ "loss": 2.7928,
+ "step": 1677
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5829340970119365,
+ "learning_rate": 0.00013318553344593632,
+ "loss": 2.7479,
+ "step": 1678
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5565563940746442,
+ "learning_rate": 0.00013268124085420136,
+ "loss": 2.8591,
+ "step": 1679
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.47016131372043907,
+ "learning_rate": 0.0001321777587046364,
+ "loss": 2.798,
+ "step": 1680
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5495626974042576,
+ "learning_rate": 0.00013167508810811059,
+ "loss": 2.8199,
+ "step": 1681
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.594564199746637,
+ "learning_rate": 0.0001311732301737029,
+ "loss": 2.7742,
+ "step": 1682
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5481849519158465,
+ "learning_rate": 0.0001306721860086991,
+ "loss": 2.8054,
+ "step": 1683
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4350419082101748,
+ "learning_rate": 0.00013017195671858928,
+ "loss": 2.7923,
+ "step": 1684
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4631732757883125,
+ "learning_rate": 0.0001296725434070661,
+ "loss": 2.8378,
+ "step": 1685
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.48507449241583356,
+ "learning_rate": 0.00012917394717602121,
+ "loss": 2.8328,
+ "step": 1686
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5679399448309966,
+ "learning_rate": 0.00012867616912554426,
+ "loss": 2.8797,
+ "step": 1687
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5609085150274051,
+ "learning_rate": 0.00012817921035391882,
+ "loss": 2.8248,
+ "step": 1688
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.43727739622254547,
+ "learning_rate": 0.00012768307195762168,
+ "loss": 2.9011,
+ "step": 1689
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.4637360975787124,
+ "learning_rate": 0.00012718775503131908,
+ "loss": 2.7482,
+ "step": 1690
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4902871639145194,
+ "learning_rate": 0.0001266932606678646,
+ "loss": 2.811,
+ "step": 1691
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4515796820693117,
+ "learning_rate": 0.00012619958995829756,
+ "loss": 2.7938,
+ "step": 1692
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4864943603344407,
+ "learning_rate": 0.0001257067439918394,
+ "loss": 2.7558,
+ "step": 1693
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.41896450763885373,
+ "learning_rate": 0.00012521472385589234,
+ "loss": 2.774,
+ "step": 1694
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4371268726053108,
+ "learning_rate": 0.00012472353063603626,
+ "loss": 2.7058,
+ "step": 1695
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4619373888755256,
+ "learning_rate": 0.0001242331654160263,
+ "loss": 2.869,
+ "step": 1696
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.45022010224258774,
+ "learning_rate": 0.0001237436292777914,
+ "loss": 2.8865,
+ "step": 1697
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4622598106329338,
+ "learning_rate": 0.00012325492330143061,
+ "loss": 2.8311,
+ "step": 1698
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.47512329293722894,
+ "learning_rate": 0.00012276704856521175,
+ "loss": 2.7867,
+ "step": 1699
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4723632432370962,
+ "learning_rate": 0.00012228000614556816,
+ "loss": 2.9297,
+ "step": 1700
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4525632859793654,
+ "learning_rate": 0.00012179379711709738,
+ "loss": 2.8893,
+ "step": 1701
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4144142394495729,
+ "learning_rate": 0.0001213084225525577,
+ "loss": 2.79,
+ "step": 1702
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4267153108397144,
+ "learning_rate": 0.00012082388352286627,
+ "loss": 2.8476,
+ "step": 1703
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4414656549212938,
+ "learning_rate": 0.00012034018109709716,
+ "loss": 2.7714,
+ "step": 1704
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4409069735361653,
+ "learning_rate": 0.00011985731634247809,
+ "loss": 2.7794,
+ "step": 1705
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4892575756577339,
+ "learning_rate": 0.00011937529032438904,
+ "loss": 2.9111,
+ "step": 1706
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4543049107061881,
+ "learning_rate": 0.00011889410410635887,
+ "loss": 2.7716,
+ "step": 1707
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.472712152772135,
+ "learning_rate": 0.0001184137587500641,
+ "loss": 2.8086,
+ "step": 1708
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.48896412341299267,
+ "learning_rate": 0.00011793425531532564,
+ "loss": 2.7344,
+ "step": 1709
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.48296230056815137,
+ "learning_rate": 0.00011745559486010671,
+ "loss": 2.8351,
+ "step": 1710
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.4605371616824428,
+ "learning_rate": 0.00011697777844051105,
+ "loss": 2.8429,
+ "step": 1711
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.450909043461953,
+ "learning_rate": 0.00011650080711077964,
+ "loss": 2.9372,
+ "step": 1712
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4599959761352405,
+ "learning_rate": 0.00011602468192328936,
+ "loss": 2.7129,
+ "step": 1713
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4356870784416878,
+ "learning_rate": 0.00011554940392854973,
+ "loss": 2.7683,
+ "step": 1714
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.474528664981417,
+ "learning_rate": 0.00011507497417520146,
+ "loss": 2.8486,
+ "step": 1715
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.41437494595648533,
+ "learning_rate": 0.00011460139371001339,
+ "loss": 2.7569,
+ "step": 1716
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4652993744516826,
+ "learning_rate": 0.00011412866357788049,
+ "loss": 2.7844,
+ "step": 1717
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4482079409209579,
+ "learning_rate": 0.00011365678482182207,
+ "loss": 2.8118,
+ "step": 1718
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4611147426697049,
+ "learning_rate": 0.0001131857584829783,
+ "loss": 2.8501,
+ "step": 1719
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4582942262061266,
+ "learning_rate": 0.0001127155856006093,
+ "loss": 2.7903,
+ "step": 1720
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4646178330046188,
+ "learning_rate": 0.00011224626721209141,
+ "loss": 2.8275,
+ "step": 1721
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4524515385556378,
+ "learning_rate": 0.0001117778043529164,
+ "loss": 2.9021,
+ "step": 1722
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4265413668352775,
+ "learning_rate": 0.0001113101980566879,
+ "loss": 2.7713,
+ "step": 1723
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.44466267123093,
+ "learning_rate": 0.00011084344935511958,
+ "loss": 2.9057,
+ "step": 1724
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4683911625199972,
+ "learning_rate": 0.00011037755927803345,
+ "loss": 2.6683,
+ "step": 1725
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.45658069482771674,
+ "learning_rate": 0.00010991252885335651,
+ "loss": 2.8608,
+ "step": 1726
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.46167795538279816,
+ "learning_rate": 0.00010944835910711958,
+ "loss": 2.8399,
+ "step": 1727
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4270097434730215,
+ "learning_rate": 0.00010898505106345396,
+ "loss": 2.6576,
+ "step": 1728
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.45154665551841433,
+ "learning_rate": 0.00010852260574459022,
+ "loss": 2.8335,
+ "step": 1729
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.449223632792442,
+ "learning_rate": 0.00010806102417085512,
+ "loss": 2.7148,
+ "step": 1730
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.47664878905687347,
+ "learning_rate": 0.00010760030736066951,
+ "loss": 2.7642,
+ "step": 1731
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.4762364063254877,
+ "learning_rate": 0.00010714045633054687,
+ "loss": 2.7876,
+ "step": 1732
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.46424534369409126,
+ "learning_rate": 0.00010668147209508971,
+ "loss": 2.8251,
+ "step": 1733
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4713335083609869,
+ "learning_rate": 0.00010622335566698877,
+ "loss": 2.8347,
+ "step": 1734
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.45883242607389363,
+ "learning_rate": 0.00010576610805701942,
+ "loss": 2.847,
+ "step": 1735
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.43356630659594364,
+ "learning_rate": 0.00010530973027404073,
+ "loss": 2.8327,
+ "step": 1736
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4538114172941742,
+ "learning_rate": 0.00010485422332499212,
+ "loss": 2.8096,
+ "step": 1737
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.44886026740196666,
+ "learning_rate": 0.00010439958821489165,
+ "loss": 2.7444,
+ "step": 1738
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.48772921431352306,
+ "learning_rate": 0.00010394582594683428,
+ "loss": 2.803,
+ "step": 1739
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4457456184062026,
+ "learning_rate": 0.0001034929375219884,
+ "loss": 2.7722,
+ "step": 1740
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4740856120669514,
+ "learning_rate": 0.00010304092393959514,
+ "loss": 2.8132,
+ "step": 1741
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4357054576647068,
+ "learning_rate": 0.00010258978619696468,
+ "loss": 2.7666,
+ "step": 1742
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4535580464524673,
+ "learning_rate": 0.00010213952528947551,
+ "loss": 2.7815,
+ "step": 1743
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4819708342836,
+ "learning_rate": 0.00010169014221057089,
+ "loss": 2.8675,
+ "step": 1744
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.49376659428377645,
+ "learning_rate": 0.00010124163795175734,
+ "loss": 2.6408,
+ "step": 1745
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4909115811666675,
+ "learning_rate": 0.00010079401350260287,
+ "loss": 2.8368,
+ "step": 1746
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.46573605255807177,
+ "learning_rate": 0.00010034726985073362,
+ "loss": 2.8699,
+ "step": 1747
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.46367981884768944,
+ "learning_rate": 9.9901407981833e-05,
+ "loss": 2.8907,
+ "step": 1748
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.48246645320281606,
+ "learning_rate": 9.94564288796384e-05,
+ "loss": 2.7218,
+ "step": 1749
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.46284901610133045,
+ "learning_rate": 9.901233352593953e-05,
+ "loss": 2.8225,
+ "step": 1750
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4739424280043419,
+ "learning_rate": 9.856912290057668e-05,
+ "loss": 2.7687,
+ "step": 1751
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.49525024160583986,
+ "learning_rate": 9.812679798143748e-05,
+ "loss": 2.6696,
+ "step": 1752
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4814362259098244,
+ "learning_rate": 9.768535974445586e-05,
+ "loss": 2.7704,
+ "step": 1753
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4971972006239607,
+ "learning_rate": 9.724480916360906e-05,
+ "loss": 2.8264,
+ "step": 1754
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.4522362108521222,
+ "learning_rate": 9.68051472109162e-05,
+ "loss": 2.7744,
+ "step": 1755
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.48617291505166516,
+ "learning_rate": 9.636637485643529e-05,
+ "loss": 2.8136,
+ "step": 1756
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.522089562714972,
+ "learning_rate": 9.592849306826174e-05,
+ "loss": 2.8566,
+ "step": 1757
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.47118634038386026,
+ "learning_rate": 9.549150281252633e-05,
+ "loss": 2.8676,
+ "step": 1758
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4697676365450015,
+ "learning_rate": 9.505540505339223e-05,
+ "loss": 2.7314,
+ "step": 1759
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4379956008651422,
+ "learning_rate": 9.4620200753054e-05,
+ "loss": 2.7882,
+ "step": 1760
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4488610357806196,
+ "learning_rate": 9.418589087173441e-05,
+ "loss": 2.8088,
+ "step": 1761
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.45177804039987224,
+ "learning_rate": 9.375247636768325e-05,
+ "loss": 2.8298,
+ "step": 1762
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.43684038511076656,
+ "learning_rate": 9.331995819717443e-05,
+ "loss": 2.8751,
+ "step": 1763
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.44997620803997396,
+ "learning_rate": 9.288833731450419e-05,
+ "loss": 2.8728,
+ "step": 1764
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.48759132029597696,
+ "learning_rate": 9.245761467198948e-05,
+ "loss": 2.8439,
+ "step": 1765
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.49881149929435403,
+ "learning_rate": 9.20277912199648e-05,
+ "loss": 2.7145,
+ "step": 1766
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4310392407119481,
+ "learning_rate": 9.159886790678123e-05,
+ "loss": 2.7769,
+ "step": 1767
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4253262998188575,
+ "learning_rate": 9.11708456788033e-05,
+ "loss": 2.7874,
+ "step": 1768
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4368632754090294,
+ "learning_rate": 9.074372548040793e-05,
+ "loss": 2.9097,
+ "step": 1769
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4661268397133449,
+ "learning_rate": 9.031750825398145e-05,
+ "loss": 2.8452,
+ "step": 1770
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4845128947944187,
+ "learning_rate": 8.98921949399179e-05,
+ "loss": 2.8286,
+ "step": 1771
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4611721249245403,
+ "learning_rate": 8.94677864766173e-05,
+ "loss": 2.8874,
+ "step": 1772
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4444985619055832,
+ "learning_rate": 8.904428380048269e-05,
+ "loss": 2.7885,
+ "step": 1773
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4632154549585779,
+ "learning_rate": 8.862168784591929e-05,
+ "loss": 2.748,
+ "step": 1774
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4636342015978502,
+ "learning_rate": 8.819999954533115e-05,
+ "loss": 2.7895,
+ "step": 1775
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5012118512780429,
+ "learning_rate": 8.777921982911996e-05,
+ "loss": 2.8547,
+ "step": 1776
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.4718778112190392,
+ "learning_rate": 8.735934962568253e-05,
+ "loss": 2.8617,
+ "step": 1777
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.45948080215646764,
+ "learning_rate": 8.694038986140945e-05,
+ "loss": 2.9122,
+ "step": 1778
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4554456062447432,
+ "learning_rate": 8.652234146068206e-05,
+ "loss": 2.7492,
+ "step": 1779
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4633838353834484,
+ "learning_rate": 8.610520534587086e-05,
+ "loss": 2.7274,
+ "step": 1780
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.49027443549126076,
+ "learning_rate": 8.568898243733397e-05,
+ "loss": 2.7432,
+ "step": 1781
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4831859853876445,
+ "learning_rate": 8.527367365341409e-05,
+ "loss": 2.8607,
+ "step": 1782
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.48568761776411196,
+ "learning_rate": 8.485927991043757e-05,
+ "loss": 2.83,
+ "step": 1783
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4249937623623898,
+ "learning_rate": 8.444580212271125e-05,
+ "loss": 2.8637,
+ "step": 1784
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4172031538393696,
+ "learning_rate": 8.403324120252159e-05,
+ "loss": 2.7604,
+ "step": 1785
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4838406528126884,
+ "learning_rate": 8.362159806013175e-05,
+ "loss": 2.8181,
+ "step": 1786
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4604598309407954,
+ "learning_rate": 8.321087360377988e-05,
+ "loss": 2.8779,
+ "step": 1787
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4076016896164628,
+ "learning_rate": 8.280106873967752e-05,
+ "loss": 2.8618,
+ "step": 1788
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4699743537637191,
+ "learning_rate": 8.239218437200679e-05,
+ "loss": 2.7191,
+ "step": 1789
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.43945552427908474,
+ "learning_rate": 8.198422140291939e-05,
+ "loss": 2.7892,
+ "step": 1790
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.43867354140841985,
+ "learning_rate": 8.157718073253351e-05,
+ "loss": 2.8172,
+ "step": 1791
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4672023144883624,
+ "learning_rate": 8.117106325893287e-05,
+ "loss": 2.7869,
+ "step": 1792
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4149199915230677,
+ "learning_rate": 8.076586987816404e-05,
+ "loss": 2.7962,
+ "step": 1793
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4433902469649084,
+ "learning_rate": 8.036160148423449e-05,
+ "loss": 2.7228,
+ "step": 1794
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4521637463977531,
+ "learning_rate": 7.995825896911141e-05,
+ "loss": 2.7693,
+ "step": 1795
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4465103851083654,
+ "learning_rate": 7.955584322271853e-05,
+ "loss": 2.8475,
+ "step": 1796
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4407457164592422,
+ "learning_rate": 7.915435513293523e-05,
+ "loss": 2.7287,
+ "step": 1797
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4166836660923295,
+ "learning_rate": 7.875379558559387e-05,
+ "loss": 2.8107,
+ "step": 1798
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.4840573129286454,
+ "learning_rate": 7.835416546447838e-05,
+ "loss": 2.827,
+ "step": 1799
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4503225326954538,
+ "learning_rate": 7.795546565132167e-05,
+ "loss": 2.7577,
+ "step": 1800
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.45711415228721297,
+ "learning_rate": 7.755769702580412e-05,
+ "loss": 2.8672,
+ "step": 1801
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.45992237661386826,
+ "learning_rate": 7.716086046555193e-05,
+ "loss": 2.8001,
+ "step": 1802
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4650265493015173,
+ "learning_rate": 7.676495684613432e-05,
+ "loss": 2.8615,
+ "step": 1803
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4603174332362577,
+ "learning_rate": 7.636998704106252e-05,
+ "loss": 2.8338,
+ "step": 1804
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5286634737322016,
+ "learning_rate": 7.597595192178702e-05,
+ "loss": 2.8103,
+ "step": 1805
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4588612951971358,
+ "learning_rate": 7.558285235769646e-05,
+ "loss": 2.7997,
+ "step": 1806
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.45204608995511053,
+ "learning_rate": 7.519068921611494e-05,
+ "loss": 2.8327,
+ "step": 1807
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.435875934019387,
+ "learning_rate": 7.479946336230047e-05,
+ "loss": 2.7986,
+ "step": 1808
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4446640755556926,
+ "learning_rate": 7.440917565944349e-05,
+ "loss": 2.7003,
+ "step": 1809
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4434126501132901,
+ "learning_rate": 7.4019826968664e-05,
+ "loss": 2.7119,
+ "step": 1810
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4164455732553954,
+ "learning_rate": 7.363141814901053e-05,
+ "loss": 2.7376,
+ "step": 1811
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5013380443446551,
+ "learning_rate": 7.32439500574577e-05,
+ "loss": 2.7254,
+ "step": 1812
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.47862850762664866,
+ "learning_rate": 7.285742354890473e-05,
+ "loss": 2.7742,
+ "step": 1813
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.44738726612632534,
+ "learning_rate": 7.247183947617325e-05,
+ "loss": 2.8454,
+ "step": 1814
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.48494863101073743,
+ "learning_rate": 7.20871986900053e-05,
+ "loss": 2.8104,
+ "step": 1815
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4345544626661981,
+ "learning_rate": 7.170350203906218e-05,
+ "loss": 2.7785,
+ "step": 1816
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4404229642826438,
+ "learning_rate": 7.132075036992158e-05,
+ "loss": 2.7628,
+ "step": 1817
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4746894602676435,
+ "learning_rate": 7.093894452707666e-05,
+ "loss": 2.8024,
+ "step": 1818
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4502579365288954,
+ "learning_rate": 7.055808535293334e-05,
+ "loss": 2.8196,
+ "step": 1819
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.43085462919989886,
+ "learning_rate": 7.017817368780888e-05,
+ "loss": 2.7898,
+ "step": 1820
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.4419090165878023,
+ "learning_rate": 6.979921036993042e-05,
+ "loss": 2.6909,
+ "step": 1821
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4733744132394077,
+ "learning_rate": 6.942119623543202e-05,
+ "loss": 2.8606,
+ "step": 1822
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.42710806902540244,
+ "learning_rate": 6.904413211835414e-05,
+ "loss": 2.7495,
+ "step": 1823
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.47825566092083166,
+ "learning_rate": 6.866801885064056e-05,
+ "loss": 2.8165,
+ "step": 1824
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.44436622132601106,
+ "learning_rate": 6.829285726213769e-05,
+ "loss": 2.7965,
+ "step": 1825
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4629315671476937,
+ "learning_rate": 6.79186481805918e-05,
+ "loss": 2.7995,
+ "step": 1826
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.45400486587607414,
+ "learning_rate": 6.754539243164754e-05,
+ "loss": 2.7091,
+ "step": 1827
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.48634126300880165,
+ "learning_rate": 6.717309083884654e-05,
+ "loss": 2.8651,
+ "step": 1828
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.46752519780047797,
+ "learning_rate": 6.680174422362468e-05,
+ "loss": 2.8461,
+ "step": 1829
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.42047161833760416,
+ "learning_rate": 6.643135340531136e-05,
+ "loss": 2.8043,
+ "step": 1830
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.40960834385267164,
+ "learning_rate": 6.606191920112664e-05,
+ "loss": 2.8033,
+ "step": 1831
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.45896309447541594,
+ "learning_rate": 6.569344242618036e-05,
+ "loss": 2.8009,
+ "step": 1832
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4581239276138602,
+ "learning_rate": 6.532592389346958e-05,
+ "loss": 2.7646,
+ "step": 1833
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.47273990966066515,
+ "learning_rate": 6.495936441387713e-05,
+ "loss": 2.7862,
+ "step": 1834
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5073883075076153,
+ "learning_rate": 6.459376479617013e-05,
+ "loss": 2.8261,
+ "step": 1835
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5490595847162782,
+ "learning_rate": 6.422912584699752e-05,
+ "loss": 2.8014,
+ "step": 1836
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4213992032172031,
+ "learning_rate": 6.386544837088904e-05,
+ "loss": 2.8299,
+ "step": 1837
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.43408772214885816,
+ "learning_rate": 6.350273317025251e-05,
+ "loss": 2.7032,
+ "step": 1838
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.48600777720083826,
+ "learning_rate": 6.314098104537324e-05,
+ "loss": 2.7665,
+ "step": 1839
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4714451962157142,
+ "learning_rate": 6.278019279441122e-05,
+ "loss": 2.84,
+ "step": 1840
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4428932143081485,
+ "learning_rate": 6.242036921339972e-05,
+ "loss": 2.729,
+ "step": 1841
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.4340900249705281,
+ "learning_rate": 6.206151109624402e-05,
+ "loss": 2.7235,
+ "step": 1842
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4781055141469737,
+ "learning_rate": 6.170361923471868e-05,
+ "loss": 2.7651,
+ "step": 1843
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.45054022623172396,
+ "learning_rate": 6.134669441846691e-05,
+ "loss": 2.8023,
+ "step": 1844
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.44389524551587356,
+ "learning_rate": 6.099073743499772e-05,
+ "loss": 2.8385,
+ "step": 1845
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.448327653873566,
+ "learning_rate": 6.063574906968511e-05,
+ "loss": 2.8763,
+ "step": 1846
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.46701265709871564,
+ "learning_rate": 6.028173010576582e-05,
+ "loss": 2.771,
+ "step": 1847
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4719560827870164,
+ "learning_rate": 5.9928681324337544e-05,
+ "loss": 2.6327,
+ "step": 1848
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.44109675600728626,
+ "learning_rate": 5.957660350435773e-05,
+ "loss": 2.8195,
+ "step": 1849
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.46196007401754124,
+ "learning_rate": 5.922549742264122e-05,
+ "loss": 2.6556,
+ "step": 1850
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.44472709660912657,
+ "learning_rate": 5.8875363853859166e-05,
+ "loss": 2.8139,
+ "step": 1851
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4679883797685744,
+ "learning_rate": 5.852620357053651e-05,
+ "loss": 2.7671,
+ "step": 1852
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.45570196320313555,
+ "learning_rate": 5.8178017343051336e-05,
+ "loss": 2.7814,
+ "step": 1853
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.48623161550418414,
+ "learning_rate": 5.783080593963219e-05,
+ "loss": 2.8587,
+ "step": 1854
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.436860752868839,
+ "learning_rate": 5.748457012635683e-05,
+ "loss": 2.7226,
+ "step": 1855
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.47870428075908117,
+ "learning_rate": 5.713931066715078e-05,
+ "loss": 2.7601,
+ "step": 1856
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4715229124291041,
+ "learning_rate": 5.679502832378497e-05,
+ "loss": 2.7906,
+ "step": 1857
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4761382659598743,
+ "learning_rate": 5.645172385587482e-05,
+ "loss": 2.7362,
+ "step": 1858
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4873623334280704,
+ "learning_rate": 5.6109398020877834e-05,
+ "loss": 2.8606,
+ "step": 1859
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4670310574926615,
+ "learning_rate": 5.576805157409265e-05,
+ "loss": 2.8383,
+ "step": 1860
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4567692179466217,
+ "learning_rate": 5.542768526865677e-05,
+ "loss": 2.8089,
+ "step": 1861
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4315874195858735,
+ "learning_rate": 5.508829985554509e-05,
+ "loss": 2.7656,
+ "step": 1862
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4972001729453731,
+ "learning_rate": 5.474989608356856e-05,
+ "loss": 2.8653,
+ "step": 1863
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.4760906749717741,
+ "learning_rate": 5.441247469937194e-05,
+ "loss": 2.8071,
+ "step": 1864
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.48010751291022025,
+ "learning_rate": 5.407603644743286e-05,
+ "loss": 2.8201,
+ "step": 1865
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.483387404765074,
+ "learning_rate": 5.374058207005944e-05,
+ "loss": 2.7711,
+ "step": 1866
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4434259018554659,
+ "learning_rate": 5.3406112307389066e-05,
+ "loss": 2.8023,
+ "step": 1867
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4909802314773494,
+ "learning_rate": 5.3072627897386926e-05,
+ "loss": 2.8897,
+ "step": 1868
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.48423892198770335,
+ "learning_rate": 5.27401295758439e-05,
+ "loss": 2.9216,
+ "step": 1869
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4883071009528147,
+ "learning_rate": 5.2408618076375315e-05,
+ "loss": 2.77,
+ "step": 1870
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.45423370016731535,
+ "learning_rate": 5.207809413041914e-05,
+ "loss": 2.8259,
+ "step": 1871
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5110150107951352,
+ "learning_rate": 5.174855846723459e-05,
+ "loss": 2.6572,
+ "step": 1872
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.519087478894957,
+ "learning_rate": 5.1420011813900104e-05,
+ "loss": 2.8154,
+ "step": 1873
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.43647072954413296,
+ "learning_rate": 5.109245489531211e-05,
+ "loss": 2.8123,
+ "step": 1874
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.47595617077857805,
+ "learning_rate": 5.0765888434183446e-05,
+ "loss": 2.7178,
+ "step": 1875
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.48470676342228974,
+ "learning_rate": 5.0440313151041364e-05,
+ "loss": 2.7874,
+ "step": 1876
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4819286670718033,
+ "learning_rate": 5.011572976422657e-05,
+ "loss": 2.7501,
+ "step": 1877
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5245544303009066,
+ "learning_rate": 4.9792138989890825e-05,
+ "loss": 2.9112,
+ "step": 1878
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.481122122909712,
+ "learning_rate": 4.9469541541996234e-05,
+ "loss": 2.7851,
+ "step": 1879
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.44552125561656075,
+ "learning_rate": 4.914793813231305e-05,
+ "loss": 2.739,
+ "step": 1880
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.47839513558440216,
+ "learning_rate": 4.882732947041818e-05,
+ "loss": 2.8428,
+ "step": 1881
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4632839657324371,
+ "learning_rate": 4.850771626369416e-05,
+ "loss": 2.7619,
+ "step": 1882
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4865062660367911,
+ "learning_rate": 4.818909921732662e-05,
+ "loss": 2.8029,
+ "step": 1883
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.4458382521948856,
+ "learning_rate": 4.787147903430383e-05,
+ "loss": 2.8261,
+ "step": 1884
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.46794706916274564,
+ "learning_rate": 4.755485641541424e-05,
+ "loss": 2.8034,
+ "step": 1885
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5086109196212392,
+ "learning_rate": 4.723923205924557e-05,
+ "loss": 2.8335,
+ "step": 1886
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5351998390702288,
+ "learning_rate": 4.6924606662182736e-05,
+ "loss": 2.7467,
+ "step": 1887
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.45818290342016676,
+ "learning_rate": 4.6610980918406596e-05,
+ "loss": 2.8747,
+ "step": 1888
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4471592115105285,
+ "learning_rate": 4.629835551989276e-05,
+ "loss": 2.8359,
+ "step": 1889
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.47773519113577967,
+ "learning_rate": 4.5986731156409224e-05,
+ "loss": 2.7953,
+ "step": 1890
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.478516150402491,
+ "learning_rate": 4.567610851551568e-05,
+ "loss": 2.7876,
+ "step": 1891
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.46709284081096686,
+ "learning_rate": 4.536648828256146e-05,
+ "loss": 2.7747,
+ "step": 1892
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4700521873765865,
+ "learning_rate": 4.505787114068433e-05,
+ "loss": 2.7402,
+ "step": 1893
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.48450573118493645,
+ "learning_rate": 4.4750257770808764e-05,
+ "loss": 2.7911,
+ "step": 1894
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.46067986332118094,
+ "learning_rate": 4.444364885164448e-05,
+ "loss": 2.7929,
+ "step": 1895
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4570660354688549,
+ "learning_rate": 4.413804505968533e-05,
+ "loss": 2.8485,
+ "step": 1896
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4738788992234037,
+ "learning_rate": 4.3833447069206944e-05,
+ "loss": 2.7715,
+ "step": 1897
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4068239189930094,
+ "learning_rate": 4.352985555226635e-05,
+ "loss": 2.7374,
+ "step": 1898
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.477890508248255,
+ "learning_rate": 4.322727117869951e-05,
+ "loss": 2.8273,
+ "step": 1899
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.4353867321477917,
+ "learning_rate": 4.29256946161205e-05,
+ "loss": 2.7818,
+ "step": 1900
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.41274745845030913,
+ "learning_rate": 4.262512652991968e-05,
+ "loss": 2.7959,
+ "step": 1901
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.46820093469310914,
+ "learning_rate": 4.2325567583262113e-05,
+ "loss": 2.7843,
+ "step": 1902
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.47703071837444616,
+ "learning_rate": 4.2027018437086895e-05,
+ "loss": 2.746,
+ "step": 1903
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.46063027776963866,
+ "learning_rate": 4.172947975010449e-05,
+ "loss": 2.7285,
+ "step": 1904
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.44391525001590143,
+ "learning_rate": 4.143295217879645e-05,
+ "loss": 2.7919,
+ "step": 1905
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.42375822300420607,
+ "learning_rate": 4.113743637741296e-05,
+ "loss": 2.7084,
+ "step": 1906
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5293397796701194,
+ "learning_rate": 4.084293299797226e-05,
+ "loss": 2.7686,
+ "step": 1907
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.46490302682764184,
+ "learning_rate": 4.054944269025862e-05,
+ "loss": 2.725,
+ "step": 1908
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4509254691346404,
+ "learning_rate": 4.025696610182095e-05,
+ "loss": 2.8627,
+ "step": 1909
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4444446483134963,
+ "learning_rate": 3.996550387797187e-05,
+ "loss": 2.7163,
+ "step": 1910
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.47794126104731294,
+ "learning_rate": 3.9675056661785556e-05,
+ "loss": 2.8873,
+ "step": 1911
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4717248369064989,
+ "learning_rate": 3.9385625094097154e-05,
+ "loss": 2.7357,
+ "step": 1912
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4744433738312639,
+ "learning_rate": 3.909720981350034e-05,
+ "loss": 2.829,
+ "step": 1913
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4841709842323799,
+ "learning_rate": 3.880981145634704e-05,
+ "loss": 2.7899,
+ "step": 1914
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5068611150135629,
+ "learning_rate": 3.852343065674507e-05,
+ "loss": 2.9094,
+ "step": 1915
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4599030184814247,
+ "learning_rate": 3.8238068046557276e-05,
+ "loss": 2.8175,
+ "step": 1916
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.46059459216713716,
+ "learning_rate": 3.795372425540006e-05,
+ "loss": 2.8272,
+ "step": 1917
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.46238665839895854,
+ "learning_rate": 3.76703999106418e-05,
+ "loss": 2.7585,
+ "step": 1918
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4727114813444034,
+ "learning_rate": 3.7388095637401754e-05,
+ "loss": 2.8117,
+ "step": 1919
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.44764476740669956,
+ "learning_rate": 3.7106812058548376e-05,
+ "loss": 2.8863,
+ "step": 1920
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.49233761219127414,
+ "learning_rate": 3.682654979469807e-05,
+ "loss": 2.7796,
+ "step": 1921
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4925670676328931,
+ "learning_rate": 3.654730946421403e-05,
+ "loss": 2.8925,
+ "step": 1922
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4527937081633335,
+ "learning_rate": 3.6269091683204466e-05,
+ "loss": 2.8186,
+ "step": 1923
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.512373753309031,
+ "learning_rate": 3.5991897065521693e-05,
+ "loss": 2.8894,
+ "step": 1924
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4799219629222766,
+ "learning_rate": 3.571572622276026e-05,
+ "loss": 2.776,
+ "step": 1925
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.46982886583021805,
+ "learning_rate": 3.544057976425619e-05,
+ "loss": 2.7674,
+ "step": 1926
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5016161243908822,
+ "learning_rate": 3.5166458297085146e-05,
+ "loss": 2.8071,
+ "step": 1927
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.4670588856155836,
+ "learning_rate": 3.489336242606111e-05,
+ "loss": 2.7942,
+ "step": 1928
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.46978812882417037,
+ "learning_rate": 3.462129275373577e-05,
+ "loss": 2.7836,
+ "step": 1929
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.49031073121178587,
+ "learning_rate": 3.4350249880395924e-05,
+ "loss": 2.8346,
+ "step": 1930
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.46813885006096706,
+ "learning_rate": 3.408023440406355e-05,
+ "loss": 2.8393,
+ "step": 1931
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.513620158114933,
+ "learning_rate": 3.381124692049331e-05,
+ "loss": 2.8688,
+ "step": 1932
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.470617426525453,
+ "learning_rate": 3.354328802317197e-05,
+ "loss": 2.8324,
+ "step": 1933
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4763494251521051,
+ "learning_rate": 3.327635830331677e-05,
+ "loss": 2.7281,
+ "step": 1934
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4677637033629361,
+ "learning_rate": 3.3010458349874206e-05,
+ "loss": 2.8705,
+ "step": 1935
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4486540264164033,
+ "learning_rate": 3.2745588749518775e-05,
+ "loss": 2.7995,
+ "step": 1936
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.43798048194269157,
+ "learning_rate": 3.248175008665161e-05,
+ "loss": 2.8363,
+ "step": 1937
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5139936474871454,
+ "learning_rate": 3.221894294339911e-05,
+ "loss": 2.8178,
+ "step": 1938
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.48052490834654454,
+ "learning_rate": 3.1957167899611836e-05,
+ "loss": 2.8022,
+ "step": 1939
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.49548187379562525,
+ "learning_rate": 3.169642553286334e-05,
+ "loss": 2.7844,
+ "step": 1940
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4567819304315379,
+ "learning_rate": 3.143671641844831e-05,
+ "loss": 2.7201,
+ "step": 1941
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.493941659094146,
+ "learning_rate": 3.117804112938205e-05,
+ "loss": 2.6796,
+ "step": 1942
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.48973294320212085,
+ "learning_rate": 3.092040023639869e-05,
+ "loss": 2.7465,
+ "step": 1943
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4721840901751396,
+ "learning_rate": 3.066379430795002e-05,
+ "loss": 2.7534,
+ "step": 1944
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.44276031810246547,
+ "learning_rate": 3.040822391020459e-05,
+ "loss": 2.7101,
+ "step": 1945
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.4625945857286168,
+ "learning_rate": 3.0153689607045842e-05,
+ "loss": 2.8048,
+ "step": 1946
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5144344989377134,
+ "learning_rate": 2.9900191960071545e-05,
+ "loss": 2.8394,
+ "step": 1947
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5075603546288441,
+ "learning_rate": 2.9647731528591848e-05,
+ "loss": 2.8109,
+ "step": 1948
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5024601703570066,
+ "learning_rate": 2.9396308869628795e-05,
+ "loss": 2.7544,
+ "step": 1949
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.47190929599283804,
+ "learning_rate": 2.914592453791448e-05,
+ "loss": 2.8103,
+ "step": 1950
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.46419782465262044,
+ "learning_rate": 2.8896579085889994e-05,
+ "loss": 2.9013,
+ "step": 1951
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4765952215823409,
+ "learning_rate": 2.86482730637046e-05,
+ "loss": 2.8603,
+ "step": 1952
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4981999686736596,
+ "learning_rate": 2.840100701921383e-05,
+ "loss": 2.7443,
+ "step": 1953
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.442265642902609,
+ "learning_rate": 2.8154781497978898e-05,
+ "loss": 2.855,
+ "step": 1954
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.45860907652489324,
+ "learning_rate": 2.7909597043265013e-05,
+ "loss": 2.8404,
+ "step": 1955
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4414495320589354,
+ "learning_rate": 2.7665454196040662e-05,
+ "loss": 2.6318,
+ "step": 1956
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4722971002112999,
+ "learning_rate": 2.7422353494975905e-05,
+ "loss": 2.7593,
+ "step": 1957
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4535287700632903,
+ "learning_rate": 2.7180295476441573e-05,
+ "loss": 2.7287,
+ "step": 1958
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.43062553576734897,
+ "learning_rate": 2.6939280674508016e-05,
+ "loss": 2.8223,
+ "step": 1959
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4992011784198708,
+ "learning_rate": 2.669930962094358e-05,
+ "loss": 2.7553,
+ "step": 1960
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.454487335731581,
+ "learning_rate": 2.6460382845214126e-05,
+ "loss": 2.8955,
+ "step": 1961
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.47620845008826995,
+ "learning_rate": 2.6222500874481025e-05,
+ "loss": 2.7565,
+ "step": 1962
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.45403431982890907,
+ "learning_rate": 2.5985664233600827e-05,
+ "loss": 2.8351,
+ "step": 1963
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5147775673173017,
+ "learning_rate": 2.574987344512336e-05,
+ "loss": 2.8221,
+ "step": 1964
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5148861087900919,
+ "learning_rate": 2.5515129029290984e-05,
+ "loss": 2.7442,
+ "step": 1965
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.48917251902764286,
+ "learning_rate": 2.5281431504037556e-05,
+ "loss": 2.8102,
+ "step": 1966
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5183777786595527,
+ "learning_rate": 2.504878138498684e-05,
+ "loss": 2.7553,
+ "step": 1967
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4801684290292703,
+ "learning_rate": 2.48171791854519e-05,
+ "loss": 2.9199,
+ "step": 1968
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4821462841883277,
+ "learning_rate": 2.4586625416433473e-05,
+ "loss": 2.8171,
+ "step": 1969
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.45817722408329664,
+ "learning_rate": 2.435712058661921e-05,
+ "loss": 2.7872,
+ "step": 1970
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.44188627691752785,
+ "learning_rate": 2.4128665202382327e-05,
+ "loss": 2.7818,
+ "step": 1971
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.49939653333115447,
+ "learning_rate": 2.3901259767780515e-05,
+ "loss": 2.8506,
+ "step": 1972
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.4581521745661269,
+ "learning_rate": 2.367490478455514e-05,
+ "loss": 2.8073,
+ "step": 1973
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.470304038252465,
+ "learning_rate": 2.3449600752129597e-05,
+ "loss": 2.781,
+ "step": 1974
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4858200595472055,
+ "learning_rate": 2.3225348167608685e-05,
+ "loss": 2.8069,
+ "step": 1975
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.42438304717192216,
+ "learning_rate": 2.3002147525777118e-05,
+ "loss": 2.724,
+ "step": 1976
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.6701500898998157,
+ "learning_rate": 2.2779999319098856e-05,
+ "loss": 2.8509,
+ "step": 1977
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.44321234259025716,
+ "learning_rate": 2.255890403771571e-05,
+ "loss": 2.7965,
+ "step": 1978
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4853810888074618,
+ "learning_rate": 2.233886216944614e-05,
+ "loss": 2.7566,
+ "step": 1979
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4678836881227967,
+ "learning_rate": 2.211987419978484e-05,
+ "loss": 2.8053,
+ "step": 1980
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.450650356897011,
+ "learning_rate": 2.1901940611900705e-05,
+ "loss": 2.8888,
+ "step": 1981
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5221757775990122,
+ "learning_rate": 2.168506188663666e-05,
+ "loss": 2.7682,
+ "step": 1982
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4872962253885538,
+ "learning_rate": 2.1469238502507925e-05,
+ "loss": 2.8187,
+ "step": 1983
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4705314647827925,
+ "learning_rate": 2.125447093570154e-05,
+ "loss": 2.7999,
+ "step": 1984
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.46567033006606817,
+ "learning_rate": 2.1040759660074793e-05,
+ "loss": 2.7286,
+ "step": 1985
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5651127990883319,
+ "learning_rate": 2.0828105147154273e-05,
+ "loss": 2.8119,
+ "step": 1986
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.45161468421943046,
+ "learning_rate": 2.061650786613545e-05,
+ "loss": 2.8103,
+ "step": 1987
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4685633561899011,
+ "learning_rate": 2.040596828388058e-05,
+ "loss": 2.8264,
+ "step": 1988
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4773316400336306,
+ "learning_rate": 2.019648686491865e-05,
+ "loss": 2.746,
+ "step": 1989
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5088348899727118,
+ "learning_rate": 1.9988064071443767e-05,
+ "loss": 2.726,
+ "step": 1990
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5040015176040803,
+ "learning_rate": 1.9780700363314253e-05,
+ "loss": 2.8457,
+ "step": 1991
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4538033594866911,
+ "learning_rate": 1.957439619805196e-05,
+ "loss": 2.7185,
+ "step": 1992
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.44782491058440776,
+ "learning_rate": 1.9369152030840554e-05,
+ "loss": 2.7947,
+ "step": 1993
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4394414917412733,
+ "learning_rate": 1.916496831452552e-05,
+ "loss": 2.841,
+ "step": 1994
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.4695546610831965,
+ "learning_rate": 1.8961845499611998e-05,
+ "loss": 2.8257,
+ "step": 1995
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.46025357205916845,
+ "learning_rate": 1.8759784034264925e-05,
+ "loss": 2.7033,
+ "step": 1996
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4310208632282742,
+ "learning_rate": 1.855878436430708e-05,
+ "loss": 2.7816,
+ "step": 1997
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4478732408812746,
+ "learning_rate": 1.835884693321871e-05,
+ "loss": 2.7961,
+ "step": 1998
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4421373663305853,
+ "learning_rate": 1.8159972182136386e-05,
+ "loss": 2.7469,
+ "step": 1999
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4612386559009226,
+ "learning_rate": 1.7962160549851945e-05,
+ "loss": 2.8127,
+ "step": 2000
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5016836061927389,
+ "learning_rate": 1.776541247281177e-05,
+ "loss": 2.6931,
+ "step": 2001
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.44891079976996345,
+ "learning_rate": 1.7569728385115224e-05,
+ "loss": 2.7905,
+ "step": 2002
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4862415603104597,
+ "learning_rate": 1.7375108718514665e-05,
+ "loss": 2.895,
+ "step": 2003
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.45790508708524574,
+ "learning_rate": 1.7181553902413438e-05,
+ "loss": 2.7822,
+ "step": 2004
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4567660899871866,
+ "learning_rate": 1.698906436386577e-05,
+ "loss": 2.7159,
+ "step": 2005
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4579128170551362,
+ "learning_rate": 1.679764052757532e-05,
+ "loss": 2.8115,
+ "step": 2006
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4626866859769715,
+ "learning_rate": 1.6607282815894464e-05,
+ "loss": 2.923,
+ "step": 2007
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4787593429515904,
+ "learning_rate": 1.6417991648823405e-05,
+ "loss": 2.8389,
+ "step": 2008
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.49190945126858177,
+ "learning_rate": 1.6229767444008835e-05,
+ "loss": 2.8306,
+ "step": 2009
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4537104221530222,
+ "learning_rate": 1.604261061674378e-05,
+ "loss": 2.7965,
+ "step": 2010
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.48609552646337717,
+ "learning_rate": 1.5856521579965865e-05,
+ "loss": 2.8186,
+ "step": 2011
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5074852168440219,
+ "learning_rate": 1.5671500744256938e-05,
+ "loss": 2.8123,
+ "step": 2012
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.46485569215684197,
+ "learning_rate": 1.5487548517841953e-05,
+ "loss": 2.798,
+ "step": 2013
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5020029324722458,
+ "learning_rate": 1.530466530658814e-05,
+ "loss": 2.8424,
+ "step": 2014
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4564780321705549,
+ "learning_rate": 1.5122851514004054e-05,
+ "loss": 2.774,
+ "step": 2015
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.4454540080917654,
+ "learning_rate": 1.4942107541238703e-05,
+ "loss": 2.8041,
+ "step": 2016
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.46540218950928874,
+ "learning_rate": 1.4762433787080809e-05,
+ "loss": 2.7892,
+ "step": 2017
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.432990906843607,
+ "learning_rate": 1.4583830647957541e-05,
+ "loss": 2.8084,
+ "step": 2018
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5043464372985752,
+ "learning_rate": 1.4406298517934068e-05,
+ "loss": 2.8159,
+ "step": 2019
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.47077663775535517,
+ "learning_rate": 1.4229837788712562e-05,
+ "loss": 2.8267,
+ "step": 2020
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.47425311779563945,
+ "learning_rate": 1.4054448849631085e-05,
+ "loss": 2.7072,
+ "step": 2021
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5100092882454986,
+ "learning_rate": 1.3880132087663145e-05,
+ "loss": 2.7991,
+ "step": 2022
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4453963794289051,
+ "learning_rate": 1.3706887887416419e-05,
+ "loss": 2.8445,
+ "step": 2023
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.45265513093828225,
+ "learning_rate": 1.3534716631132316e-05,
+ "loss": 2.7363,
+ "step": 2024
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4787350545307203,
+ "learning_rate": 1.3363618698684853e-05,
+ "loss": 2.7596,
+ "step": 2025
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4145650883644428,
+ "learning_rate": 1.3193594467579728e-05,
+ "loss": 2.8245,
+ "step": 2026
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.44655211114948384,
+ "learning_rate": 1.3024644312954026e-05,
+ "loss": 2.787,
+ "step": 2027
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.458896678392866,
+ "learning_rate": 1.2856768607574564e-05,
+ "loss": 2.7468,
+ "step": 2028
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.452346485850069,
+ "learning_rate": 1.2689967721837947e-05,
+ "loss": 2.7915,
+ "step": 2029
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4706049865111943,
+ "learning_rate": 1.2524242023769006e-05,
+ "loss": 2.7089,
+ "step": 2030
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.47117146634495244,
+ "learning_rate": 1.2359591879020526e-05,
+ "loss": 2.8364,
+ "step": 2031
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4831525393630432,
+ "learning_rate": 1.2196017650872081e-05,
+ "loss": 2.8107,
+ "step": 2032
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.47114539851396003,
+ "learning_rate": 1.2033519700229367e-05,
+ "loss": 2.7668,
+ "step": 2033
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.45440880494451574,
+ "learning_rate": 1.1872098385623586e-05,
+ "loss": 2.7373,
+ "step": 2034
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.44288738546324935,
+ "learning_rate": 1.1711754063210289e-05,
+ "loss": 2.8058,
+ "step": 2035
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4841211113174389,
+ "learning_rate": 1.155248708676887e-05,
+ "loss": 2.8097,
+ "step": 2036
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4853645045705976,
+ "learning_rate": 1.1394297807701737e-05,
+ "loss": 2.733,
+ "step": 2037
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.4542728338861751,
+ "learning_rate": 1.1237186575033254e-05,
+ "loss": 2.8045,
+ "step": 2038
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.47893169907657707,
+ "learning_rate": 1.1081153735409522e-05,
+ "loss": 2.7793,
+ "step": 2039
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.44362359727759687,
+ "learning_rate": 1.0926199633097156e-05,
+ "loss": 2.7482,
+ "step": 2040
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4454427382791054,
+ "learning_rate": 1.0772324609982787e-05,
+ "loss": 2.7293,
+ "step": 2041
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.43022103892222535,
+ "learning_rate": 1.0619529005571893e-05,
+ "loss": 2.7568,
+ "step": 2042
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5059560897960033,
+ "learning_rate": 1.0467813156988748e-05,
+ "loss": 2.7625,
+ "step": 2043
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4676097860348837,
+ "learning_rate": 1.0317177398975031e-05,
+ "loss": 2.8844,
+ "step": 2044
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5021445696594896,
+ "learning_rate": 1.0167622063889326e-05,
+ "loss": 2.6942,
+ "step": 2045
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.47780505678852253,
+ "learning_rate": 1.0019147481706625e-05,
+ "loss": 2.7933,
+ "step": 2046
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4720890351547827,
+ "learning_rate": 9.871753980017051e-06,
+ "loss": 2.8034,
+ "step": 2047
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4856424928843303,
+ "learning_rate": 9.725441884025855e-06,
+ "loss": 2.7439,
+ "step": 2048
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4736024903086179,
+ "learning_rate": 9.580211516551862e-06,
+ "loss": 2.813,
+ "step": 2049
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4841094138822361,
+ "learning_rate": 9.436063198027589e-06,
+ "loss": 2.8396,
+ "step": 2050
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4920581696669587,
+ "learning_rate": 9.292997246497959e-06,
+ "loss": 2.8822,
+ "step": 2051
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5228107348726229,
+ "learning_rate": 9.151013977619693e-06,
+ "loss": 2.8705,
+ "step": 2052
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4470364838226458,
+ "learning_rate": 9.010113704661038e-06,
+ "loss": 2.7169,
+ "step": 2053
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.46936047043073365,
+ "learning_rate": 8.870296738500316e-06,
+ "loss": 2.867,
+ "step": 2054
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.450006808643461,
+ "learning_rate": 8.731563387626095e-06,
+ "loss": 2.8004,
+ "step": 2055
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4789680188993925,
+ "learning_rate": 8.59391395813569e-06,
+ "loss": 2.8102,
+ "step": 2056
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.42732438231896425,
+ "learning_rate": 8.457348753735328e-06,
+ "loss": 2.8046,
+ "step": 2057
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.44631080403625134,
+ "learning_rate": 8.321868075738593e-06,
+ "loss": 2.7695,
+ "step": 2058
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.46629998888195107,
+ "learning_rate": 8.187472223066371e-06,
+ "loss": 2.8161,
+ "step": 2059
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4543475553812011,
+ "learning_rate": 8.054161492246136e-06,
+ "loss": 2.6853,
+ "step": 2060
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.4913157477220057,
+ "learning_rate": 7.921936177411049e-06,
+ "loss": 2.7405,
+ "step": 2061
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.48509873865743036,
+ "learning_rate": 7.790796570299463e-06,
+ "loss": 2.768,
+ "step": 2062
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4590405644418585,
+ "learning_rate": 7.660742960254207e-06,
+ "loss": 2.7279,
+ "step": 2063
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5247811017459154,
+ "learning_rate": 7.531775634222138e-06,
+ "loss": 2.7657,
+ "step": 2064
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.45717441315367896,
+ "learning_rate": 7.403894876753192e-06,
+ "loss": 2.7772,
+ "step": 2065
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.48075361400821537,
+ "learning_rate": 7.277100970000061e-06,
+ "loss": 2.8077,
+ "step": 2066
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.44700284313010125,
+ "learning_rate": 7.151394193717408e-06,
+ "loss": 2.793,
+ "step": 2067
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.49147655830494874,
+ "learning_rate": 7.026774825261151e-06,
+ "loss": 2.8699,
+ "step": 2068
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.45734570660675977,
+ "learning_rate": 6.903243139588233e-06,
+ "loss": 2.8503,
+ "step": 2069
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.463690533222173,
+ "learning_rate": 6.780799409255522e-06,
+ "loss": 2.8439,
+ "step": 2070
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.47873911932843277,
+ "learning_rate": 6.659443904419637e-06,
+ "loss": 2.8481,
+ "step": 2071
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.430584033192979,
+ "learning_rate": 6.539176892836008e-06,
+ "loss": 2.8482,
+ "step": 2072
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4461122642981438,
+ "learning_rate": 6.4199986398585375e-06,
+ "loss": 2.7502,
+ "step": 2073
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4926468634353465,
+ "learning_rate": 6.3019094084388884e-06,
+ "loss": 2.7716,
+ "step": 2074
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4604122001543981,
+ "learning_rate": 6.18490945912592e-06,
+ "loss": 2.7906,
+ "step": 2075
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4238013264400932,
+ "learning_rate": 6.068999050065249e-06,
+ "loss": 2.7953,
+ "step": 2076
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4592468117000258,
+ "learning_rate": 5.9541784369983586e-06,
+ "loss": 2.6671,
+ "step": 2077
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4509067002540054,
+ "learning_rate": 5.840447873262433e-06,
+ "loss": 2.7382,
+ "step": 2078
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.4733189347756639,
+ "learning_rate": 5.727807609789471e-06,
+ "loss": 2.7501,
+ "step": 2079
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5052341986995316,
+ "learning_rate": 5.616257895105892e-06,
+ "loss": 2.817,
+ "step": 2080
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.47668839711376165,
+ "learning_rate": 5.505798975331933e-06,
+ "loss": 2.8105,
+ "step": 2081
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.44248654097328816,
+ "learning_rate": 5.396431094181198e-06,
+ "loss": 2.789,
+ "step": 2082
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.48070199659483964,
+ "learning_rate": 5.288154492960107e-06,
+ "loss": 2.8633,
+ "step": 2083
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.44850382546206996,
+ "learning_rate": 5.1809694105671155e-06,
+ "loss": 2.787,
+ "step": 2084
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4379239775705147,
+ "learning_rate": 5.074876083492441e-06,
+ "loss": 2.7177,
+ "step": 2085
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4857181159656042,
+ "learning_rate": 4.96987474581767e-06,
+ "loss": 2.7514,
+ "step": 2086
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.47197684614564156,
+ "learning_rate": 4.865965629214819e-06,
+ "loss": 2.8153,
+ "step": 2087
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.48393932038819054,
+ "learning_rate": 4.763148962946218e-06,
+ "loss": 2.6905,
+ "step": 2088
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.48610966967589436,
+ "learning_rate": 4.661424973863681e-06,
+ "loss": 2.7602,
+ "step": 2089
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.553048205668431,
+ "learning_rate": 4.560793886408398e-06,
+ "loss": 2.8479,
+ "step": 2090
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.45187448835288313,
+ "learning_rate": 4.461255922609986e-06,
+ "loss": 2.7745,
+ "step": 2091
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4631367621720634,
+ "learning_rate": 4.362811302086267e-06,
+ "loss": 2.7969,
+ "step": 2092
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.45960380702274817,
+ "learning_rate": 4.265460242042885e-06,
+ "loss": 2.6522,
+ "step": 2093
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4400208165169231,
+ "learning_rate": 4.169202957272522e-06,
+ "loss": 2.81,
+ "step": 2094
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.44154789683210316,
+ "learning_rate": 4.074039660154738e-06,
+ "loss": 2.8032,
+ "step": 2095
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4746925886053807,
+ "learning_rate": 3.9799705606551325e-06,
+ "loss": 2.7665,
+ "step": 2096
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.46274953700322696,
+ "learning_rate": 3.886995866325294e-06,
+ "loss": 2.8399,
+ "step": 2097
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4676260412022973,
+ "learning_rate": 3.795115782302072e-06,
+ "loss": 2.7399,
+ "step": 2098
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4310798787304817,
+ "learning_rate": 3.704330511307197e-06,
+ "loss": 2.7813,
+ "step": 2099
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4556599050672803,
+ "learning_rate": 3.614640253646828e-06,
+ "loss": 2.7209,
+ "step": 2100
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5141632775597322,
+ "learning_rate": 3.5260452072110594e-06,
+ "loss": 2.773,
+ "step": 2101
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.4716452653637496,
+ "learning_rate": 3.4385455674737498e-06,
+ "loss": 2.7544,
+ "step": 2102
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.48239390160249335,
+ "learning_rate": 3.3521415274915256e-06,
+ "loss": 2.8291,
+ "step": 2103
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.47898508642762055,
+ "learning_rate": 3.2668332779041133e-06,
+ "loss": 2.8183,
+ "step": 2104
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4391336843322346,
+ "learning_rate": 3.1826210069332838e-06,
+ "loss": 2.8003,
+ "step": 2105
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.46510233853519356,
+ "learning_rate": 3.0995049003826324e-06,
+ "loss": 2.8022,
+ "step": 2106
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4487659413862821,
+ "learning_rate": 3.017485141637355e-06,
+ "loss": 2.7327,
+ "step": 2107
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.47778849965513936,
+ "learning_rate": 2.9365619116636376e-06,
+ "loss": 2.7856,
+ "step": 2108
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4713857423564985,
+ "learning_rate": 2.856735389008269e-06,
+ "loss": 2.8436,
+ "step": 2109
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4390533847470885,
+ "learning_rate": 2.778005749798307e-06,
+ "loss": 2.7581,
+ "step": 2110
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4911732991020293,
+ "learning_rate": 2.700373167740744e-06,
+ "loss": 2.7569,
+ "step": 2111
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4384803995488124,
+ "learning_rate": 2.62383781412201e-06,
+ "loss": 2.6527,
+ "step": 2112
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.462254673706326,
+ "learning_rate": 2.5483998578076373e-06,
+ "loss": 2.8175,
+ "step": 2113
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.43087664244111007,
+ "learning_rate": 2.4740594652418736e-06,
+ "loss": 2.8087,
+ "step": 2114
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.46030771482668964,
+ "learning_rate": 2.4008168004472917e-06,
+ "loss": 2.7712,
+ "step": 2115
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.48699442252074393,
+ "learning_rate": 2.3286720250246253e-06,
+ "loss": 2.8433,
+ "step": 2116
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.44469749237903744,
+ "learning_rate": 2.2576252981520994e-06,
+ "loss": 2.7922,
+ "step": 2117
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.482177729425551,
+ "learning_rate": 2.1876767765853233e-06,
+ "loss": 2.7545,
+ "step": 2118
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.46452828310557703,
+ "learning_rate": 2.118826614656788e-06,
+ "loss": 2.7341,
+ "step": 2119
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4959307287110597,
+ "learning_rate": 2.051074964275701e-06,
+ "loss": 2.7373,
+ "step": 2120
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4453020549150987,
+ "learning_rate": 1.984421974927375e-06,
+ "loss": 2.7444,
+ "step": 2121
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4356888587207899,
+ "learning_rate": 1.9188677936731734e-06,
+ "loss": 2.7687,
+ "step": 2122
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4416527494338029,
+ "learning_rate": 1.8544125651501208e-06,
+ "loss": 2.7809,
+ "step": 2123
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.467033643316628,
+ "learning_rate": 1.7910564315704035e-06,
+ "loss": 2.6572,
+ "step": 2124
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4554417988270171,
+ "learning_rate": 1.7287995327214257e-06,
+ "loss": 2.8849,
+ "step": 2125
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.4600049846471278,
+ "learning_rate": 1.6676420059649754e-06,
+ "loss": 2.814,
+ "step": 2126
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.476287749771908,
+ "learning_rate": 1.6075839862374485e-06,
+ "loss": 2.7309,
+ "step": 2127
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4716490679826876,
+ "learning_rate": 1.5486256060492366e-06,
+ "loss": 2.8892,
+ "step": 2128
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.429675874701832,
+ "learning_rate": 1.4907669954844495e-06,
+ "loss": 2.7518,
+ "step": 2129
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.47573967987410853,
+ "learning_rate": 1.434008282200805e-06,
+ "loss": 2.8301,
+ "step": 2130
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4695350431012943,
+ "learning_rate": 1.3783495914291844e-06,
+ "loss": 2.7307,
+ "step": 2131
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4375960837779474,
+ "learning_rate": 1.3237910459734104e-06,
+ "loss": 2.824,
+ "step": 2132
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.42893081543092665,
+ "learning_rate": 1.270332766210025e-06,
+ "loss": 2.7471,
+ "step": 2133
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.48808630112278223,
+ "learning_rate": 1.2179748700879012e-06,
+ "loss": 2.7899,
+ "step": 2134
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4872253428087659,
+ "learning_rate": 1.1667174731280205e-06,
+ "loss": 2.7974,
+ "step": 2135
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4410282618655092,
+ "learning_rate": 1.1165606884234182e-06,
+ "loss": 2.7597,
+ "step": 2136
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.44321172825476063,
+ "learning_rate": 1.0675046266386268e-06,
+ "loss": 2.7776,
+ "step": 2137
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4722702070453984,
+ "learning_rate": 1.019549396009567e-06,
+ "loss": 2.8274,
+ "step": 2138
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.45088226351656135,
+ "learning_rate": 9.726951023434348e-07,
+ "loss": 2.8221,
+ "step": 2139
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.45161833872392165,
+ "learning_rate": 9.269418490182591e-07,
+ "loss": 2.8249,
+ "step": 2140
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4407159175282076,
+ "learning_rate": 8.822897369827332e-07,
+ "loss": 2.7927,
+ "step": 2141
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4733554830666991,
+ "learning_rate": 8.387388647561611e-07,
+ "loss": 2.8171,
+ "step": 2142
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4689389208537688,
+ "learning_rate": 7.962893284279016e-07,
+ "loss": 2.8925,
+ "step": 2143
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.49020795755714597,
+ "learning_rate": 7.549412216574791e-07,
+ "loss": 2.84,
+ "step": 2144
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.48149953586021593,
+ "learning_rate": 7.146946356743067e-07,
+ "loss": 2.7792,
+ "step": 2145
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4815559791682112,
+ "learning_rate": 6.755496592773524e-07,
+ "loss": 2.7417,
+ "step": 2146
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.4605473842465863,
+ "learning_rate": 6.375063788349733e-07,
+ "loss": 2.7574,
+ "step": 2147
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.47098129929630517,
+ "learning_rate": 6.005648782848594e-07,
+ "loss": 2.6296,
+ "step": 2148
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.47629641861683797,
+ "learning_rate": 5.647252391337565e-07,
+ "loss": 2.8252,
+ "step": 2149
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.454109128509577,
+ "learning_rate": 5.299875404572441e-07,
+ "loss": 2.7242,
+ "step": 2150
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.46593669186930137,
+ "learning_rate": 4.963518588996796e-07,
+ "loss": 2.8729,
+ "step": 2151
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.45313079741382556,
+ "learning_rate": 4.638182686738657e-07,
+ "loss": 2.8391,
+ "step": 2152
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4996976739343907,
+ "learning_rate": 4.3238684156110543e-07,
+ "loss": 2.7901,
+ "step": 2153
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4786175702574572,
+ "learning_rate": 4.020576469108139e-07,
+ "loss": 2.745,
+ "step": 2154
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.48301096005485294,
+ "learning_rate": 3.7283075164046274e-07,
+ "loss": 2.7751,
+ "step": 2155
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4500305389544084,
+ "learning_rate": 3.4470622023557995e-07,
+ "loss": 2.741,
+ "step": 2156
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4560577429297914,
+ "learning_rate": 3.176841147492504e-07,
+ "loss": 2.6666,
+ "step": 2157
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.46640735639493036,
+ "learning_rate": 2.9176449480244895e-07,
+ "loss": 2.7785,
+ "step": 2158
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.45548634589695075,
+ "learning_rate": 2.6694741758342967e-07,
+ "loss": 2.7658,
+ "step": 2159
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.44373212603559636,
+ "learning_rate": 2.432329378478926e-07,
+ "loss": 2.7824,
+ "step": 2160
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.46690234572380357,
+ "learning_rate": 2.2062110791892798e-07,
+ "loss": 2.7646,
+ "step": 2161
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.45681787108960736,
+ "learning_rate": 1.9911197768662792e-07,
+ "loss": 2.8947,
+ "step": 2162
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4424232177684632,
+ "learning_rate": 1.7870559460814173e-07,
+ "loss": 2.8332,
+ "step": 2163
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4366246222920518,
+ "learning_rate": 1.5940200370750947e-07,
+ "loss": 2.6649,
+ "step": 2164
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4763229197042357,
+ "learning_rate": 1.4120124757577291e-07,
+ "loss": 2.7747,
+ "step": 2165
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.47563211046899717,
+ "learning_rate": 1.2410336637047603e-07,
+ "loss": 2.8709,
+ "step": 2166
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4498751264484186,
+ "learning_rate": 1.081083978159425e-07,
+ "loss": 2.7341,
+ "step": 2167
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.47432594620082225,
+ "learning_rate": 9.321637720310915e-08,
+ "loss": 2.7993,
+ "step": 2168
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.4739631477894588,
+ "learning_rate": 7.942733738924845e-08,
+ "loss": 2.8248,
+ "step": 2169
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.43999709410374166,
+ "learning_rate": 6.6741308798135e-08,
+ "loss": 2.8803,
+ "step": 2170
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4731764216127875,
+ "learning_rate": 5.5158319419934546e-08,
+ "loss": 2.7329,
+ "step": 2171
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.47832521556779595,
+ "learning_rate": 4.4678394810981906e-08,
+ "loss": 2.7466,
+ "step": 2172
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4636618700641286,
+ "learning_rate": 3.5301558093947527e-08,
+ "loss": 2.8492,
+ "step": 2173
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.44155975425804933,
+ "learning_rate": 2.7027829957559925e-08,
+ "loss": 2.7841,
+ "step": 2174
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4495383120162191,
+ "learning_rate": 1.985722865682771e-08,
+ "loss": 2.826,
+ "step": 2175
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4362743603683773,
+ "learning_rate": 1.3789770012762048e-08,
+ "loss": 2.7226,
+ "step": 2176
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4255899094198324,
+ "learning_rate": 8.825467412376665e-09,
+ "loss": 2.7198,
+ "step": 2177
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4411677339131427,
+ "learning_rate": 4.96433180879885e-09,
+ "loss": 2.8168,
+ "step": 2178
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4693806256949284,
+ "learning_rate": 2.206371721158451e-09,
+ "loss": 2.9341,
+ "step": 2179
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.4543916814101575,
+ "learning_rate": 5.515932345323549e-10,
+ "loss": 2.7974,
+ "step": 2180
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.45095366720040175,
+ "learning_rate": 0.0,
+ "loss": 2.8326,
+ "step": 2181
+ },
+ {
+ "epoch": 1.0,
+ "step": 2181,
+ "total_flos": 7.084547721092137e+17,
+ "train_loss": 2.930072004452915,
+ "train_runtime": 64685.0922,
+ "train_samples_per_second": 8.628,
+ "train_steps_per_second": 0.034
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 2181,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 1000,
+ "total_flos": 7.084547721092137e+17,
+ "train_batch_size": 32,
+ "trial_name": null,
+ "trial_params": null
+}
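
The trainer_state.json closed out above is the standard Hugging Face Trainer log: each log_history entry records epoch, grad_norm, learning_rate, loss and step, and the final entry carries the run summary (total_flos, train_loss, train_runtime, throughput). A minimal sketch for pulling the loss curve out of such a file — the path is an assumption based on this repo's layout, not a file named in the diff:

    import json

    # Path assumed from the checkpoint directories in this repo; adjust as needed.
    path = "06-10-24_sd2.1_llama7b/checkpoint-2000/trainer_state.json"

    with open(path) as f:
        state = json.load(f)

    # Per-step records carry a "loss" key; the trailing run-summary record does not.
    steps = [e for e in state["log_history"] if "loss" in e]
    summary = state["log_history"][-1]

    print("steps logged:", len(steps))
    print("final step loss:", steps[-1]["loss"], "at step", steps[-1]["step"])
    print("mean loss over last 100 steps:",
          sum(e["loss"] for e in steps[-100:]) / len(steps[-100:]))
    print("train_runtime (s):", summary.get("train_runtime"))
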
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/config.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..377cb3667dc2b25472aecfac5dee796a045d8730
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-2-1",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": false,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-2-1",
+ "vocab_size": 32000
+}
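
This config.json describes the fine-tuning checkpoint (LlavaLlamaForCausalLM with a stable-diffusion-2-1 vision tower, mlp2x_gelu projector, bfloat16 weights, adapter tuning disabled). A small sketch for diffing it against another config from this repo, e.g. the projector-pretraining run — both paths are assumptions taken from the directory names in this diff:

    import json

    # Paths assumed from the directory names in this diff; adjust as needed.
    pretrain = json.load(open("06-10-24_sd2.1_llama7b/checkpoint-2000/config.json"))
    finetune = json.load(open("06-10-24_sd2.1_llama7b_ft/checkpoint-5000/config.json"))

    # Print every key whose value changed between the two training stages.
    for key in sorted(set(pretrain) | set(finetune)):
        a, b = pretrain.get(key), finetune.get(key)
        if a != b:
            print(f"{key}: {a!r} -> {b!r}")
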
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/generation_config.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2602302d2b751745299b1aa70969f28531d23ccd
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "temperature": null,
+ "top_p": null,
+ "transformers_version": "4.38.2"
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b341ecdf290ce3b81f1ef3bdbf48383024746fb1
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:148c257de7374ccf54fbd614a8e834970a35446f56d9884a444564bfb056b40c
+size 10140672903
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..18696a8fcb9e6f4edec9083ce8a49bd03145fe8e
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:81c26906ef43bb178815e60009756fbeff5d70fc178c2cfe1b4bb75d0faf77f8
+size 10140673095
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..8389ef09a068475bee4a3a357e6bb2c738ea32cc
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf4657aa0caac0a0a904aaac44d6dc7dc3ea369b24bf5b70cb03be35f781d4f2
+size 10140673223
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..edebb210ed46061712aef4508e83998dce77b97b
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea4bdbced717d20d0c1a1ad69040e909ad22a8028f1ca15f7cb7f7e8039fdeed
+size 10140673159
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c4d4ed576a47c1dc185b1386d37a29eb0d482977
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39756ebfded075341bdea5e45656faeee1f9d894c868c68251e77fb5c02abec3
+size 10140673223
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..e1c2e41390110ced9de371452c58e9985a2e8fb8
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cd9dd94390a1b590a48798cb52ead4a1b1da8e62037cf834aa0e2087602f88d
+size 10140673287
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..5d930cf5541c471cc989006a513578eeaeb8e294
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f06407d9c2fa5adae52f32e6f0922c6dc3ee1a0be4210e75c74eb6d8c7febc0d
+size 10140673159
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..f8956416efdb655c44639be8d69ec6f56734543f
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f180ef342b5b59f97130366d979bba2f0e5d8f5276e75161927f8c38e1383347
+size 10140672711
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/mp_rank_00_model_states.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d604b9c2824634a1ffa9a93fb3f71cf6e9ff6645
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/global_step5000/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65ca989fcaf11a754d4c467c25bb3134c889954fb13707618f4e64b3d8ce547d
+size 13520976283
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/latest b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/latest
new file mode 100644
index 0000000000000000000000000000000000000000..f805186fa43374540c3fa51dfd3cca9ac06e56a5
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/latest
@@ -0,0 +1 @@
+global_step5000
\ No newline at end of file
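
The global_step5000 directory referenced by the latest tag above is DeepSpeed's ZeRO checkpoint layout: one bf16_zero_pp_rank_*_optim_states.pt per data-parallel rank (eight here) plus the consolidated mp_rank_00_model_states.pt. If the same deepspeed version is installed, a full-precision state dict can usually be rebuilt with its bundled zero_to_fp32 helpers; a hedged sketch, with the checkpoint path assumed from this diff:

    # Sketch only: assumes the deepspeed package used for training is available.
    from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

    ckpt_dir = "06-10-24_sd2.1_llama7b_ft/checkpoint-5000"  # contains 'latest' -> global_step5000
    state_dict = get_fp32_state_dict_from_zero_checkpoint(ckpt_dir, tag="global_step5000")
    print(len(state_dict), "tensors reconstructed")
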
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00001-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00001-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0fe140da5d27d1e9759af1332f89f9e86382030e
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00001-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea35db108511bb5c3357601637e9c4ee9c3de1a029be2740551c2435854842b0
+size 4938985352
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00002-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00002-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..3b626ee26f097f7de8b1354362f09693d8cd2a14
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00002-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a78215d2ff442c0b45e40862381c91672fb6816f4cf3243c5178af22efd28b1
+size 4947390880
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00003-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00003-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..73126b515268ebee5fb3b229f86567b38a88d191
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model-00003-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f935b22801f12fae8202b0b86dbff9a5c00a53ec2bd5c23f8bbfaf4ba358a85
+size 3634545800
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model.safetensors.index.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..452be371a188daae5c518442b1c621e414774067
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/model.safetensors.index.json
@@ -0,0 +1,302 @@
+{
+ "metadata": {
+ "total_size": 13520887808
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors"
+ }
+}
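
model.safetensors.index.json maps every parameter name to the shard file that stores it; metadata.total_size is the combined tensor byte count across the three shards. A minimal sketch that uses the weight_map to pull one tensor (here the projector weight listed above) out of the right shard — paths assumed from this diff, and it requires the safetensors package:

    import json, os
    from safetensors import safe_open

    ckpt_dir = "06-10-24_sd2.1_llama7b_ft/checkpoint-5000"  # path assumed from this diff
    with open(os.path.join(ckpt_dir, "model.safetensors.index.json")) as f:
        index = json.load(f)

    name = "model.mm_projector.0.weight"        # multimodal projector entry from the weight_map
    shard = index["weight_map"][name]           # "model-00003-of-00003.safetensors" per the map above
    with safe_open(os.path.join(ckpt_dir, shard), framework="pt", device="cpu") as f:
        tensor = f.get_tensor(name)
    print(name, tuple(tensor.shape), tensor.dtype)
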
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_0.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..afeddd29560d8643a2ce4861f9950ee171534ca5
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59744f3fc31b9562212fc394b77a7456f57a14332b4d89f104cd038668ab0911
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_1.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..c935edbc6253470cb257ead7c21bb50085cb8f0f
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19710f2cf4809ea1e079e73c70af65f8c3bbdd6c6d5817680dcdb3fa476a8989
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_2.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..84987fff60107b7e5d1859c84140014fbff7fdac
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:49480af9b490ef086d194375df5150e0a31c36d4d3f5fcd7a971421ed23829ee
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_3.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..c576281e54367f0928071748e35a00644e871f5b
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8674da845ad077c9c45d66c11574ccd27739aa8adf7f98ccd77e64d638878c63
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_4.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_4.pth
new file mode 100644
index 0000000000000000000000000000000000000000..74a90e1f58c3ab919afe833b4c39893bbe54478d
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_4.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f8d5e9036d744e9ab5e928b6337069b8592c8a780d5a07df23a93989c7a275c
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_5.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_5.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6e14143154dc0a76db9116fc9334a4387d80c357
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_5.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f71f70b50b30df61b140c5543a5879b3f6cd7a81ddd05af5e6a9a77c11642608
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_6.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_6.pth
new file mode 100644
index 0000000000000000000000000000000000000000..09d278536aea48ce01ecd630c20f244d64982bb9
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_6.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15597f53e6125814510068c0722af2a5471eef0dfc6f3b777c714813f6cfa709
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_7.pth b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_7.pth
new file mode 100644
index 0000000000000000000000000000000000000000..687380ec9b7e05f1b4bcdf6656f3ea16f5946300
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/rng_state_7.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:49f55116534c3f43bdf9732af1de2559567c46a6d5924803d9673d1ab75c8887
+size 21687
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/scheduler.pt b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bc898eba47dd273d765082a8002ca96956c865cc
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b88333e73ef750c0e1661161687ef3c0683adce3e43baa72a8db96e90f8cbda
+size 627
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/special_tokens_map.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..f928b2409a393d47ce0d9fe519f17e048a471eca
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer.model b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer_config.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..00456631ca49c4adbd95ae9609e79c6444d97706
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/tokenizer_config.json
@@ -0,0 +1,43 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": true,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": true,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/trainer_state.json b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..725ad1d0613c9140481ecfaca48a13604c2004cf
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/trainer_state.json
@@ -0,0 +1,35021 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.96195469193401,
+ "eval_steps": 500,
+ "global_step": 5000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "grad_norm": 17.86347389075609,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 2.0553,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 16.340788417924784,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.9244,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 13.31230791159293,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.7098,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 14.40727225294918,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.9669,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 16.041732001068368,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.9909,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 15.068210215890273,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.8324,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 13.837817705671661,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.8703,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 15.314386356705906,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.9187,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 18.955730564183586,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 2.0023,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 17.866320156567383,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.891,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 14.965328550240324,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.8438,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 11.89546330250022,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.6365,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 8.384692664709801,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.5361,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 12.619120531844034,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.6459,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 9.194183144132957,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.529,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 10.036346362006853,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.6111,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 5.028586322255148,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.3546,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.4894329606548493,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.286,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 3.003990110161847,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.3436,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.624304641006234,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.294,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.843933611748089,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.3652,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.5282127575561013,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.258,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.572954278290059,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.3478,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.3242785478160233,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.3467,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 1.9444593814766211,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.2672,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.2766386252395425,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.3004,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.9052625124963702,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.3198,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.883473749107834,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.3105,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.8307638440888208,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.2597,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.6968574944097243,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1814,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.072869058382591,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.2785,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4576760922025103,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.2502,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4994099267875476,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.18,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.460264902782038,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.1521,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.5865701097261202,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.2394,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4428017911065363,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.1372,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2998737521778192,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 1.0461,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2763215591692354,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.155,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4121606917705762,
+ "learning_rate": 5e-06,
+ "loss": 1.146,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2631041889278862,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.1765,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3670803879604925,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.1339,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2906797090346698,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1015,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.5085866452824694,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.1426,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3071318057444536,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.092,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1016661057549553,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 0.9978,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2111162907372104,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.1211,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1274844124655137,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.1313,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2668397693417877,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.1452,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.176403429558345,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0395,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1951095967488896,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0549,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0698501123701163,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0768,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3217022898812898,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.1191,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.06152889720562,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.1467,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2207687703601493,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 1.102,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2701407726308969,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0554,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3329090385731375,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 1.1877,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.356023272392615,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.1086,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1849392700006287,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.1194,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0253161321556497,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.017,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0385674726144254,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0556,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1969264274945228,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 1.0569,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0645872005402048,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0202,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2367676967422145,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 1.1035,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0867326076016188,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0911,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.385231704991178,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 1.105,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1983463619808143,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9641,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4198421383078568,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 1.1414,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1099650533110834,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0109,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2290166817062858,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 1.0907,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.267048521231698,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.1442,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1019029454770533,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 1.0056,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2807328166020238,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 1.1258,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1551688945867158,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0295,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0080125770907116,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 1.0032,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.402806385707426,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 1.0427,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2215549239861518,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 1.0579,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.322056156982379,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 1.0335,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1446838795879737,
+ "learning_rate": 1e-05,
+ "loss": 1.0138,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2807246001751553,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0419,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1976418300838048,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 1.1068,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0292812084728495,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0136,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9653551844674515,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 0.9696,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2044521578979546,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.061,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0506107786315697,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 1.0468,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4498246660106078,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 1.0875,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1845347304004339,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0663,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.161786603180624,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0531,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.220869922686358,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0559,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2404652106613836,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 1.0445,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4743753635963737,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 1.0602,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.262408610467295,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0615,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9808569501537702,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 0.9462,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.3013446449801787,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.1231,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1886643407923234,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 1.0244,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1424668824418163,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 1.0228,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2348883959266557,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 1.0589,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.400120543528127,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0111,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2712817733832307,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.0289,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4134889151969168,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 1.0632,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2717376671306364,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 1.0823,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2931804137988707,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 1.0387,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2393826879437473,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0144,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1747673033476334,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 1.034,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1483685440735119,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0326,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2519004715114483,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 1.0626,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4413918805246082,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 1.0787,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2509070242345923,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0336,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1949113000331355,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 1.048,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2924213576049175,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0764,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1957696479928377,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 1.1442,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1727412691673529,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 1.0056,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0751077398034676,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 0.9844,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9960682080303919,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9427,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.3195235989294032,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.039,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2346213976081493,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 1.0804,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2879996809518923,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 1.0023,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9712618840126015,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.9809,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2174492391453324,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 1.1002,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.5518360442312447,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.1022,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.286191029125357,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 1.0813,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0667865886248182,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0152,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.09667692583234,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 0.9334,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2582078306529967,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 1.0562,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.360466747172384,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0997,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1103847133012368,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0463,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1283331979085496,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 1.0209,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2347054944456584,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 0.9817,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0858325607380797,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.9878,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0559584412723666,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9288,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1649804448284384,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0552,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2768658059542357,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0371,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1484469622997453,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9749,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1908479242187853,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.972,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0402509536166955,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9693,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.279153512609506,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 1.0614,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0544219667349493,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.978,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1212869749297296,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 1.1024,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9592020155405737,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 0.9405,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9954547382113923,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 1.0248,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1370111213170742,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0575,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9834559647762655,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0187,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.5310889926993767,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 0.9525,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1342556535073165,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0082,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3336677423335714,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9902,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3619580210614848,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 1.0237,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.4407400734323819,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 1.017,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2835998371906323,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 1.0469,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2354416512823005,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 1.0207,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.55919779578618,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 1.102,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0417469349621207,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0014,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3338629959201007,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0532,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2584602038843604,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 1.0235,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3030823235047277,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 1.0613,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2348866055127186,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 1.0402,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.152356070113191,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9644,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2194973002491283,
+ "learning_rate": 2e-05,
+ "loss": 1.0733,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2754636914469413,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0593,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0948574037314331,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0224,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.183418528094626,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 1.1092,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0560815732774311,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9806,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1358578802193122,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 1.0757,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2374696678028985,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 1.0532,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1079912147610231,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 1.0477,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0229541011411554,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 1.0308,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.112131071523883,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 1.0121,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2280127901765792,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 0.971,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9791620231473505,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 1.0107,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1209582965961267,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 1.0036,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0519871421863807,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9852,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.070507196091445,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 1.0146,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2738992659769082,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 1.0163,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9509746001304237,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 0.952,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1393322955608345,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 1.0464,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2140923212260029,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 1.0614,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.134122432733768,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0424,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0712747655486559,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 0.9444,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0801415577846951,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 1.0388,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1092667973918147,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 1.0614,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3338891937207205,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9689,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2526136774335184,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 1.0375,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2148881075034643,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9825,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1835507142706478,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0314,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0952611287150276,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 1.038,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.055745407520473,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 1.0356,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.047898258171331,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 0.9735,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0844942676429572,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.9655,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2102542072272067,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.9695,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.5351794364867144,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9742,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0829242522713511,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 1.0439,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.196733287932661,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 1.0344,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1412285937057054,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 1.019,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0790195675591219,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9841,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2350711196975463,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0445,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1425652576602308,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 1.0422,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.606456629008868,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 1.0935,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0639619643411806,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 0.9272,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1114960836440815,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 1.1476,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1204248642253234,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 0.9985,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9946527403315302,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9582,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1078356935974523,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 1.032,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.4640703912774728,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0689,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1399093993533496,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0513,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0597768450437273,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 0.9459,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9853784011195135,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9497,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1295434063672711,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 1.0133,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9723564757441976,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9556,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.098636570394235,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9391,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1172358470739157,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 1.0021,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.120101339931511,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0528,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.104126742032167,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9719,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9900241648436833,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9351,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1062113566320204,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9947,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0912350553018915,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 1.0142,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2248252251779344,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9468,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2646610193105368,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 1.0577,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0650758981936763,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0205,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2290765225960296,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 1.0925,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1433310161715657,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 1.0701,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0459359117325058,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0457,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0564872906762484,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.9763,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0819025507537388,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.9547,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1934400237001315,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 1.0922,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.161179989745852,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0458,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0867826068691018,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.034,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0585184576383915,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9095,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0235086504577238,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.9233,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.103995240475684,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 1.0443,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0985369068246764,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 0.9732,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1595157370784517,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9698,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.909226275371055,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9459,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9721503193880335,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.8879,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1167168793076159,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 1.0538,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1203943554433957,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0474,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2003829338387189,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 1.0171,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2001828855704575,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 1.0292,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1777841113075826,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.974,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.3487292075045965,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 1.049,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.021633961676533,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0227,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2011481891405715,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 1.0574,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2480837841874388,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.0578,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1539635705528444,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0352,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2375605968879775,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 1.0455,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2382629442817368,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 1.0042,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2491761957486767,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.984,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9855487927840765,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9494,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0803961197344898,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 0.9561,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.3126933935997356,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 1.0042,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0712696345783617,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 1.103,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2299127993608967,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 1.0678,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0459571990167438,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 1.0011,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.234895666534169,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 1.0247,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0400591644618937,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9653,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1222203513778257,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 1.0255,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0428445711754202,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 1.009,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.993313396131739,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0353,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1138141339680838,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 1.0283,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.06237415793888,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.941,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.103853261194552,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 1.0525,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0851989323256417,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.021,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1013505172431879,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 1.0366,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0528197990342376,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 1.0017,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9961957077596891,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 1.0011,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0986536329203895,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 1.0291,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0895648151990427,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.998,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2103849011530055,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 1.0342,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9507441705890096,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.937,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.183923457824153,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 1.0814,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0666908558586932,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0267,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0938238668192006,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.9797,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9974396215028214,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9745,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1114483480841397,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 1.0203,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.045538807506972,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.0451,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1083583866563211,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 1.0574,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1701859122896223,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 1.0661,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0679023759461068,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.0016,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0776214334936154,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 1.0481,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0994739379188883,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 0.9836,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0540199770109147,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 1.0737,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9725209801803888,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 0.9469,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9522461545148276,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.956,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9723112391371475,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9375,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1097297277623286,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 1.0106,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.131166385572371,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9987,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0827734775808813,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 1.0048,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.117780989198639,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 1.0228,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0681296455501026,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9366,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.060941460084828,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.9932,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.156906817119832,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 1.0177,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3152595631612354,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 1.0269,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0913419325854408,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 1.0438,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.883672872961938,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 0.8671,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.148266864364802,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 1.0138,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9555332042108989,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9864,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9867179317501176,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0427,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.065732418166951,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 1.0111,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1637313554979152,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0636,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1028865508626255,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.9832,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3709736442670721,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9825,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1319212799054115,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.066,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0096657706385352,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 1.0275,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9512338439427707,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 0.9121,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.122276064017236,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.0361,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0049578061308773,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9547,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1621443110111267,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 1.0012,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3393044686281577,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 0.9688,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0477868559661858,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9109,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.041686619229756,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 0.9793,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0268588020698692,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 1.0161,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0338259745864973,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 1.0123,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.2528306493767145,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 1.0563,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1022842626383675,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9896,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9341887599034772,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.9934,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.156864345777507,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 0.9987,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0709841278140249,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0313,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0773209829635768,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 1.0868,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.8752748058379218,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.8964,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0745752107298585,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 1.0216,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1554871302658731,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 1.0333,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1028418446189259,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 1.0139,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.8438392565458945,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9158,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0364604199680414,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.2178021317078678,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 1.0816,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1932580154846006,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 0.9809,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1311891952725674,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 1.0205,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0033781289686534,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 0.9005,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1183374382936138,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9944,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1626536753558943,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 0.9838,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1153734528792847,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9332,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1631398320623185,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 1.015,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1008251297518674,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 1.0311,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9609773365201152,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9125,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0289968099817348,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 1.0187,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9871375826142248,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 1.0017,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9840376508630518,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.8998,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.005956093758362,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.017,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0565881405397841,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 1.0183,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9327636558797503,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9183,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9015052548631132,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9363,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.112830724657339,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 0.9958,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0940370866245723,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.8806,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0072231094052617,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9866,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1803294860217535,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 1.0277,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0648689434483245,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 1.0703,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.4552596539034177,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 1.0366,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1152475023499158,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 1.0401,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9860353379930871,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 1.0142,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9691128601581361,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 0.9511,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9029971163164436,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 1.0134,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0987897442950476,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.988,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9245914555297068,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.9449,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.071436883991358,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 1.0236,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.10276548579562,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 1.0356,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0581216489631218,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9895,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0260701970266382,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 0.9689,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.8816165434650342,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 1.0149,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9546173722232031,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9698,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.2048203176710783,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9945,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.7850382985235519,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 0.8834,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0993381867966732,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0291,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9860726984166353,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 0.9798,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1649400082596675,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9856,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9768507287731225,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 1.0322,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9719930832304183,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 1.0114,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0208665386118512,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9605,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1091918338672133,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 1.0122,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9916669511874985,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 1.0572,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0877445095428253,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9825,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9951418535733804,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9763,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1099440939197665,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9827,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9147837781723692,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 0.9713,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0340673243328542,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0105,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1372723304735337,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 1.0105,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0046670930534125,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.8756,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.007583801986781,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9576,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.034350963445426,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9691,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.7365590280197416,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.8323,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1086572670483268,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.0489,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.98108011919429,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9333,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.995597752797983,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9806,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0386606289436602,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9641,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9811938400165474,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.98,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9680779744918118,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9748,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.8834086257950933,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9368,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.090124582012756,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9813,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0074425933799687,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9535,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1085026154314939,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9324,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0106463055120962,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9752,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.09924917860149,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 1.0201,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1307196942637978,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 0.986,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.2395516795653494,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 1.0174,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1755919525884955,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 1.0104,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1286283315117354,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.022,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0254048346488573,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9568,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0260073369434373,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 1.0247,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1777603983178653,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9998,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9728921221019166,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 1.0295,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1873920401009974,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9524,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1229702475569519,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 0.9283,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9557043740069384,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 1.0169,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8853322061290558,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.9217,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9926636503259895,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9532,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0006958386411087,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9481,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0260858023013157,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9914,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0913630570085329,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9643,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.109445115093341,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 1.0165,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8584168156054991,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 0.9473,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1679236196407334,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 1.0144,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1251644456814822,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9818,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.92846100559881,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.9586,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1989101861770795,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 1.0557,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1576181312622837,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9345,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9947611145521551,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.9857,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.95083284712632,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9734,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8788506837339953,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.9648,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9652159901617957,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 1.0217,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.115957319344163,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0174,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.10631243887674,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9295,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8396659931872941,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.8177,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1557846141605452,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 1.064,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9000696594877208,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 0.894,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9488372546306818,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9482,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.2431576583319777,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 1.0355,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9832767607334706,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 0.9785,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9066624673918414,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9399,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9804432267689261,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.9826,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0474637765844146,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 1.0477,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1981920004067885,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 1.0065,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9423419660649944,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9813,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.039276169197374,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9061,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9606855679333003,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0137,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8373828115493064,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.963,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.208529699031717,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 1.0257,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0672762000421674,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9716,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0963028521904867,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.9474,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9163688190227294,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.9505,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.050544750709766,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9958,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0500893960835593,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9822,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9638183430641204,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9274,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0027183551032477,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 1.0227,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0126448520312659,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.939,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8601680052657347,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 0.9062,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1152170579938703,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9915,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9228161270160428,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9444,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9621324346983371,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9347,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.077600427957637,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0871,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1205398016744077,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9663,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.05513833949092,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9598,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0784477194893523,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 1.0089,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.218897394519893,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 1.0376,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.102108513161202,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 1.002,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1117520137781198,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 0.9867,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9516860024791847,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9682,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0469044011350126,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9555,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.3651285527860566,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9684,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1737464733176803,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 1.0662,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9965826863460079,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 1.0071,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9783513198655882,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9869,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0311206030672846,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9851,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.8945449954022129,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9294,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0110242576733897,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 1.0002,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1047177352183692,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 1.0201,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0722862873101653,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9839,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.7992134051176842,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9206,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.09694344736295,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9826,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.112091064871851,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9378,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0620939801683327,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 0.9833,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0927986239274194,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9961,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0260975079168466,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.898,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1148764798877446,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 1.0624,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.047369326060531,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 1.0518,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9949304877707057,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 0.9958,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0400326074928232,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9636,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0311206230508945,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.02,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0879635240675696,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 1.0428,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0166991214039482,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0782,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1494495553279038,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.028,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0502559175610942,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.95,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1045546514014202,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 1.0072,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.3056896038881167,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 1.0127,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1149911519233298,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9874,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9535015908036342,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.8895,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1040398449723414,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 1.0279,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.2020662594960165,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9662,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1617626482359902,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9733,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9858924210251201,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9833,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.08640612402288,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9355,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0819304471796969,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9629,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.8480780378607349,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9419,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9771514137294415,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.977,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0672876151108321,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9911,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9338685826013793,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 1.001,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.127158307035761,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9582,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0731823106590381,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9314,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1026747968864483,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9816,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8721020093136242,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9347,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1173303456950077,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 1.0086,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1340305355028328,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 1.0657,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8776348494635778,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9833,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9597375630238096,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9231,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.309040553408022,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 1.0313,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1664832697368677,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9909,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9732734163687559,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9881,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.251367057800584,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 1.0196,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.950723223553314,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9409,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0699038804523258,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 1.0846,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0808870677516138,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0295,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8987687065357317,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9478,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.009587668356229,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9822,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0760293799596983,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 1.0342,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.979947021628255,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.957,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9102526524656575,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9591,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0068945109167378,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.9861,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9908109026807387,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 0.9733,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9392793317686784,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9708,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1514636687669961,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9004,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0612799142292089,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 1.0027,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0884952183522267,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9798,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9909419382985257,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 1.0046,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1115561307544186,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 1.0215,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.2019653967298156,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.972,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9966881370550695,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9743,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1213143026660388,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 1.1124,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9883630166511729,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9529,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0538838631766885,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9849,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0339361235832036,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.8873,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1066944796273215,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 0.9441,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9596076853330495,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 1.0429,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.2700469172838509,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.8791,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9050792010677117,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0016,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.885318967539835,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9863,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9803399653508227,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9765,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0469533529335453,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9708,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9550877375109239,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.9091,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0055702317020512,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 1.0518,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8925291498978056,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9679,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.845516568590393,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.966,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8409431262046926,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 0.9629,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0872103956772339,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9911,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9873600073143931,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9889,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9582318693838419,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 1.034,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0505492025517522,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9326,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.080752233944646,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 1.0414,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0021047241613121,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9727,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0365183591134215,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 1.0163,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9263413124443935,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9942,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9415012128457283,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9592,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.085023024342262,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 1.0216,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0777088572387146,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.9411,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8954427184820994,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.9779,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9419731262583303,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9901,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8817663228638201,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 0.9236,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0954242523958229,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 1.0373,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.955612651878377,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 1.0164,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0524488036996646,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.961,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9236957184737671,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 1.0028,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9901537987037,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.0905,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9930842769836845,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.9376,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0058896090306109,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 1.0343,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8966508232440634,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9702,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0279038868521404,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.843,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9799864758819165,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9933,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8452950137389139,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.8928,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0685671006382729,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 1.0005,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0725039177597746,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 1.0575,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.062041235984927,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.996,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9523922622707521,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9757,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.7964819810821044,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.8787,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1164433094758257,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 1.0268,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0349146489037746,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9208,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9878509364700656,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 1.0082,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.126424620877137,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.996,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0986175315641928,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.9665,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0764478104278572,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 1.0315,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8998641964862616,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.9363,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9679128404985713,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.986,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9776597248647867,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.9886,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9991039283783492,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.9933,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9553531726543301,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9762,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9797491811369795,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 1.0457,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0295952381693567,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9814,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.3381153154690866,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.8935,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0032594519610416,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 0.9598,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9816592441190367,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 1.0833,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0541040477843548,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 1.0731,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0283305630893005,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9996,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.165472800453247,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 1.0854,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.021180337630849,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 1.0666,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1125142366457403,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 1.049,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9844476170662394,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9533,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9812726625726741,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 1.0329,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9616451497505701,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.9243,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1285986493634241,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.945,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9294359108155703,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9501,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1903078262896956,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 1.0148,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.980479406825127,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9456,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1164312675034156,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 1.0495,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.048075933467679,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 1.019,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9192475461396691,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9797,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9640226308845612,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9873,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.118639485931765,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.9998,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0617218463844134,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9422,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.077601678319198,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 1.0219,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0317162756461955,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9653,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9886237905965549,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9958,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8621674625037563,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9172,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0538077678947617,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9863,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0845798248830119,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 1.0658,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1150062798769047,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.9652,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9016377809012965,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 1.0217,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0451722451202368,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 1.0304,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.031425030441544,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 1.0239,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1544689229364578,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 1.0324,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.057258594257786,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 0.9878,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0497997727389952,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 1.0141,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9474310922138933,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 1.0073,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.021159403915022,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 1.084,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0154972835163243,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9819,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0302369938411624,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 1.0083,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8616437948503519,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.974,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1515168257775552,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9448,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.912537750499609,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.947,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9996092764555125,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.9699,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9770734794835924,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 0.9872,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.028670285835641,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 1.0365,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9143019763944704,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9081,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0167790399065086,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 1.0287,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1155834478432463,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 1.0148,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0183895668497078,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 1.0,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9107872923916688,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9703,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1143357699318515,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9656,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.069422848971197,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9959,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9567954135329023,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9908,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1212128831786514,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9949,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1504412617249653,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 1.0192,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9589081642164562,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 1.0319,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8249215086553999,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.8644,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9906162133808685,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 1.0065,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.994494951635327,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.9595,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0287369197090521,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9892,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.859541536851516,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.8776,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8642559431346785,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 1.01,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9788548014720811,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9524,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.033542662447903,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9357,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8718690983137145,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8491,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9375592920806041,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9115,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0320072811824508,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 1.0233,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9946938867087536,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.986,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.026914493281081,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 1.0278,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9583004175067096,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.954,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0515715138309774,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9719,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.005029351853862,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.9243,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8932520359057077,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9068,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9695248569316938,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.9584,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.970978214225334,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.9821,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0971857329654915,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 0.9564,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.150321699726554,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 1.0102,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8737491584881734,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9647,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9897709546027356,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.9557,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.037634026058284,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 1.037,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9681029294161705,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.975,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9516199687205524,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9433,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.024309750310836,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 1.0857,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.226082072960153,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9702,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.003867245307899,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9951,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9406162890873699,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 1.0059,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1470288672543922,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9457,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.244537358005595,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 1.0986,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0184080275706304,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.9256,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9521024969217489,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.9937,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9984073794997418,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.9852,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8725748012597938,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.9448,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2651416339727257,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 1.0173,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9569990583280045,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.9413,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0091673046357896,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.9742,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0912502810880946,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.9711,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9894366562807843,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.8987,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2375743349343702,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.953,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1818693686390342,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9888,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0175580442144072,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.9699,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0133467735882349,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 1.0137,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9750925206262613,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 0.9695,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1890758658396041,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 1.0032,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0248792917422507,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9401,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8310457998466182,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9213,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.881725876694822,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9014,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9050813569396872,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9357,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2913300128537322,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 1.0287,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9569365072551512,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 1.005,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0316515778697148,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9622,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9699361546794562,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 1.0499,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8506759232989879,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.8996,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9500681568094445,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.969,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.986962190625067,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 1.0111,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9798509690880004,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9696,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1836872602890092,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.9689,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0403191883207907,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9156,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9375830839514839,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.9596,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9713611354979523,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 1.0016,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0222670176295763,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 1.0191,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0016663007872235,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.9667,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0336727978333773,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 1.0496,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8976968260939017,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.9628,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8218270591072413,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.8697,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0184264262300544,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9943,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0086518902498114,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9802,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.020839670513961,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9747,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9388421502196214,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9456,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0485208531205585,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.9839,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.909534274569543,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.897,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0205339097605757,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 1.0331,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9071340678012022,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.8875,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.92489406786702,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.8978,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9015811878458317,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9602,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1509869566858648,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9402,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9630806118398794,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 0.9199,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0772982326322353,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.894,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0518544320060759,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.9789,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1599584115369084,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 1.0222,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1108159536370812,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9887,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9830021628019077,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9177,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.310911248032901,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9587,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8939556278909199,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.8889,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9591305508666739,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.9946,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0710300642782549,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 1.0379,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9746030258304443,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9298,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.15598408014875,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 1.0194,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9412306944896369,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9599,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0101795567883216,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 1.0061,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8893756653729351,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.8683,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.100881206486699,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 1.0098,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0171269619725332,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 1.0467,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0169124233342686,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9184,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.900877308262594,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9553,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8684162437660856,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.9644,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0240095551783224,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9792,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.085694430569257,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.9762,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0056031342843705,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 1.0149,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0601776270144836,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.9448,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9568548976676675,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9735,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9920128463040526,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.9506,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.863639412832335,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9306,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9496830263021463,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9641,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.010783206181086,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9932,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9552038184149039,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 1.0013,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.899378048924427,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9246,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9170206504026146,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.9364,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8780328136560523,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9373,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0103180987298908,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.9843,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.7846608277324099,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.8487,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9682004254409378,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 1.0076,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9595767627175493,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9891,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8101944582398378,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.8918,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0538486654973627,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9233,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0992956832039944,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9717,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.7802381711934618,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.8638,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9880307162959113,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 1.0436,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9017787755132864,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 0.9658,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.938643330038015,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.9487,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9406237262171853,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9316,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0005251727323674,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 1.0257,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9071087095973415,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.8911,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0070742314486858,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.986,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0345804470898885,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 1.0488,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0532405838039678,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9029,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0435070968348559,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9503,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8484725704835063,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.8668,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8825696372712778,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9284,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.4106660348079156,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.8946,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8380573985935399,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.9591,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9869997692007562,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.0069,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0001381100244082,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.9832,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9346411066835593,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.8938,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.826372309279838,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.8649,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9583486423185323,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 1.0063,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9155429157204348,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.925,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.100953437202969,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.9781,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9910767356335862,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9383,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9466330830022603,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.9991,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8974827937836365,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.8803,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1879861898674295,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 1.0229,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8632606726670962,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.8801,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1017728120330221,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 1.0249,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8668625088114832,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9014,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9298695696032182,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9866,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.023055658401976,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 1.0535,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9742522622235574,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 1.0512,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8059558327082518,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.9789,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.7563506496362821,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8526,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9581062257110713,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.9279,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0320384960403879,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.9907,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0243556166887016,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 1.0331,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9100755913648656,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.8855,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8853085856585948,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.8872,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9183764196710127,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9303,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9601180616455742,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9848,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8006087133278984,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.9199,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9096110010507297,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.91,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1783963202706182,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9924,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0558096188074213,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 1.0305,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0307054410548946,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9389,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9140877921424031,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9895,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0093026349498095,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 1.0162,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9471943682238223,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.8681,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1060929804311044,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 1.0339,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9229585676372616,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.9157,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9515766351316286,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.95,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8835516838717262,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.8409,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8486474945951469,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.9541,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.156521123032195,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 1.008,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.2124822154719448,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9374,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0578764771979732,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.9981,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0412671472392887,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9376,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9389118910154383,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9755,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.916400094928299,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.963,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9537763841864934,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.942,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0222942470093788,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 1.0059,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9531671291708219,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9337,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9673206995827385,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9792,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.076755346471494,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9649,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.007573871583606,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.9941,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0196743835900524,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9957,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9186323059236925,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.9715,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0582302569593054,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 1.0031,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.008376705645545,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 1.0022,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.020725588513926,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9596,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8751660256262519,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.8788,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8339919915175331,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9288,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0534247762404043,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 1.0412,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9329756986366945,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9791,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1974038402905118,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9471,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9492837367064159,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.9876,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8964769346472791,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.9526,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9842887483002896,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9654,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0449871301329061,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9937,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0956586607327587,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9111,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9499494145244028,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.932,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1134533937275666,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9275,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.987142200239311,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.9796,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.922861437517647,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9994,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.040672496640339,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9491,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.121182245905376,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0086,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9382489379393961,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9713,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9032784112027201,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9544,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.941344942029661,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9555,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0794338752456596,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 1.0294,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9796208361823697,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.9753,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9391801401800499,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.9743,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9746592070314117,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9676,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9849563735725314,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9566,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.2526725908887066,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9363,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.024753362541994,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9719,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1193459382390043,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9887,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0957167581230274,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9731,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8810929173302238,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.9067,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9574523964981423,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9502,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.921763216834495,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.9252,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0542993606189686,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9557,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9125679542204395,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.9192,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9505650432203572,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9343,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9797085550859939,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.976,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9378127947174137,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9898,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.021298501233176,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0113,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9821863956339766,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 1.0428,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9461148960938188,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9989,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9747637648410694,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9956,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9287287699604906,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.9849,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0513732675592349,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9361,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8557869605686496,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9625,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.120810804482619,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9831,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.117583170179081,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 1.0286,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9205853078015869,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 1.0139,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.930674992163926,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 1.0258,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0264752575648477,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 1.0151,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0372148521221145,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 1.0382,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9434622133219497,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9812,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9052102214485251,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9339,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9316266612889813,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9013,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9133470615785118,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9294,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9536120519224839,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9119,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9357559667532992,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.9743,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1386309824122716,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.925,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8998242598947602,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.976,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9039806326398101,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9895,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9345914552802297,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9303,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.147386596990526,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 1.078,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.2155363128449506,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.8954,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8840276880732251,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9941,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0028413770383007,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9192,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0383545522633073,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.9872,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.807056210262064,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9211,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.976902533221811,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9424,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8111651059263858,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.8296,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9699141538464766,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9202,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.875464251306016,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.9113,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9397587920810292,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.946,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9364869336361953,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9354,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9005529620400964,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.9719,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0055156280112623,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 1.0446,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9450115081067701,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.945,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9690686920697665,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9882,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9402681346380628,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.8947,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9435792263502418,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9081,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9565390998865296,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9928,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8622694297205252,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.9648,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0227949520042103,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 1.0145,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1632548326893022,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9496,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9640864762450096,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9789,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8875385431624611,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.9221,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1563281326056831,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 1.0354,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8245858613862742,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.8914,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9705990739391612,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9331,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1044362797740637,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 1.0126,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0385793610460308,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9817,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.087873826847476,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 1.0294,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1774112602561206,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.9921,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.044443883038202,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 1.027,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0418712955640264,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.979,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9799529582810814,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9805,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8380106587673484,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9692,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8106029993756833,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.8796,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.967888763511043,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.9426,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8338662955627411,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8948,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.326596871236574,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 1.002,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9403465734130465,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.9673,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.939767471544234,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9745,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9209027941278132,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.9733,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.980995120756399,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9022,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9822517815616164,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9176,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9732772645231046,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9272,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9033484939097034,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9156,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.821730878154358,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.8436,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8968746163391496,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9866,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9281483108229382,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9353,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9780845576552375,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.9498,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1074021798523632,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9729,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9864524913978737,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 1.0014,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0299581565078642,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.9974,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9825537232395749,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 1.0037,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9682193599915532,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9545,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.010030344909483,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 1.0333,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9617210233213701,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 1.0205,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9103739156118881,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9213,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8942582250476749,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9589,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.2246614350808254,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9832,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9687670626406696,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 1.0154,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.884833132382099,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 1.0016,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.94668538176656,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9796,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0370259309774235,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9148,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9825488412786969,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9951,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8949635972579602,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.9795,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0744539293861581,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 1.0171,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9486636984000835,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.989,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9991123454952828,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9458,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9393906617167698,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.9234,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9486632769964769,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.9286,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8870441093503006,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.9235,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8445444958236513,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.912,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0227589962975683,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 1.024,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1486051169109843,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 1.0335,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9301070966417918,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.968,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.7461499466378382,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9275,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9433594283109852,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.9797,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0462804278397861,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9755,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8697248223371079,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.8028,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9238445016405439,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9137,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9185090223935947,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.8734,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0060678826112743,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 1.0001,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8640951411665814,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.9071,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9579366597749922,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9849,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1644248312491494,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 1.0096,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.91235757351678,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.9553,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0253659887987323,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.948,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.948888840073587,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.951,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.96031752242475,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 1.0257,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9188946313189172,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9749,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8163168615248108,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 0.8978,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8876465101014909,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.8812,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8862832476660408,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.9869,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8935702653475464,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.9192,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1220879391513314,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.9531,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0465585705009761,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 1.0469,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.970630297145126,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.9726,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1293785087481336,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9579,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.6473436736666303,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.8201,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1236681712299788,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.9738,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0555334678685977,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.9551,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9585948424160133,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.9906,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9632166565383102,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.9773,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9429997781564794,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9394,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9246823658958572,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9167,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8955009469545985,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.8777,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.982781827274399,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.947,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9961617738740262,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 1.0081,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9333120768087941,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 1.015,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9245694395241534,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9596,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9574096418211646,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.9551,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9183459344015832,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9506,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9746043591188694,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.9975,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0161883025404344,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.9156,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8604787162644674,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.9258,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9866324292883194,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 1.0335,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9478964519070111,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.9153,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0778311714738529,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9707,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.093708408365323,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.9556,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9361796608322317,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9856,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.012900733529525,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.9084,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8303001646786801,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9557,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.002565523250232,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9794,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0463288126955839,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 0.9744,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.941255816261249,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.9816,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8403448876068528,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9146,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0064716949813777,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 1.0151,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9652778830269011,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.9702,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9760926012576204,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.9315,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.1018390357024368,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9942,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9995486970554732,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9465,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0214383067506745,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9439,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9481003865939039,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.901,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.885272514013036,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9229,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.800853261093981,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9221,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9120921974688442,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.8923,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9651722966404574,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.9409,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.056961914378481,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 1.0135,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9557639326224096,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9502,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9921641896167874,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9231,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8787286117965492,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8833,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8355556429449305,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9224,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0692043922458312,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9477,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9987461436123213,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9554,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.972986412074494,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9946,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9469262820444003,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9943,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8556549384591313,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9185,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0952135503576756,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 1.0426,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9039786625494457,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.958,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.1473722853807693,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9515,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8948166217125512,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8794,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.90770359735155,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.9746,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8731173504476053,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9967,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9589506765396064,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9686,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9179436692910353,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9728,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.03075208257366,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9082,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9147623943087246,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.982,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0474706977056625,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9273,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0085098460886854,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.9805,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9433410143694413,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8955,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0023684630725813,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9487,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9042810223427815,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.9821,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.947212109086368,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.9766,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9009711629257964,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9436,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9872688297321746,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 1.0462,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.021365604415821,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 1.0377,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8925480189296228,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9581,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9920114223461531,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.9259,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9946430045651665,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 1.0001,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.040563362121139,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.979,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0489211802607663,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 1.0051,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9349229223664337,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9621,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0071684249504653,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9809,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.251399388463867,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 1.0743,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0200937220397808,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9124,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.97407087086635,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9745,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.935686022956608,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.9901,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8836199331209209,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.9436,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.869197918578686,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.8845,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8100663673943043,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.8839,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9655502745076743,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9423,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8690748146944519,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.8431,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8811276540359169,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.9079,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0069589271499593,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9044,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0234408727755917,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 1.0407,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0837464402225852,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 1.0263,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0292504472504127,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.9548,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.908745860907942,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9254,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.1646724407458005,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9509,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9909734683436034,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9289,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.002461658734012,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 1.0389,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.910458699203309,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9994,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8121839823450351,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.8556,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0611317209258873,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9501,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.985560672984148,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.9927,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0521324339726432,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.98,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9112400884123598,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 1.0643,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7483426010932785,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.8655,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.1086028513440693,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 1.0325,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8236520402844943,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9421,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9200072563997702,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.942,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7662944807350218,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.8268,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9624631257461757,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9214,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8908791958342578,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.9221,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9747783944581924,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 1.0187,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.94025272242328,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.9019,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8947113472161731,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9624,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.985869019422871,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 1.0161,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8385657771238609,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8857,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8999697279164486,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.9809,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.920623527150245,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.9333,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.075317101765974,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.9021,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0610900088797453,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9317,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9197351143103815,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.9651,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9797419798113135,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9264,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8828157469966135,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 1.0207,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0098529882564022,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.8678,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.051989162435478,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 1.0025,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.927689066709106,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9607,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9938916932686059,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 1.0437,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.927141268019181,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9727,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9726136652594168,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.9964,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9530963671108253,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.952,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.106990848275471,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.9508,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9673686147980758,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.9681,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8343046704531886,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9171,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7441537131888142,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.8558,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9930369606195396,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9596,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.047302277217802,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 1.0074,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9635653288831207,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9943,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0121450460420647,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.9459,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8734428971146019,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.8691,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9891237469797581,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 1.0021,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8383843397321018,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.8792,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9169848284867886,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.9782,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.812537991341618,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.8762,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9712230515155104,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 1.0158,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0467586430429845,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.9798,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9931502683538673,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.9315,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8856443563518548,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9519,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8506641491583667,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9529,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.051464422643405,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 1.0674,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9251080756042125,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8493,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8941700685899955,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 1.028,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9002436654481246,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.8781,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8857809098263753,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 1.0007,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8212635799496153,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.8996,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9372748971442931,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9198,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.851328921929426,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.9322,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9908145107104688,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.959,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.7937945012112343,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8538,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.087608126510884,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 1.0047,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.7446048568531575,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8969,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0822139796141856,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9502,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9600482594870258,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9806,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9326920716444723,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 1.0264,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.97081959340405,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 1.0173,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9811815614394553,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.9681,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9219602092362942,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.9602,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9553618273012217,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.988,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.902221747042117,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9988,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0407944510509943,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 1.0251,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0741838415932836,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.9604,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0814179410634572,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.9236,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9407307179166049,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9019,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0587512295938475,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.9648,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8537552139110713,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.9121,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.94281045754773,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9113,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9429053125538163,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.8986,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.932604760192039,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9518,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9607272552947523,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9391,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9337823412461219,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 1.0413,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9922449201391417,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 1.0025,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0355438362648628,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.9801,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.106835762087524,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 1.0027,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9230478593917503,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.9705,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0355760595769947,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.9471,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0294104362813747,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 1.0289,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.052462509540482,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9765,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8886580869632086,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9855,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9099349721948723,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.921,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9484292405916163,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.944,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9717620608623369,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.9103,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9608276324247862,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.8674,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.001344637303453,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9837,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8544404041403327,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 0.8755,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9141723297334705,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9457,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9886223468023465,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.9728,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0600081486254507,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.9208,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8436680768892132,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.9457,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9626244775383591,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.9307,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9433187089471458,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.9741,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0903787871951747,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 1.0251,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.011102846547872,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9666,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9294396451230055,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9695,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.973261193007203,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9897,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9442468983865271,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9682,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8074446116689458,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.8829,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9634063394636038,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.9819,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0653655707501328,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.9773,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.065647363497974,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9461,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9984831822120185,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.9614,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9474674675908505,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9559,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9824152707120614,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9607,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8882962592401471,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9513,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.875115814404789,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 1.0244,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9643588720334034,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.9125,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.1274974643025621,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.9605,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9349648433629218,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9982,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.815653080520809,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8316,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.7910781887120026,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.8553,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8841774936513562,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.9186,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8864445348989288,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.991,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9410301349745667,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 1.0007,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0084725960935985,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 1.0259,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9242962301251781,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.9528,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8983632041439181,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 1.0111,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9318108229499958,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9742,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9564298457717368,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.9364,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8748108268713141,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.9488,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9151840630689074,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9233,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.1004063155897594,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.978,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.020310681755496,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9741,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0025734462588363,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.984,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0263104225725488,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9906,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0610648658830775,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.9219,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8737007195313583,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.8917,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9704653680939311,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.9956,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.944752034675354,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9019,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.880769968121093,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9645,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8357603643593192,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.9863,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9190765177231858,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 0.9466,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0091428692503979,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.954,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9789893907912125,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.9281,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.7775611238049662,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.8869,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9877691453435866,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9017,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0823586569795929,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 1.0658,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9611279006529012,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9879,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.99106652800465,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 1.0262,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8822594092653521,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.912,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8537100476653262,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.9129,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8768396188614327,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.9473,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9503641946345763,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.872,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.7876477230036979,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.8937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8876088559819371,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.8703,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9609032274449619,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.9976,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9656891401062295,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.9703,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0140256801740455,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9849,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9256255304578437,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.9887,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9372804605703864,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9322,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.057818883814277,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.9809,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9676727878687101,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.8472,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0569684049944226,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 1.0111,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9478058009751226,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9973,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8491612751728744,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.932,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.063733075647585,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.9402,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.927719158248588,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.9304,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.051457256994107,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 1.0014,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9145175644707474,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.8904,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9673432670172137,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9841,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0263580266051877,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.8794,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9370820179122746,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.9087,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0194286821464282,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 1.0346,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9636997168720651,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9335,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0358575002137034,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 1.0072,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9861474572796306,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.8985,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0167985703717612,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 1.0352,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8661834944686028,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8714,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.918220279098925,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.9672,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.1654835314400813,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.9206,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8782864953837353,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.94,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.001155049995312,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.9866,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0195402057298208,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.9989,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8561818909574825,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9136,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9988571455787769,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9246,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8682760409408626,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9751,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.111973346218321,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.9218,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9717967427736105,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.8805,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8552273677571272,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.9544,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8998533409942734,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.9733,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9964597092880161,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 1.0174,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9465538103341393,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9353,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.965431191161349,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.9351,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.264881078759489,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.9759,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8668281934705195,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9194,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9861236166681108,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.9005,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8703101375822144,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9608,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9571789034000655,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 1.0034,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8375835970467771,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.9383,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.873469492585692,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.8697,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8574427602674748,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.9356,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0067934798289755,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 1.0,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9170434737361712,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 1.0018,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9622237370244004,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.9527,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9526954216812155,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.9224,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8787480910042309,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.9485,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9787231008114128,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 1.071,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.074853454478588,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9908,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0235177131060391,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.9707,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0875192420075175,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9887,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9522200400988253,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.9261,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8772882364742024,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9698,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0802712789454298,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9234,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9513937791159559,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.9386,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9495223336458697,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8736,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9214574673472887,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.9226,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9482194716583879,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.9556,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9567744881336991,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.8896,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0176557179080163,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.969,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0003284870829507,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9976,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0040582437896988,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9894,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.918876339799899,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.9548,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9850769224964281,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.9577,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8495345907621838,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9728,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0372223951877135,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.9794,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0085653027349983,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 1.0044,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.7253238694380731,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8192,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0086435192691192,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.939,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8356277905093833,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9647,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.077391101906165,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9581,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.950685147204352,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9141,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9903336045903749,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 1.004,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.886512493678247,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.9016,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8966168752937466,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.8623,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.1163911336091636,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 1.0104,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9704813893448934,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.9913,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8654900747799749,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.9129,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8695657587765406,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.9766,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0624852345997384,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 1.0302,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.009772185178878,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.8837,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9065866430845643,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.93,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9044651105180835,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.8793,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8743606898363903,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9691,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8690789555787685,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9389,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8684902881631846,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9315,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9516158888783537,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.9167,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.04736303605119,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.9415,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0474196172115005,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.993,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8808689748638467,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.9259,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.5210880159917515,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8962,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0686317969125403,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.9928,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9764220372749869,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9462,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9282082567096386,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.8972,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0169851063290778,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 1.0223,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.1104902130143832,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.9622,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0214960904935697,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.9756,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.837251199700536,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.9294,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9962820179048386,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.968,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8767083473968104,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.8565,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.010031928570089,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 1.0199,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.1472981012746335,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9862,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0177100131803676,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 1.0558,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8399003820676054,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8926,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8163751235189033,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.8762,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9110139912937479,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.9706,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0523904592959699,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.9904,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0046387478908356,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.9543,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9421259997094655,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9508,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9915527167921139,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9085,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9711906402895569,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9789,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9636222306821435,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9992,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9504937840612754,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9774,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.85508756889946,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.9298,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.001724464321067,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8969,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0795430574842528,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 1.0655,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8365748326097373,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.8813,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8549159907728331,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.9173,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9514007537768869,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.899,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9243396251319407,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9007,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9386279339333949,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9582,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.938863003614121,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 0.9466,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7746920848726053,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.8744,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8800209367790253,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9296,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9563591592089701,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.9415,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0823425450262547,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9155,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9294769199886521,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.9396,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0128160910927457,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.9102,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7193778243209328,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8595,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.912559188534125,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9292,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9827161963308617,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.9035,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8175980382563796,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.8728,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9892005407224478,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.9637,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8617142576245806,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.8283,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7774387917332699,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8982,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0089743260360584,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.9725,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0235716444291723,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9613,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8920760393771107,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.8932,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9677838999532018,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.9674,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0557970334664732,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.889,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9006718214048022,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.9292,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8978208423654963,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.9762,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.012413888892859,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9616,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9219652935841612,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.9597,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.1667935403837504,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.952,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0657355088586513,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9978,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8668487883174316,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9998,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.83077840213205,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.9163,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0103332678264763,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.9694,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9523874223780286,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9678,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8803444409792228,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 1.0225,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9128646750795694,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.8621,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9722068964197508,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.965,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8680942448861937,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9419,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8871975637381099,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.94,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.1752264909759393,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.9567,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.942533816212278,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 1.0213,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0487718670291166,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.9706,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8596585129841071,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9062,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9969760294900244,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.9547,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8619602284633182,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.9775,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9848956550531245,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.8821,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.972351111094236,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.9936,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9780137870066115,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.9961,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9471594062714703,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9669,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8751949009152656,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.9254,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8897100420997975,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 1.01,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9184010627795944,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 1.049,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0906008522685957,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8526,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9814138374215998,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.9829,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9558542792106863,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.9828,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0645694257914269,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.9607,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9576178635912473,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.9126,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9954897980335197,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.9274,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9924604001165576,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.8743,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9578220938883492,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9546,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0133446370102202,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 1.0115,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8321384838785534,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.9437,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.092412274875756,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9602,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8434696929509511,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.8267,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8888575481756741,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9449,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.04536133976919,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.9354,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9264301612973153,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.993,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0448132360437183,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9921,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.045890976485631,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.9247,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9208093556615694,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.9543,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.7828588302711406,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.8153,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9600069794377464,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 1.0262,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0360936079688903,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 1.0395,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9443134361395065,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.9432,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8957425811268978,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.966,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9221316446068092,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.9855,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9926472916387251,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.912,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.7608951737971176,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.9003,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.013479925276139,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9588,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9573690285449755,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9775,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9034380119557153,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.9887,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0344792829013392,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 1.0164,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.962111819460091,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9532,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9307416216103737,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.9921,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9097415683906258,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.945,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9216637422367028,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9544,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0345086013912552,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 0.9326,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.860918853096138,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.9159,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.052868992385184,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 1.0088,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.976301583243505,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 1.0028,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8985498819345825,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9355,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.959126962988729,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9988,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.000398571568602,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.9796,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.948004513811074,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9664,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8547904586649323,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.9382,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8169213509760057,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.9215,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0583824570606166,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 1.0131,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.1342358394785241,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9261,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9996791553998676,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9718,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9034438119698405,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.991,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9447949661240993,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9383,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9586454035674055,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9168,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9428822661438724,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.91,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9251971302121317,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 1.0019,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.918862537239612,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9831,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8880369061076363,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9869,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9157286966793228,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.9341,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9134731812829485,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.9283,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9093999478065837,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 1.0232,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0429037065797877,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.9529,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9601967408948001,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 1.0154,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9615543416593485,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.9286,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.912076570285461,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.902,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8642930007495335,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9412,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9544234669861017,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9903,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8391242045717849,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.8772,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8676096900956322,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.9685,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9146017413241526,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 1.0165,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8709824758554244,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.9719,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9430879983467464,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9654,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9178135352600507,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9424,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0915652350945149,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.9302,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9417539872874993,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.9071,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9151500094703141,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.9681,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9286394678407768,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.9415,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.997686222732575,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9448,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9997420392693974,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.9966,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.025454770988222,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 1.0061,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9004400517970723,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.9713,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.035609350141977,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.925,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8205620842968449,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.9319,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0026055411591541,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.9171,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8729671855534491,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.9424,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9003800462124079,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9922,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0235000465659894,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.9668,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9256919446053998,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9285,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8698603521807748,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.9668,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9720860628297219,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.99,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8167870246160064,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9021,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.069653115198386,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9952,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9081746219961461,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9976,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9091308363718774,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.953,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9170653072328966,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9369,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9447879394939125,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.9319,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.063860724905578,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.8929,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9372827125363168,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.9613,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9973853986012087,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.9321,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.933708910044373,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.9819,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8501823140475498,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.9083,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8968192349851679,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9311,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9467954363380379,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.9867,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9770498456859923,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.9755,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9244002799395564,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9607,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0089997803593413,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 1.0077,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9729824091419037,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9754,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8937345069773265,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.9528,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7828933226169947,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.8448,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.6881530097825312,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8297,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8383643213490903,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8944,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0492979343611466,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9559,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7860944374266666,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.8304,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9289932358432823,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.9132,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9193187827812805,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.9172,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8729349335008283,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9643,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9358818082059703,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.8925,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8943626139959501,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9392,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9151007624865264,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.9498,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9338211772395434,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9619,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9029817897960257,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 1.0062,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9996484500817483,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9812,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8765015379256176,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9065,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0423180898129178,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9466,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7874776388995165,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.8455,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7938745462427268,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.9201,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8100864066177429,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8825,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7304176748429774,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.9078,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8454782615675868,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9717,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0033589088155495,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9113,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0045541920143346,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.9802,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9549525759581301,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.902,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0385600265264971,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.9371,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.026356414371794,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.9704,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8946408892785592,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9272,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9433836414294297,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 1.0327,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0136214861170298,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9562,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8516349942077444,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9044,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8606918324559609,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.889,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9621898897088845,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.9425,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8999210884555426,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.9559,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9132688261405465,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.9636,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.96739887282077,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.958,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8669479456473806,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9078,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8583747225253263,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.992,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8029229062809408,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.9104,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0517436166476481,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9127,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8792737661317848,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9328,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8118400865453468,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.9258,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.894095224382569,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9422,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8008306113009263,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.8864,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0824491035305586,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.9603,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9440647145528336,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9356,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9523845323654504,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.9583,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8590795033317857,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.923,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0084967034937304,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.972,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9013444708051802,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9805,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8851420202420345,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.9088,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.1459972501861888,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9487,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9391257487421422,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.9777,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.004314381160487,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8921,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9153049856368219,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9034,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9175691044105417,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.9555,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0218738680397261,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9464,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8311183845638194,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9857,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8372882474738428,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9277,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9439256261421974,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9889,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0188128413048634,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9182,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9367484244107676,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 1.0082,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.2649438792775638,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 1.0295,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9343919815578396,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9987,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.026191881903781,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 1.0093,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8863813583139732,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.8857,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9310895226485517,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9583,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8785760391659503,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.9509,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.810865397517544,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8363,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0450274779532105,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9478,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8563817460351204,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9341,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9202654722314237,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.9441,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0921613482811823,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.9607,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9704540789978041,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9413,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9560575287925731,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9676,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0573638071375642,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9644,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9045907983710009,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.9729,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.158506692783041,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 1.0491,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8630420509315611,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.8856,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8794259243667014,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9319,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9083656791090297,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9013,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9551313866642618,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.9521,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9662740779782306,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 1.0104,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9321753471339548,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.8977,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9744976506191133,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.9655,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8960702114476669,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9226,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8867331235304629,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9158,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8162593563955296,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.8893,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.092573754242538,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.992,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8359118484411704,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9061,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8157544465859347,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 0.8577,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9343697029660539,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 1.0001,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8888199191652654,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.9049,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8916764016407774,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9234,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.0462153793340085,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9814,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9179145616912302,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.9493,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9001674153041553,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9446,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.2033915557290602,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.9634,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8090382291036919,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.9208,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8668030797487888,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.9509,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9079607458115487,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.9816,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9660764324244697,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 1.0215,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9707479728013486,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 1.0278,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9116418133277676,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.955,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9842417634853147,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.9409,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9733329443171795,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9357,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.801403566635771,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.8624,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9661459166620155,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.9435,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8764119756580947,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9952,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9749522107857632,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9781,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8900510283221014,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.8868,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9346525229346695,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.9183,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9925792295783066,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.9407,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.898656388238625,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9664,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.0469184473259812,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.9371,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9487561282792712,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.953,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9421710034497124,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9241,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8810989743636531,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.8962,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9968793884243532,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 1.0071,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8305670777743684,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9459,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8219634437237389,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8953,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.1505444861757854,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.986,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8523137932717626,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9969,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9113958021126214,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9257,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9641508088054317,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.9442,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8907370688959207,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9715843489375122,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.9028,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8060891755145814,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.9515,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9235647289276558,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9238,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9883917004228607,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9704,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.929373229477013,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9839,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9518091827387594,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.968,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8240557236482343,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.8758,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8988646170980703,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.9427,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0399035690145213,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 1.0229,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9054750140407912,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.9607,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8685646231286541,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.9334,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8687059821196736,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.9283,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9373054769488423,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9141,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8599121214184482,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.9604,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.886202487687736,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.9236,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9637264303046981,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.9264,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9532559907710517,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 1.0119,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9386242314684291,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9295,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9951195295678786,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.9378,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0412107333124232,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9474,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8952264426932172,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9031,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.988442495445306,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.9892,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9511420149297078,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9458,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8964385327916379,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8642,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8590231471822083,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.8998,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8920728082079487,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.9607,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9544576933919202,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.935,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8502120316835678,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9636,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9514711201063468,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.9251,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0858296479725027,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9738,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8945887283740663,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.9372,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.009456322676137,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.9836,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9302074749871171,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9074,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9317359260782803,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8998,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8862048439650887,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.9158,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8615007126234028,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.9098,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9394511331370565,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.9882,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9331670670411267,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.9461,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9574709930547879,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9056,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9897335453489471,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.9442,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0220996200971046,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9034,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9193755218614106,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9581,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9870346970797649,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9264,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.039457898673744,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9098,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.1401573467226491,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 1.0577,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.894286038150505,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.8931,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9652079324928932,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 1.0096,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9401791514652397,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9264,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9620125826617901,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.9533,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9589747071021635,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.9813,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.919466288168128,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.9512,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9267699720189961,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9616,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9352422133052664,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.9843,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9370435738627757,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.912,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9332076370582065,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9923,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.951260585951387,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.9523,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.7654624186718446,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.9168,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8394142240439104,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.9731,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9373661181598301,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9761,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9951245389073456,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.9156,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9027336014340304,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9491,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.919806862989453,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.9288,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.922166860727834,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.9126,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.758448753362842,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8826,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0925031705747983,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9526,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9315342222587545,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9478,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8836753853774646,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.9345,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0748642897816478,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 1.019,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1020416653196514,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 1.0358,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9173402686748258,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.9385,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9366699045544487,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.943,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.7891702619702629,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.8825,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.017602644064826,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8916,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.3463408000185373,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 1.0137,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8712942647447294,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.9353,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8650729231108287,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.9479,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9822409711635433,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9564,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8814317638018199,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.9055,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1831561052929551,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.9414,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9461182275489118,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8379,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8260539390039969,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8412,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9858948315309966,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8946,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0240841326059864,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9578,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.7651010798958877,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8796,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1230359951819133,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 1.0366,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.935980380559438,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.9666,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9957875414558593,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9951,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8744195717038817,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8881,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.123415163024355,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9967,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.859045065460368,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9039,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9674654018347075,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.9268,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9149679270302562,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9366,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9167962507331943,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 1.0223,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9813654701057842,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.9668,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9413559871033231,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9297,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9073621845606187,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.9528,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.2468338184260404,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9368,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9798699092319569,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.951,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8833200037199986,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.8742,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9096483030358838,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9921,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0721509786194834,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.9341,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9381596829629454,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.9631,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9777345180892383,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9804,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0917306506317828,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.9922,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0477642076686153,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 1.0333,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.870623788143449,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.8795,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.055005728652181,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.9359,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8410838945685877,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9358,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0248185442014413,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9276,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.908309785158247,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8945,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9716596627688002,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9354,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9215256625889569,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9204,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0485240710442505,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9759,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.936937516570783,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.903,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8799230397541101,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.9482,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9926020716383587,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 1.0108,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0724867931817301,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.9212,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9441285736579836,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.974,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.921483222049883,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.951,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.960342092289876,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.9335,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9828879812954129,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9867,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8234945392114452,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8914,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0000826402879177,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.9773,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0033786882011886,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.9349,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7510089365029284,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.8271,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8384109135762632,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.9021,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0363702461555846,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9671,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8585982568857289,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.8992,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8840105709138143,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 1.0145,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9339167697478992,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8929,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.1207516317267792,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 1.0126,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0200583417237226,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.9466,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9513314620444498,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.8953,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9586741531772605,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.9473,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9022331216422342,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.9663,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8490262990446552,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.8788,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8058145521238111,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.8929,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8345851041186947,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9236,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.826077318304091,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.8243,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9184599602068002,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9559,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.863563339396089,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.8943,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9687564666016926,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 1.0246,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8016700165126284,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.779,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9272282536162947,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.9734,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0683084960482627,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9624,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8605987032033153,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.9616,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8935171980628035,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.9985,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8958522226373784,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.9093,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9414368976653644,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 1.0025,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8947294596843949,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.9433,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9481206915774962,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9654,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7649948174764278,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.7975,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9290340911306668,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8933,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9044986483269645,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9713,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9253380811005857,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.9433,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8757542968221452,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.8957,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7785950358937312,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.8482,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8628071590588661,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.9646,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.940808457354721,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9177,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9681807352846368,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9723,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.821825971484946,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.9946,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.1533371339083218,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9811,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9144868418475506,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8728,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.944671776521524,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 1.0094,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7670008530648152,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.9459,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9327346622916476,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9518,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8943519516706805,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9016,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.836843296484399,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.9211,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0837447047544206,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.9503,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.866782774054129,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.8956,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0952009207818028,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.9741,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8934284107949934,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9812,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9055985900214036,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.98,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0521748629311196,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.9679,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8971230691493547,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.9648,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7884612665388886,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.8784,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9183625147776798,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9053,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.331903266211853,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.928,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8890745911546998,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 1.0026,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8178399292146777,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9229,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7975421469547915,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.8221,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7856140415304413,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8835,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9212337717189589,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8905,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9664484359458227,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.9473,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.901758224483286,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9158,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9341148763526235,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.9773,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.062966710994553,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9751,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9580639512609573,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.95,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9326599922642728,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 1.0127,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9152850976254324,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.9637,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9167252904757152,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.9266,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8893893633564427,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.9904,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8546769886453603,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 1.0025,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8742731068086662,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8514,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9191629656740666,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9986,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.864915264868844,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.9264,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8898894014288737,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 1.0025,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8919176795271149,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.9007,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8326676139129455,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.838,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0331897953323774,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.9448,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8882070918904326,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 1.02,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7227145232727819,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.9057,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9827321389264827,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9532,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7656284602546837,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.8359,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8720059317674618,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8801,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.1718128370645895,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.9533,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8427219416576509,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.8424,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8372703249382428,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9005,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8579908451353849,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9429,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.1220620242593762,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9728,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7949552663730435,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.8879,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8958167476098237,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.9128,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9033673883749678,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9956,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0174307423574056,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.9825,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8964749493654028,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.985,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.0439340860246706,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.9507,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8641536189166213,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.925,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.6972359878388217,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.7657,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.2185536373113726,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.9898,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.760487348808859,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8861,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9047981685612663,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.9518,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8646224202452631,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9563,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8897413974385131,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.9682,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9572314021465514,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.9532,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8679164618142823,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.9026,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.1461550220832444,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.9126,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8775301602086298,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.9167,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.0076946607347246,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.9368,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9608832261364295,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9469,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.1218595981287796,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9426,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.912410310177432,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.9367,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9675360940968317,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.963,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9692011340827513,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.9727,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8849105582044469,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.921,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.068024996188178,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.9958,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.862400497555066,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8949,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9514879455715923,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 1.0166,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7556953785166127,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.9001,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8476460852870521,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9309,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7924200952817001,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.8525,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9220226215613513,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.9904,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8424416831984529,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.9067,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8755187086763289,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.9751,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8876459553345205,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8903,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8531474128314328,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9167,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9237689557291372,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9348,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8840559268033596,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.908,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.01725625317237,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9485,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8884451600536032,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.9618,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9167076330508916,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.9419,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9094547219117403,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.9002,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7609836642879874,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.852,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.844623919132773,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9807,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9576966050485445,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.9275,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9913526844748883,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.986,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8390399493507212,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9242,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9453551353246631,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.9209,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8644027160141361,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8872,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8697883635399205,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.9393,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.898613074240735,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.9549,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9366020106528409,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.912,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9502018485280642,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 1.0275,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.006905999368359,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9756,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9537657347541025,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.917,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9985993297073632,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.9081,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9431546445191886,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.9207,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8773297684382142,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9485,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8740250009384237,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.8924,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9815806454494395,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.9698,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9486474181443565,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.9345,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9601631197817476,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.9815,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7483636069441965,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8775,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7654916172107221,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.8878,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.013322787317673,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.9808,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.796514685013387,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8519,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9158301962619945,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.965,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8941717164503102,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.9278,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9260085549648269,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9213,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0249095710219696,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9649,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8800021703463716,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.9598,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9022079788538561,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.9409,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9166074557382154,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9037,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8184827726326348,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.8209,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.979929768624538,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9214,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0725457211273963,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 1.0183,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9438217707664711,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9136,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7603743013151304,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8404,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.6976747074120535,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8418,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8805257176947605,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8595,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9905031655779478,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9428,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0733219660298396,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 1.0012,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.1315056201173224,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9555,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9755225991546073,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 1.0104,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8039574392836043,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.8842,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.2298128351522584,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.9915,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9475187118391011,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9914,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0238418500361268,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.9668,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8879339186763638,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9628,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.043255910202892,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.9657,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.88146449315845,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9241,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0097402614639863,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.9725,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9262476296618154,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.9889,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.792777027274484,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.8819,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.805465899141217,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.9014,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.789099481307358,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.8989,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8892189160433801,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8155,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.982443124122306,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9903,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9249251852617494,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9345,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9009637299071784,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.9165,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8723614961861461,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.9423,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8112332828650175,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.92,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0563803958747677,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9881,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.94837879715281,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.9337,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.1196342163276105,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.9563,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8559212941022728,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.8536,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.870399943587896,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.9491,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9054637858503229,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8666,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8830062987912204,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9563,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0307927102146766,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.9575,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9069737440389796,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9934,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7924465455993395,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.9049,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9947902888414152,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9646,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8787748633302995,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.9081,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1394231427703922,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9342,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1416652137952745,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 1.044,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9074571642867629,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.9839,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9797782216453558,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 0.8726,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0590030903237138,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.9164,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8571251524679723,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.9377,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8042233182079012,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.9228,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9866237251720887,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.9879,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9499895157449215,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.8796,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9427281586944805,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9563,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7837490283424455,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.8618,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7615948817822988,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.9481,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8733318088442981,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.944,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7888770614382925,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.8505,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8577354419612407,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.9144,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7383101646246054,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8305,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8379107800195931,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.9199,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.5425991074310408,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.9155,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.016735083022846,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.9858,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9187727463612595,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9916,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7273588661442333,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8711,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.958572799520519,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 1.0053,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8312394585884404,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.8994,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.051870370595863,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.9951,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8905620542250453,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.8816,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7939788016861045,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9355,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8282961796402444,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.922,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9062579731138316,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.971,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9736606006616938,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.8874,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8743235062321872,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8531,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8742558633785917,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.9189,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9425796036485377,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9864,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1211345516482556,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9516,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8755079314807876,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.9524,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9399390059601492,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8558,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0180762048062242,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.9397,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9360742759788145,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.8549,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9260178534746369,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.9218,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8609444981410015,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.8997,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8339600928030163,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.874,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7194081356452551,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.8414,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8251133134550331,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9349,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8205915117047229,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.8964,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8857206237622188,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 1.0074,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8220871065580929,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.885,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0091905898008606,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.9856,
+ "step": 2000
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8444063048123951,
+ "learning_rate": 1.408652487514908e-05,
+ "loss": 0.9006,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0030105126684379,
+ "learning_rate": 1.408083612243465e-05,
+ "loss": 0.9172,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8580137669521448,
+ "learning_rate": 1.4075145784770496e-05,
+ "loss": 0.89,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8763817758432053,
+ "learning_rate": 1.4069453864366678e-05,
+ "loss": 0.9573,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9267136042695975,
+ "learning_rate": 1.4063760363433867e-05,
+ "loss": 0.9176,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0036559507217695,
+ "learning_rate": 1.405806528418336e-05,
+ "loss": 0.9799,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.100732038831473,
+ "learning_rate": 1.4052368628827057e-05,
+ "loss": 0.8295,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9240966350209425,
+ "learning_rate": 1.4046670399577478e-05,
+ "loss": 0.9179,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.847878896021774,
+ "learning_rate": 1.4040970598647742e-05,
+ "loss": 0.9063,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1571927472095265,
+ "learning_rate": 1.4035269228251589e-05,
+ "loss": 0.9563,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.961327897267119,
+ "learning_rate": 1.4029566290603368e-05,
+ "loss": 0.9664,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9656888729639598,
+ "learning_rate": 1.4023861787918031e-05,
+ "loss": 0.9354,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8360560613851813,
+ "learning_rate": 1.4018155722411144e-05,
+ "loss": 0.904,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1779022532073358,
+ "learning_rate": 1.4012448096298874e-05,
+ "loss": 1.049,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8676988199338543,
+ "learning_rate": 1.4006738911798001e-05,
+ "loss": 0.9345,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.889609653986335,
+ "learning_rate": 1.40010281711259e-05,
+ "loss": 0.935,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8467068357086099,
+ "learning_rate": 1.3995315876500565e-05,
+ "loss": 0.941,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.959093215888042,
+ "learning_rate": 1.3989602030140581e-05,
+ "loss": 0.9353,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9460388212930191,
+ "learning_rate": 1.398388663426514e-05,
+ "loss": 0.9561,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.062637070665528,
+ "learning_rate": 1.3978169691094037e-05,
+ "loss": 0.9985,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.019248969455229,
+ "learning_rate": 1.3972451202847665e-05,
+ "loss": 0.9691,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9563067223829539,
+ "learning_rate": 1.3966731171747024e-05,
+ "loss": 0.9612,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8929233715441107,
+ "learning_rate": 1.3961009600013702e-05,
+ "loss": 0.9203,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8994152635230828,
+ "learning_rate": 1.3955286489869894e-05,
+ "loss": 0.9565,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9346745860366049,
+ "learning_rate": 1.394956184353839e-05,
+ "loss": 1.018,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8314785135760687,
+ "learning_rate": 1.3943835663242577e-05,
+ "loss": 0.8875,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0486948440977388,
+ "learning_rate": 1.3938107951206438e-05,
+ "loss": 0.9506,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8553903310522047,
+ "learning_rate": 1.3932378709654548e-05,
+ "loss": 0.9638,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9728607739240488,
+ "learning_rate": 1.3926647940812081e-05,
+ "loss": 0.9155,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.802412682137892,
+ "learning_rate": 1.39209156469048e-05,
+ "loss": 0.9332,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0390899410783163,
+ "learning_rate": 1.3915181830159061e-05,
+ "loss": 0.9457,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9775315836068712,
+ "learning_rate": 1.3909446492801819e-05,
+ "loss": 0.9055,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8413444570623176,
+ "learning_rate": 1.3903709637060605e-05,
+ "loss": 0.9337,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1380572872866588,
+ "learning_rate": 1.3897971265163546e-05,
+ "loss": 1.0123,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8706384708452394,
+ "learning_rate": 1.3892231379339369e-05,
+ "loss": 0.8948,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8078253574611688,
+ "learning_rate": 1.3886489981817375e-05,
+ "loss": 0.8797,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9925684455713385,
+ "learning_rate": 1.3880747074827454e-05,
+ "loss": 0.9285,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9027583259537797,
+ "learning_rate": 1.3875002660600085e-05,
+ "loss": 0.8611,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.915386451815617,
+ "learning_rate": 1.386925674136634e-05,
+ "loss": 0.9559,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0154426459142105,
+ "learning_rate": 1.3863509319357857e-05,
+ "loss": 0.9078,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.020893263005494,
+ "learning_rate": 1.3857760396806876e-05,
+ "loss": 0.9636,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.157603908132479,
+ "learning_rate": 1.3852009975946209e-05,
+ "loss": 0.9804,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9665388234932779,
+ "learning_rate": 1.3846258059009252e-05,
+ "loss": 0.9772,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0191998266208375,
+ "learning_rate": 1.384050464822999e-05,
+ "loss": 0.9576,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9211230193790735,
+ "learning_rate": 1.383474974584297e-05,
+ "loss": 0.9601,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8436625927115899,
+ "learning_rate": 1.3828993354083342e-05,
+ "loss": 0.8874,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9398468829595837,
+ "learning_rate": 1.3823235475186816e-05,
+ "loss": 0.9378,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8836232770152602,
+ "learning_rate": 1.3817476111389685e-05,
+ "loss": 0.938,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8314927195712102,
+ "learning_rate": 1.3811715264928824e-05,
+ "loss": 0.8972,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9009364299707033,
+ "learning_rate": 1.3805952938041674e-05,
+ "loss": 0.9061,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9512035371457199,
+ "learning_rate": 1.3800189132966257e-05,
+ "loss": 0.9252,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9799480066971844,
+ "learning_rate": 1.3794423851941174e-05,
+ "loss": 0.9245,
+ "step": 2052
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.7910507035424716,
+ "learning_rate": 1.378865709720559e-05,
+ "loss": 0.9099,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.028852089793899,
+ "learning_rate": 1.3782888870999245e-05,
+ "loss": 0.9859,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8370521950540126,
+ "learning_rate": 1.377711917556245e-05,
+ "loss": 0.9183,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8788384431273036,
+ "learning_rate": 1.3771348013136096e-05,
+ "loss": 0.9893,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.844453665030592,
+ "learning_rate": 1.3765575385961627e-05,
+ "loss": 0.9731,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0220205459364426,
+ "learning_rate": 1.3759801296281072e-05,
+ "loss": 0.9872,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8583372827190888,
+ "learning_rate": 1.3754025746337014e-05,
+ "loss": 0.941,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8206358732033752,
+ "learning_rate": 1.3748248738372616e-05,
+ "loss": 0.9567,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8350144985375577,
+ "learning_rate": 1.3742470274631599e-05,
+ "loss": 0.9283,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0260855545139542,
+ "learning_rate": 1.3736690357358253e-05,
+ "loss": 0.8992,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8024535547931305,
+ "learning_rate": 1.3730908988797427e-05,
+ "loss": 0.8404,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0317181085617486,
+ "learning_rate": 1.3725126171194543e-05,
+ "loss": 0.8498,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0956177656988373,
+ "learning_rate": 1.371934190679558e-05,
+ "loss": 0.9627,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9402077389190346,
+ "learning_rate": 1.3713556197847076e-05,
+ "loss": 1.0306,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9560593740330857,
+ "learning_rate": 1.3707769046596136e-05,
+ "loss": 0.8394,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9762119033421444,
+ "learning_rate": 1.3701980455290425e-05,
+ "loss": 0.9129,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9462020367011332,
+ "learning_rate": 1.3696190426178162e-05,
+ "loss": 0.9498,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.023089007611191,
+ "learning_rate": 1.3690398961508128e-05,
+ "loss": 1.0076,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.903313823109592,
+ "learning_rate": 1.3684606063529662e-05,
+ "loss": 0.9683,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9231262282697168,
+ "learning_rate": 1.3678811734492659e-05,
+ "loss": 0.9101,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8620923051842395,
+ "learning_rate": 1.367301597664757e-05,
+ "loss": 0.9181,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9724540203390146,
+ "learning_rate": 1.36672187922454e-05,
+ "loss": 0.9283,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.87526774540864,
+ "learning_rate": 1.3661420183537705e-05,
+ "loss": 0.9583,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9936158946801154,
+ "learning_rate": 1.3655620152776605e-05,
+ "loss": 0.9843,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9374689285771917,
+ "learning_rate": 1.364981870221476e-05,
+ "loss": 0.9579,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9935733159199066,
+ "learning_rate": 1.364401583410539e-05,
+ "loss": 0.9993,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8636503646855689,
+ "learning_rate": 1.3638211550702256e-05,
+ "loss": 0.9309,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9306902626568633,
+ "learning_rate": 1.363240585425968e-05,
+ "loss": 0.9443,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9328480738047901,
+ "learning_rate": 1.362659874703253e-05,
+ "loss": 1.0248,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8771738123021174,
+ "learning_rate": 1.3620790231276213e-05,
+ "loss": 0.9057,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8457797419925539,
+ "learning_rate": 1.3614980309246692e-05,
+ "loss": 0.9175,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8604423255057652,
+ "learning_rate": 1.3609168983200474e-05,
+ "loss": 0.919,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.7618132245335117,
+ "learning_rate": 1.3603356255394613e-05,
+ "loss": 0.8441,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9656290442534817,
+ "learning_rate": 1.3597542128086702e-05,
+ "loss": 0.9738,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.910210719341017,
+ "learning_rate": 1.3591726603534885e-05,
+ "loss": 0.8867,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9433476413269557,
+ "learning_rate": 1.3585909683997842e-05,
+ "loss": 0.9897,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9998913981653302,
+ "learning_rate": 1.3580091371734798e-05,
+ "loss": 0.9552,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.1272893760182217,
+ "learning_rate": 1.357427166900552e-05,
+ "loss": 0.936,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9695667620659533,
+ "learning_rate": 1.3568450578070309e-05,
+ "loss": 0.9196,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0563699865753733,
+ "learning_rate": 1.3562628101190015e-05,
+ "loss": 0.9464,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0512822354617037,
+ "learning_rate": 1.3556804240626019e-05,
+ "loss": 0.8949,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8641328042351272,
+ "learning_rate": 1.3550978998640241e-05,
+ "loss": 0.8929,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.7352432934971339,
+ "learning_rate": 1.3545152377495136e-05,
+ "loss": 0.8602,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0365473591432508,
+ "learning_rate": 1.3539324379453698e-05,
+ "loss": 0.99,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9630797455063902,
+ "learning_rate": 1.3533495006779455e-05,
+ "loss": 0.9395,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9157033189133503,
+ "learning_rate": 1.3527664261736471e-05,
+ "loss": 0.9556,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8763404164147738,
+ "learning_rate": 1.3521832146589335e-05,
+ "loss": 0.9182,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8671601810426536,
+ "learning_rate": 1.3515998663603174e-05,
+ "loss": 0.9382,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9911514266710806,
+ "learning_rate": 1.3510163815043647e-05,
+ "loss": 0.916,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8527537902425154,
+ "learning_rate": 1.3504327603176943e-05,
+ "loss": 0.9124,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8086932035918405,
+ "learning_rate": 1.3498490030269782e-05,
+ "loss": 0.8575,
+ "step": 2103
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9729506909184018,
+ "learning_rate": 1.3492651098589398e-05,
+ "loss": 0.9846,
+ "step": 2104
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.2058711788784828,
+ "learning_rate": 1.3486810810403578e-05,
+ "loss": 1.0487,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0201880887312547,
+ "learning_rate": 1.348096916798062e-05,
+ "loss": 0.9223,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8199658744339089,
+ "learning_rate": 1.3475126173589343e-05,
+ "loss": 0.8093,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9638803506762078,
+ "learning_rate": 1.3469281829499107e-05,
+ "loss": 0.9318,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8657124745199996,
+ "learning_rate": 1.3463436137979786e-05,
+ "loss": 0.9515,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9100122378778769,
+ "learning_rate": 1.3457589101301776e-05,
+ "loss": 0.9243,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0480835696428203,
+ "learning_rate": 1.3451740721736005e-05,
+ "loss": 0.9053,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.7936569437396148,
+ "learning_rate": 1.3445891001553905e-05,
+ "loss": 0.9174,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9517063633156221,
+ "learning_rate": 1.3440039943027452e-05,
+ "loss": 0.971,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9471740203984249,
+ "learning_rate": 1.3434187548429126e-05,
+ "loss": 0.9239,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.1278599771202846,
+ "learning_rate": 1.3428333820031922e-05,
+ "loss": 0.9818,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8927044567776891,
+ "learning_rate": 1.3422478760109371e-05,
+ "loss": 0.9093,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8827983163542394,
+ "learning_rate": 1.3416622370935507e-05,
+ "loss": 0.9345,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9974482146621254,
+ "learning_rate": 1.3410764654784885e-05,
+ "loss": 0.8699,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8507978660363839,
+ "learning_rate": 1.3404905613932573e-05,
+ "loss": 0.8159,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9133857728169197,
+ "learning_rate": 1.3399045250654152e-05,
+ "loss": 0.9172,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8392325672492333,
+ "learning_rate": 1.3393183567225724e-05,
+ "loss": 0.9434,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9450855945837672,
+ "learning_rate": 1.3387320565923901e-05,
+ "loss": 0.9196,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9050002995145253,
+ "learning_rate": 1.33814562490258e-05,
+ "loss": 0.9223,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.896773243860159,
+ "learning_rate": 1.3375590618809056e-05,
+ "loss": 0.9517,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9605119051546097,
+ "learning_rate": 1.3369723677551813e-05,
+ "loss": 0.9436,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.1686377562910408,
+ "learning_rate": 1.3363855427532724e-05,
+ "loss": 0.8846,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9386551282224604,
+ "learning_rate": 1.3357985871030948e-05,
+ "loss": 0.8806,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.851830143766913,
+ "learning_rate": 1.3352115010326155e-05,
+ "loss": 0.9407,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.012671182964821,
+ "learning_rate": 1.3346242847698516e-05,
+ "loss": 0.9655,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0540874348167801,
+ "learning_rate": 1.3340369385428713e-05,
+ "loss": 0.9399,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9080794848861694,
+ "learning_rate": 1.3334494625797936e-05,
+ "loss": 0.9469,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0779542861242648,
+ "learning_rate": 1.3328618571087867e-05,
+ "loss": 0.933,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9015655214132624,
+ "learning_rate": 1.33227412235807e-05,
+ "loss": 0.9185,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9254527944107253,
+ "learning_rate": 1.3316862585559132e-05,
+ "loss": 0.9219,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8634184222934571,
+ "learning_rate": 1.3310982659306352e-05,
+ "loss": 0.9605,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0732090424668668,
+ "learning_rate": 1.3305101447106064e-05,
+ "loss": 0.9052,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8289422071911152,
+ "learning_rate": 1.3299218951242456e-05,
+ "loss": 0.9016,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8212845421733582,
+ "learning_rate": 1.3293335174000226e-05,
+ "loss": 0.9402,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8696486413548757,
+ "learning_rate": 1.328745011766456e-05,
+ "loss": 0.9575,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9030138284532585,
+ "learning_rate": 1.3281563784521154e-05,
+ "loss": 0.9651,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8930054034239019,
+ "learning_rate": 1.3275676176856185e-05,
+ "loss": 0.9363,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8733947373441526,
+ "learning_rate": 1.3269787296956333e-05,
+ "loss": 0.9801,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9884894120237259,
+ "learning_rate": 1.3263897147108778e-05,
+ "loss": 0.9387,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8771721366236639,
+ "learning_rate": 1.3258005729601178e-05,
+ "loss": 0.9025,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8934162433915248,
+ "learning_rate": 1.3252113046721692e-05,
+ "loss": 0.9227,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9288113411421657,
+ "learning_rate": 1.3246219100758974e-05,
+ "loss": 0.9579,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9957768345151621,
+ "learning_rate": 1.3240323894002166e-05,
+ "loss": 0.9727,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8977422575556423,
+ "learning_rate": 1.3234427428740895e-05,
+ "loss": 0.812,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8941503061376238,
+ "learning_rate": 1.3228529707265279e-05,
+ "loss": 0.9106,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8300140290197217,
+ "learning_rate": 1.322263073186593e-05,
+ "loss": 0.935,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9379297004350344,
+ "learning_rate": 1.3216730504833938e-05,
+ "loss": 0.9012,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9151026409538474,
+ "learning_rate": 1.3210829028460883e-05,
+ "loss": 0.9311,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8390749088793632,
+ "learning_rate": 1.3204926305038832e-05,
+ "loss": 0.9072,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8950892281852907,
+ "learning_rate": 1.3199022336860335e-05,
+ "loss": 0.8161,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.901731932228211,
+ "learning_rate": 1.3193117126218425e-05,
+ "loss": 0.9456,
+ "step": 2155
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9083180685523767,
+ "learning_rate": 1.3187210675406617e-05,
+ "loss": 0.922,
+ "step": 2156
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0198118140775685,
+ "learning_rate": 1.318130298671891e-05,
+ "loss": 0.9803,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8024833717783901,
+ "learning_rate": 1.3175394062449777e-05,
+ "loss": 0.9135,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.1318702244111514,
+ "learning_rate": 1.3169483904894185e-05,
+ "loss": 1.0018,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7361656801553551,
+ "learning_rate": 1.3163572516347565e-05,
+ "loss": 0.8265,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.966866240758402,
+ "learning_rate": 1.3157659899105835e-05,
+ "loss": 0.9364,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.794657015054268,
+ "learning_rate": 1.315174605546538e-05,
+ "loss": 0.8606,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9862857009243148,
+ "learning_rate": 1.3145830987723081e-05,
+ "loss": 0.9638,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9369752946655109,
+ "learning_rate": 1.3139914698176273e-05,
+ "loss": 0.9144,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0568666449967494,
+ "learning_rate": 1.3133997189122777e-05,
+ "loss": 0.8772,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8739618214407433,
+ "learning_rate": 1.3128078462860887e-05,
+ "loss": 0.8755,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8566246050296912,
+ "learning_rate": 1.3122158521689367e-05,
+ "loss": 0.9244,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8511710754712899,
+ "learning_rate": 1.3116237367907454e-05,
+ "loss": 0.895,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0127171499809435,
+ "learning_rate": 1.3110315003814855e-05,
+ "loss": 0.9012,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8483280934174003,
+ "learning_rate": 1.3104391431711748e-05,
+ "loss": 0.8873,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9150391477403632,
+ "learning_rate": 1.309846665389878e-05,
+ "loss": 0.8914,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0201957239115922,
+ "learning_rate": 1.309254067267707e-05,
+ "loss": 0.9195,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7568426285724813,
+ "learning_rate": 1.3086613490348198e-05,
+ "loss": 0.8847,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9130032014846652,
+ "learning_rate": 1.3080685109214208e-05,
+ "loss": 0.9476,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9468735783199617,
+ "learning_rate": 1.3074755531577628e-05,
+ "loss": 0.9385,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8551801806917512,
+ "learning_rate": 1.3068824759741428e-05,
+ "loss": 0.9764,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8444878675059705,
+ "learning_rate": 1.306289279600905e-05,
+ "loss": 0.9023,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9144764402575253,
+ "learning_rate": 1.3056959642684404e-05,
+ "loss": 0.8931,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8939309904244324,
+ "learning_rate": 1.305102530207186e-05,
+ "loss": 0.9569,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8682559149021166,
+ "learning_rate": 1.3045089776476246e-05,
+ "loss": 0.8868,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8351988291792387,
+ "learning_rate": 1.3039153068202853e-05,
+ "loss": 0.8734,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9950694497612245,
+ "learning_rate": 1.3033215179557424e-05,
+ "loss": 0.9645,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9808033668068006,
+ "learning_rate": 1.3027276112846172e-05,
+ "loss": 0.9593,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8556344120101488,
+ "learning_rate": 1.3021335870375763e-05,
+ "loss": 0.9209,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9307037141925026,
+ "learning_rate": 1.3015394454453316e-05,
+ "loss": 1.006,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8332972795043385,
+ "learning_rate": 1.3009451867386411e-05,
+ "loss": 0.972,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8498486138180256,
+ "learning_rate": 1.3003508111483077e-05,
+ "loss": 0.8918,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0154267592166855,
+ "learning_rate": 1.29975631890518e-05,
+ "loss": 1.0385,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9643255405369295,
+ "learning_rate": 1.2991617102401524e-05,
+ "loss": 1.0189,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9999963177765913,
+ "learning_rate": 1.2985669853841635e-05,
+ "loss": 0.9502,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.055543509771536,
+ "learning_rate": 1.297972144568198e-05,
+ "loss": 0.9946,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8317250919203271,
+ "learning_rate": 1.2973771880232853e-05,
+ "loss": 0.9091,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7895691954424078,
+ "learning_rate": 1.2967821159804994e-05,
+ "loss": 0.8551,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0362042124719069,
+ "learning_rate": 1.2961869286709594e-05,
+ "loss": 0.9761,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.807503085482749,
+ "learning_rate": 1.295591626325829e-05,
+ "loss": 0.8496,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8072008486265011,
+ "learning_rate": 1.2949962091763174e-05,
+ "loss": 0.8929,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8690581258452311,
+ "learning_rate": 1.2944006774536773e-05,
+ "loss": 0.934,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7956373580751883,
+ "learning_rate": 1.2938050313892062e-05,
+ "loss": 0.8662,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.02111310621287,
+ "learning_rate": 1.2932092712142468e-05,
+ "loss": 0.9334,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8717684939022758,
+ "learning_rate": 1.292613397160185e-05,
+ "loss": 0.912,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7580401836519327,
+ "learning_rate": 1.2920174094584514e-05,
+ "loss": 0.8451,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7354593767865946,
+ "learning_rate": 1.2914213083405211e-05,
+ "loss": 0.7894,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9068255297786387,
+ "learning_rate": 1.2908250940379124e-05,
+ "loss": 0.9369,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7333921022370111,
+ "learning_rate": 1.2902287667821885e-05,
+ "loss": 0.8307,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9422968743724666,
+ "learning_rate": 1.289632326804956e-05,
+ "loss": 0.9612,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.6550919422487774,
+ "learning_rate": 1.2890357743378649e-05,
+ "loss": 0.7924,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8160761756000248,
+ "learning_rate": 1.2884391096126098e-05,
+ "loss": 0.8763,
+ "step": 2207
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9061214329104939,
+ "learning_rate": 1.2878423328609281e-05,
+ "loss": 0.8859,
+ "step": 2208
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8939459555110824,
+ "learning_rate": 1.2872454443146015e-05,
+ "loss": 0.8946,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9367840606138146,
+ "learning_rate": 1.286648444205454e-05,
+ "loss": 0.9106,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9126460998591084,
+ "learning_rate": 1.2860513327653537e-05,
+ "loss": 0.8996,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8027713902262512,
+ "learning_rate": 1.2854541102262119e-05,
+ "loss": 0.7973,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0651955742880965,
+ "learning_rate": 1.284856776819983e-05,
+ "loss": 0.981,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.2831194224703233,
+ "learning_rate": 1.2842593327786649e-05,
+ "loss": 0.9468,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.1021268640452047,
+ "learning_rate": 1.2836617783342968e-05,
+ "loss": 0.8712,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8831486611833153,
+ "learning_rate": 1.2830641137189628e-05,
+ "loss": 0.9142,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.900483403576882,
+ "learning_rate": 1.282466339164789e-05,
+ "loss": 0.9413,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0200303811207974,
+ "learning_rate": 1.2818684549039437e-05,
+ "loss": 0.9141,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.982807273900127,
+ "learning_rate": 1.2812704611686386e-05,
+ "loss": 0.9387,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8435029975191141,
+ "learning_rate": 1.2806723581911274e-05,
+ "loss": 0.9205,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8318831035980068,
+ "learning_rate": 1.2800741462037065e-05,
+ "loss": 0.9073,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9154503782346017,
+ "learning_rate": 1.2794758254387147e-05,
+ "loss": 0.8904,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8830248215532113,
+ "learning_rate": 1.2788773961285323e-05,
+ "loss": 0.9398,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0039121615109634,
+ "learning_rate": 1.2782788585055829e-05,
+ "loss": 0.8373,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7819833237413248,
+ "learning_rate": 1.2776802128023317e-05,
+ "loss": 0.8329,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7951873459325333,
+ "learning_rate": 1.2770814592512853e-05,
+ "loss": 0.931,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7653848177976322,
+ "learning_rate": 1.2764825980849931e-05,
+ "loss": 0.9421,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9283523838898656,
+ "learning_rate": 1.2758836295360455e-05,
+ "loss": 0.9328,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8757328387584603,
+ "learning_rate": 1.2752845538370752e-05,
+ "loss": 0.8946,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.1117225459926026,
+ "learning_rate": 1.2746853712207567e-05,
+ "loss": 0.961,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9067243054740067,
+ "learning_rate": 1.274086081919805e-05,
+ "loss": 0.8418,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0406832772839323,
+ "learning_rate": 1.273486686166977e-05,
+ "loss": 0.9526,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8190042989077312,
+ "learning_rate": 1.2728871841950719e-05,
+ "loss": 0.8949,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.843455189454114,
+ "learning_rate": 1.2722875762369288e-05,
+ "loss": 0.966,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9128400211362868,
+ "learning_rate": 1.2716878625254287e-05,
+ "loss": 0.9684,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8811556270079063,
+ "learning_rate": 1.2710880432934934e-05,
+ "loss": 0.9431,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9670319124586425,
+ "learning_rate": 1.270488118774086e-05,
+ "loss": 0.9217,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.6988637360254698,
+ "learning_rate": 1.26988808920021e-05,
+ "loss": 0.8693,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8652315443802078,
+ "learning_rate": 1.26928795480491e-05,
+ "loss": 0.9067,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7962169833246877,
+ "learning_rate": 1.2686877158212715e-05,
+ "loss": 0.9165,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7999814792080348,
+ "learning_rate": 1.26808737248242e-05,
+ "loss": 0.8628,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8878929008246196,
+ "learning_rate": 1.2674869250215225e-05,
+ "loss": 0.9566,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9643444116175804,
+ "learning_rate": 1.2668863736717855e-05,
+ "loss": 0.9864,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8686630016249888,
+ "learning_rate": 1.2662857186664558e-05,
+ "loss": 0.9201,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8595322784723836,
+ "learning_rate": 1.2656849602388222e-05,
+ "loss": 0.8776,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8686109940956778,
+ "learning_rate": 1.2650840986222111e-05,
+ "loss": 0.8966,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8567876908949325,
+ "learning_rate": 1.2644831340499906e-05,
+ "loss": 0.8575,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.90192186142703,
+ "learning_rate": 1.2638820667555685e-05,
+ "loss": 0.9649,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8250028683500116,
+ "learning_rate": 1.2632808969723927e-05,
+ "loss": 0.9171,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9008503688386137,
+ "learning_rate": 1.26267962493395e-05,
+ "loss": 0.9599,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9913614323250304,
+ "learning_rate": 1.2620782508737678e-05,
+ "loss": 0.8675,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9569919582981561,
+ "learning_rate": 1.2614767750254129e-05,
+ "loss": 0.8051,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9231133392473371,
+ "learning_rate": 1.2608751976224916e-05,
+ "loss": 0.9404,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9456264298714065,
+ "learning_rate": 1.2602735188986498e-05,
+ "loss": 0.9648,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8770379992344625,
+ "learning_rate": 1.2596717390875721e-05,
+ "loss": 0.897,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9504760546021825,
+ "learning_rate": 1.2590698584229834e-05,
+ "loss": 0.9028,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9563220870749286,
+ "learning_rate": 1.2584678771386467e-05,
+ "loss": 0.9837,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9448542045546671,
+ "learning_rate": 1.2578657954683651e-05,
+ "loss": 1.005,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7810543548466898,
+ "learning_rate": 1.2572636136459799e-05,
+ "loss": 0.8573,
+ "step": 2259
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7848109531627149,
+ "learning_rate": 1.2566613319053713e-05,
+ "loss": 0.8474,
+ "step": 2260
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8402319429607122,
+ "learning_rate": 1.2560589504804592e-05,
+ "loss": 0.8793,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8468642584985647,
+ "learning_rate": 1.2554564696052011e-05,
+ "loss": 0.8891,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9205537388059336,
+ "learning_rate": 1.2548538895135942e-05,
+ "loss": 0.9479,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.3191982669707438,
+ "learning_rate": 1.254251210439673e-05,
+ "loss": 0.9465,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0155844198886341,
+ "learning_rate": 1.2536484326175114e-05,
+ "loss": 0.9233,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7914661481418371,
+ "learning_rate": 1.2530455562812214e-05,
+ "loss": 0.7637,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8576349287537162,
+ "learning_rate": 1.252442581664953e-05,
+ "loss": 0.8873,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.3116892981730255,
+ "learning_rate": 1.2518395090028952e-05,
+ "loss": 0.9261,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9505863375062338,
+ "learning_rate": 1.2512363385292739e-05,
+ "loss": 0.9286,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9790558920112058,
+ "learning_rate": 1.2506330704783533e-05,
+ "loss": 0.9397,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.777067707819939,
+ "learning_rate": 1.2500297050844367e-05,
+ "loss": 0.8604,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9693193786093736,
+ "learning_rate": 1.2494262425818637e-05,
+ "loss": 0.9279,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9132165167761025,
+ "learning_rate": 1.2488226832050116e-05,
+ "loss": 0.8659,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0404534785825161,
+ "learning_rate": 1.2482190271882973e-05,
+ "loss": 0.9227,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8939335737641162,
+ "learning_rate": 1.2476152747661727e-05,
+ "loss": 0.8742,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9025028460582737,
+ "learning_rate": 1.2470114261731288e-05,
+ "loss": 0.9411,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9037992759277862,
+ "learning_rate": 1.246407481643693e-05,
+ "loss": 0.93,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9006461039114079,
+ "learning_rate": 1.245803441412431e-05,
+ "loss": 0.9155,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8490977988796075,
+ "learning_rate": 1.2451993057139445e-05,
+ "loss": 0.9685,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9180759161931465,
+ "learning_rate": 1.2445950747828732e-05,
+ "loss": 0.9185,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.896120846584157,
+ "learning_rate": 1.2439907488538934e-05,
+ "loss": 0.8933,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9134146033827902,
+ "learning_rate": 1.243386328161718e-05,
+ "loss": 0.9933,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8284319600278802,
+ "learning_rate": 1.2427818129410975e-05,
+ "loss": 0.9607,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8797605668553792,
+ "learning_rate": 1.2421772034268187e-05,
+ "loss": 0.9565,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9671130142722008,
+ "learning_rate": 1.2415724998537042e-05,
+ "loss": 0.9196,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8870725850010622,
+ "learning_rate": 1.2409677024566145e-05,
+ "loss": 0.927,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8795503727652301,
+ "learning_rate": 1.240362811470446e-05,
+ "loss": 0.8702,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7695415996567161,
+ "learning_rate": 1.2397578271301312e-05,
+ "loss": 0.9047,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8942816075711105,
+ "learning_rate": 1.2391527496706389e-05,
+ "loss": 0.9137,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9839494866211023,
+ "learning_rate": 1.2385475793269744e-05,
+ "loss": 0.9475,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8607761945903216,
+ "learning_rate": 1.2379423163341791e-05,
+ "loss": 0.9513,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9874676021466612,
+ "learning_rate": 1.2373369609273299e-05,
+ "loss": 0.9573,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9967685211443729,
+ "learning_rate": 1.2367315133415396e-05,
+ "loss": 0.88,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9272517188545901,
+ "learning_rate": 1.2361259738119575e-05,
+ "loss": 0.8903,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9299453745353854,
+ "learning_rate": 1.2355203425737683e-05,
+ "loss": 0.9457,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9328180096893278,
+ "learning_rate": 1.2349146198621917e-05,
+ "loss": 1.0141,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9966162094449411,
+ "learning_rate": 1.2343088059124839e-05,
+ "loss": 0.9806,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7454570005398844,
+ "learning_rate": 1.2337029009599357e-05,
+ "loss": 0.8621,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0723523984689818,
+ "learning_rate": 1.2330969052398735e-05,
+ "loss": 1.0161,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8777414892988066,
+ "learning_rate": 1.2324908189876597e-05,
+ "loss": 0.8917,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.1967680186387344,
+ "learning_rate": 1.2318846424386907e-05,
+ "loss": 0.9792,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.919167550777173,
+ "learning_rate": 1.2312783758283981e-05,
+ "loss": 0.9286,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9323671006879363,
+ "learning_rate": 1.23067201939225e-05,
+ "loss": 0.9162,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9022285276392306,
+ "learning_rate": 1.2300655733657475e-05,
+ "loss": 0.9074,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9650623323939015,
+ "learning_rate": 1.2294590379844268e-05,
+ "loss": 0.8816,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8453901908969638,
+ "learning_rate": 1.2288524134838602e-05,
+ "loss": 0.8916,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0463494277731755,
+ "learning_rate": 1.2282457000996533e-05,
+ "loss": 0.9261,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9707234867884714,
+ "learning_rate": 1.2276388980674465e-05,
+ "loss": 1.0039,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7917339614535102,
+ "learning_rate": 1.227032007622915e-05,
+ "loss": 0.865,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8993644056063185,
+ "learning_rate": 1.2264250290017675e-05,
+ "loss": 0.9227,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9074737570841841,
+ "learning_rate": 1.2258179624397477e-05,
+ "loss": 0.8732,
+ "step": 2311
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9243744757860682,
+ "learning_rate": 1.2252108081726337e-05,
+ "loss": 0.978,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0963824532598228,
+ "learning_rate": 1.224603566436237e-05,
+ "loss": 0.9022,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9775942897344263,
+ "learning_rate": 1.2239962374664029e-05,
+ "loss": 0.9061,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0748437593447329,
+ "learning_rate": 1.2233888214990113e-05,
+ "loss": 0.981,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.941214829389541,
+ "learning_rate": 1.2227813187699757e-05,
+ "loss": 0.9364,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0147802058068196,
+ "learning_rate": 1.222173729515243e-05,
+ "loss": 0.9382,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.931701357529382,
+ "learning_rate": 1.2215660539707936e-05,
+ "loss": 0.9342,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8625971996241817,
+ "learning_rate": 1.2209582923726424e-05,
+ "loss": 0.9459,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7732411891644637,
+ "learning_rate": 1.2203504449568361e-05,
+ "loss": 0.8984,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9396893140932644,
+ "learning_rate": 1.2197425119594563e-05,
+ "loss": 0.8818,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.905540988354156,
+ "learning_rate": 1.219134493616617e-05,
+ "loss": 0.9799,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9244985365741272,
+ "learning_rate": 1.2185263901644653e-05,
+ "loss": 0.9354,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9628362168750522,
+ "learning_rate": 1.217918201839182e-05,
+ "loss": 0.9355,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9880630455826348,
+ "learning_rate": 1.2173099288769799e-05,
+ "loss": 0.8397,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8050007644375362,
+ "learning_rate": 1.2167015715141057e-05,
+ "loss": 0.8992,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7707560559980093,
+ "learning_rate": 1.216093129986838e-05,
+ "loss": 0.8634,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8594489969289725,
+ "learning_rate": 1.2154846045314885e-05,
+ "loss": 0.9415,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.962088656434714,
+ "learning_rate": 1.214875995384402e-05,
+ "loss": 0.9516,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8325433940208284,
+ "learning_rate": 1.214267302781955e-05,
+ "loss": 0.9341,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8838573252939088,
+ "learning_rate": 1.2136585269605558e-05,
+ "loss": 0.8697,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7586705153269148,
+ "learning_rate": 1.2130496681566475e-05,
+ "loss": 0.8863,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.008620085739925,
+ "learning_rate": 1.212440726606703e-05,
+ "loss": 1.0598,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9048777623965037,
+ "learning_rate": 1.211831702547228e-05,
+ "loss": 0.9518,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.1165940923709488,
+ "learning_rate": 1.2112225962147605e-05,
+ "loss": 0.931,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8545296331102793,
+ "learning_rate": 1.210613407845871e-05,
+ "loss": 0.9168,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.1169495708522161,
+ "learning_rate": 1.2100041376771605e-05,
+ "loss": 0.8689,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.839029030347656,
+ "learning_rate": 1.209394785945263e-05,
+ "loss": 0.8721,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9421555154811644,
+ "learning_rate": 1.2087853528868432e-05,
+ "loss": 0.9253,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8391201168939678,
+ "learning_rate": 1.2081758387385982e-05,
+ "loss": 0.8823,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9536242397741264,
+ "learning_rate": 1.2075662437372567e-05,
+ "loss": 0.9544,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9184315531906584,
+ "learning_rate": 1.2069565681195776e-05,
+ "loss": 0.9237,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8571690635275195,
+ "learning_rate": 1.206346812122352e-05,
+ "loss": 0.7997,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.993736493041169,
+ "learning_rate": 1.2057369759824025e-05,
+ "loss": 0.9158,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9232450969435135,
+ "learning_rate": 1.2051270599365825e-05,
+ "loss": 0.9434,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.937401762052201,
+ "learning_rate": 1.2045170642217756e-05,
+ "loss": 0.9659,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9884612109000868,
+ "learning_rate": 1.2039069890748978e-05,
+ "loss": 0.9275,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8096746781793203,
+ "learning_rate": 1.2032968347328952e-05,
+ "loss": 0.8827,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9721901539982749,
+ "learning_rate": 1.2026866014327446e-05,
+ "loss": 0.9053,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.868185981043627,
+ "learning_rate": 1.2020762894114535e-05,
+ "loss": 0.9154,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0040657783176368,
+ "learning_rate": 1.20146589890606e-05,
+ "loss": 0.9764,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8993919390029462,
+ "learning_rate": 1.2008554301536328e-05,
+ "loss": 0.9335,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7915617021866037,
+ "learning_rate": 1.2002448833912712e-05,
+ "loss": 0.9049,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.941600506227459,
+ "learning_rate": 1.1996342588561042e-05,
+ "loss": 0.9496,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8806539928345356,
+ "learning_rate": 1.1990235567852917e-05,
+ "loss": 0.9784,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7719462068672606,
+ "learning_rate": 1.1984127774160226e-05,
+ "loss": 0.8674,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9750477133822913,
+ "learning_rate": 1.1978019209855174e-05,
+ "loss": 0.9517,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0129274251264657,
+ "learning_rate": 1.1971909877310253e-05,
+ "loss": 0.9528,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0276780192047423,
+ "learning_rate": 1.1965799778898258e-05,
+ "loss": 0.9619,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9888950447198691,
+ "learning_rate": 1.1959688916992279e-05,
+ "loss": 0.9426,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9804139150430459,
+ "learning_rate": 1.1953577293965707e-05,
+ "loss": 0.9743,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9917794852029977,
+ "learning_rate": 1.1947464912192228e-05,
+ "loss": 0.9499,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9346199072311586,
+ "learning_rate": 1.1941351774045815e-05,
+ "loss": 0.9202,
+ "step": 2363
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7889692200039892,
+ "learning_rate": 1.1935237881900743e-05,
+ "loss": 0.8809,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0290026816597673,
+ "learning_rate": 1.1929123238131579e-05,
+ "loss": 0.9394,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9155550587591733,
+ "learning_rate": 1.1923007845113178e-05,
+ "loss": 0.9183,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8280934757123218,
+ "learning_rate": 1.1916891705220689e-05,
+ "loss": 0.8689,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.1462505535112175,
+ "learning_rate": 1.191077482082955e-05,
+ "loss": 0.9282,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8489004035345884,
+ "learning_rate": 1.1904657194315486e-05,
+ "loss": 0.926,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7324528830640139,
+ "learning_rate": 1.1898538828054517e-05,
+ "loss": 0.8437,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.098786163329813,
+ "learning_rate": 1.1892419724422946e-05,
+ "loss": 0.9005,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9048738472171444,
+ "learning_rate": 1.1886299885797357e-05,
+ "loss": 0.8989,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.10612713008854,
+ "learning_rate": 1.1880179314554629e-05,
+ "loss": 0.9768,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9252857630708283,
+ "learning_rate": 1.1874058013071923e-05,
+ "loss": 0.9211,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8514003700485429,
+ "learning_rate": 1.1867935983726676e-05,
+ "loss": 0.8755,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9311325301676305,
+ "learning_rate": 1.186181322889662e-05,
+ "loss": 0.8945,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8600593589198032,
+ "learning_rate": 1.1855689750959759e-05,
+ "loss": 0.915,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.054460247599659,
+ "learning_rate": 1.1849565552294379e-05,
+ "loss": 0.9009,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7825726964835971,
+ "learning_rate": 1.1843440635279056e-05,
+ "loss": 0.9202,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8782667464030242,
+ "learning_rate": 1.1837315002292629e-05,
+ "loss": 0.9354,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9420370586863615,
+ "learning_rate": 1.1831188655714225e-05,
+ "loss": 0.9293,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.160896572804164,
+ "learning_rate": 1.182506159792325e-05,
+ "loss": 0.9205,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0574638697184164,
+ "learning_rate": 1.1818933831299381e-05,
+ "loss": 0.9217,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0116185231622183,
+ "learning_rate": 1.1812805358222571e-05,
+ "loss": 0.9726,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8660501324928412,
+ "learning_rate": 1.180667618107305e-05,
+ "loss": 0.947,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8972308213571614,
+ "learning_rate": 1.1800546302231317e-05,
+ "loss": 0.9541,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9740836176917561,
+ "learning_rate": 1.1794415724078147e-05,
+ "loss": 1.0161,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0951667952404012,
+ "learning_rate": 1.1788284448994588e-05,
+ "loss": 0.9706,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8276278059262884,
+ "learning_rate": 1.1782152479361956e-05,
+ "loss": 0.9164,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9422791050884577,
+ "learning_rate": 1.1776019817561834e-05,
+ "loss": 0.9288,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0147981856724573,
+ "learning_rate": 1.1769886465976086e-05,
+ "loss": 0.8612,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8108186714213463,
+ "learning_rate": 1.1763752426986823e-05,
+ "loss": 0.8637,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9030533777656744,
+ "learning_rate": 1.1757617702976443e-05,
+ "loss": 0.849,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9580950838209914,
+ "learning_rate": 1.17514822963276e-05,
+ "loss": 0.9419,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7702737871040894,
+ "learning_rate": 1.1745346209423216e-05,
+ "loss": 0.9012,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8877065011852157,
+ "learning_rate": 1.1739209444646479e-05,
+ "loss": 0.9031,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9512991898337169,
+ "learning_rate": 1.1733072004380827e-05,
+ "loss": 0.9506,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9795734030977572,
+ "learning_rate": 1.1726933891009985e-05,
+ "loss": 0.9609,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8824455451460888,
+ "learning_rate": 1.1720795106917917e-05,
+ "loss": 0.9016,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9727053500616915,
+ "learning_rate": 1.171465565448886e-05,
+ "loss": 0.9849,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8714983960102699,
+ "learning_rate": 1.1708515536107299e-05,
+ "loss": 1.0035,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7904429359741996,
+ "learning_rate": 1.1702374754157998e-05,
+ "loss": 0.8357,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9239708984692373,
+ "learning_rate": 1.1696233311025957e-05,
+ "loss": 0.9104,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8148168826691347,
+ "learning_rate": 1.1690091209096441e-05,
+ "loss": 0.9029,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.870782489451877,
+ "learning_rate": 1.1683948450754976e-05,
+ "loss": 0.9301,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.051494396715924,
+ "learning_rate": 1.1677805038387337e-05,
+ "loss": 0.9045,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9692116029848852,
+ "learning_rate": 1.1671660974379554e-05,
+ "loss": 0.9321,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.897659128365413,
+ "learning_rate": 1.1665516261117914e-05,
+ "loss": 0.8948,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9180979807670455,
+ "learning_rate": 1.1659370900988946e-05,
+ "loss": 0.9649,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8924094087371933,
+ "learning_rate": 1.165322489637944e-05,
+ "loss": 0.9686,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8289188195130809,
+ "learning_rate": 1.164707824967644e-05,
+ "loss": 0.9305,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9373337584613777,
+ "learning_rate": 1.1640930963267226e-05,
+ "loss": 0.93,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7762393780235348,
+ "learning_rate": 1.1634783039539328e-05,
+ "loss": 0.8451,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7838013185687841,
+ "learning_rate": 1.162863448088054e-05,
+ "loss": 0.8454,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9639296692594781,
+ "learning_rate": 1.1622485289678886e-05,
+ "loss": 0.919,
+ "step": 2415
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8039691890795312,
+ "learning_rate": 1.1616335468322641e-05,
+ "loss": 0.8682,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8193193101218789,
+ "learning_rate": 1.1610185019200324e-05,
+ "loss": 0.8697,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9249669703522008,
+ "learning_rate": 1.1604033944700701e-05,
+ "loss": 0.9784,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.909036611522033,
+ "learning_rate": 1.1597882247212776e-05,
+ "loss": 0.9195,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8067098598232343,
+ "learning_rate": 1.15917299291258e-05,
+ "loss": 0.9436,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8585124511047816,
+ "learning_rate": 1.1585576992829261e-05,
+ "loss": 0.9204,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9351868646055391,
+ "learning_rate": 1.1579423440712887e-05,
+ "loss": 0.9726,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8808652735933448,
+ "learning_rate": 1.1573269275166652e-05,
+ "loss": 0.9028,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9059833389151828,
+ "learning_rate": 1.1567114498580758e-05,
+ "loss": 0.9405,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9629498856687665,
+ "learning_rate": 1.1560959113345649e-05,
+ "loss": 0.9129,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8720052204637136,
+ "learning_rate": 1.1554803121852005e-05,
+ "loss": 0.908,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8366220055342847,
+ "learning_rate": 1.1548646526490749e-05,
+ "loss": 0.9286,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9400754416872955,
+ "learning_rate": 1.1542489329653024e-05,
+ "loss": 0.9263,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9112230368157715,
+ "learning_rate": 1.153633153373022e-05,
+ "loss": 0.9317,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8822410754901164,
+ "learning_rate": 1.1530173141113947e-05,
+ "loss": 0.9708,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8788759327004128,
+ "learning_rate": 1.1524014154196063e-05,
+ "loss": 0.867,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9917300908341928,
+ "learning_rate": 1.1517854575368644e-05,
+ "loss": 0.9319,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8649965706468556,
+ "learning_rate": 1.1511694407023994e-05,
+ "loss": 0.8463,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.7171466088532696,
+ "learning_rate": 1.1505533651554654e-05,
+ "loss": 0.8633,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8994079257246275,
+ "learning_rate": 1.1499372311353398e-05,
+ "loss": 0.8892,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.934409689777611,
+ "learning_rate": 1.149321038881321e-05,
+ "loss": 0.9409,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9147537401355629,
+ "learning_rate": 1.1487047886327314e-05,
+ "loss": 0.9153,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9201992531935073,
+ "learning_rate": 1.1480884806289151e-05,
+ "loss": 0.9546,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.894740123105788,
+ "learning_rate": 1.1474721151092397e-05,
+ "loss": 0.9233,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8595033208454251,
+ "learning_rate": 1.1468556923130943e-05,
+ "loss": 0.8677,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9990850953496287,
+ "learning_rate": 1.14623921247989e-05,
+ "loss": 0.9033,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.80162258509165,
+ "learning_rate": 1.1456226758490603e-05,
+ "loss": 0.8522,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 1.1512931236925705,
+ "learning_rate": 1.1450060826600618e-05,
+ "loss": 0.9087,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8856249226151223,
+ "learning_rate": 1.1443894331523718e-05,
+ "loss": 0.9191,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9244246223023082,
+ "learning_rate": 1.1437727275654893e-05,
+ "loss": 0.8689,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.945424413657258,
+ "learning_rate": 1.1431559661389362e-05,
+ "loss": 0.9457,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8659502973724246,
+ "learning_rate": 1.1425391491122557e-05,
+ "loss": 0.8955,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9256327243109714,
+ "learning_rate": 1.141922276725012e-05,
+ "loss": 0.9343,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8501313896870167,
+ "learning_rate": 1.1413053492167915e-05,
+ "loss": 0.9272,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9122945426028953,
+ "learning_rate": 1.1406883668272015e-05,
+ "loss": 0.8923,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.7699343783018004,
+ "learning_rate": 1.140071329795871e-05,
+ "loss": 0.8427,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8582187745030887,
+ "learning_rate": 1.13945423836245e-05,
+ "loss": 0.9264,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 1.1938291860294514,
+ "learning_rate": 1.1388370927666102e-05,
+ "loss": 0.9376,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9804048218156324,
+ "learning_rate": 1.1382198932480429e-05,
+ "loss": 0.9424,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9206388057962297,
+ "learning_rate": 1.1376026400464616e-05,
+ "loss": 0.8612,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8036855246615522,
+ "learning_rate": 1.136985333401601e-05,
+ "loss": 0.8687,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8001987189105646,
+ "learning_rate": 1.1363679735532151e-05,
+ "loss": 0.8955,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8979648843604973,
+ "learning_rate": 1.1357505607410797e-05,
+ "loss": 0.9465,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8946428094039034,
+ "learning_rate": 1.1351330952049908e-05,
+ "loss": 0.9064,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9144929222444373,
+ "learning_rate": 1.1345155771847646e-05,
+ "loss": 0.8163,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9557501465675763,
+ "learning_rate": 1.1338980069202388e-05,
+ "loss": 0.9097,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8570136915849398,
+ "learning_rate": 1.1332803846512697e-05,
+ "loss": 0.9295,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.855576646382349,
+ "learning_rate": 1.1326627106177348e-05,
+ "loss": 0.9629,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.81447619038449,
+ "learning_rate": 1.132044985059532e-05,
+ "loss": 0.8743,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9529747391366109,
+ "learning_rate": 1.1314272082165785e-05,
+ "loss": 0.9615,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8102666542822321,
+ "learning_rate": 1.1308093803288119e-05,
+ "loss": 0.8673,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8935530920483463,
+ "learning_rate": 1.130191501636189e-05,
+ "loss": 0.8877,
+ "step": 2467
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.926405774997204,
+ "learning_rate": 1.1295735723786872e-05,
+ "loss": 0.9287,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.942888474563503,
+ "learning_rate": 1.1289555927963032e-05,
+ "loss": 0.9102,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9162290160368131,
+ "learning_rate": 1.1283375631290528e-05,
+ "loss": 0.9669,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8473067870722337,
+ "learning_rate": 1.1277194836169714e-05,
+ "loss": 0.9073,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0371891476416797,
+ "learning_rate": 1.1271013545001144e-05,
+ "loss": 0.9548,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8568391256075264,
+ "learning_rate": 1.1264831760185562e-05,
+ "loss": 0.939,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.05001553258945,
+ "learning_rate": 1.1258649484123895e-05,
+ "loss": 0.9385,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8948583875364915,
+ "learning_rate": 1.1252466719217274e-05,
+ "loss": 0.918,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8940166872402399,
+ "learning_rate": 1.1246283467867012e-05,
+ "loss": 0.974,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8814746308200198,
+ "learning_rate": 1.1240099732474613e-05,
+ "loss": 0.9408,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8568835057729686,
+ "learning_rate": 1.1233915515441765e-05,
+ "loss": 0.8711,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0324075778327733,
+ "learning_rate": 1.1227730819170349e-05,
+ "loss": 0.9759,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9800290942423504,
+ "learning_rate": 1.1221545646062431e-05,
+ "loss": 0.9136,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9742522526641997,
+ "learning_rate": 1.121535999852026e-05,
+ "loss": 0.9483,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.866587396715058,
+ "learning_rate": 1.1209173878946271e-05,
+ "loss": 0.889,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8542990632480492,
+ "learning_rate": 1.1202987289743078e-05,
+ "loss": 0.8621,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9038068252517323,
+ "learning_rate": 1.1196800233313488e-05,
+ "loss": 0.8864,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9055792416506564,
+ "learning_rate": 1.1190612712060475e-05,
+ "loss": 0.9625,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7488042866430171,
+ "learning_rate": 1.1184424728387204e-05,
+ "loss": 0.8115,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8285643307979524,
+ "learning_rate": 1.1178236284697017e-05,
+ "loss": 0.9556,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8858001716554392,
+ "learning_rate": 1.1172047383393434e-05,
+ "loss": 0.8987,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8898845243154941,
+ "learning_rate": 1.1165858026880151e-05,
+ "loss": 0.9275,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0837169665343633,
+ "learning_rate": 1.1159668217561048e-05,
+ "loss": 0.9527,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9884515828046143,
+ "learning_rate": 1.115347795784017e-05,
+ "loss": 0.9462,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8500678195789728,
+ "learning_rate": 1.1147287250121745e-05,
+ "loss": 0.8821,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9236208787083816,
+ "learning_rate": 1.1141096096810174e-05,
+ "loss": 0.8708,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9006687358510446,
+ "learning_rate": 1.1134904500310029e-05,
+ "loss": 0.8836,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8915056825227955,
+ "learning_rate": 1.1128712463026048e-05,
+ "loss": 0.9426,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.671204049922936,
+ "learning_rate": 1.1122519987363156e-05,
+ "loss": 0.7995,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9088402297755798,
+ "learning_rate": 1.1116327075726436e-05,
+ "loss": 0.9099,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9759297843694378,
+ "learning_rate": 1.1110133730521142e-05,
+ "loss": 0.9089,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9295096357487002,
+ "learning_rate": 1.11039399541527e-05,
+ "loss": 0.89,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9767473281952905,
+ "learning_rate": 1.10977457490267e-05,
+ "loss": 0.9199,
+ "step": 2500
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.822760323717349,
+ "learning_rate": 1.10915511175489e-05,
+ "loss": 0.9192,
+ "step": 2501
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9867890064903402,
+ "learning_rate": 1.1085356062125225e-05,
+ "loss": 0.9213,
+ "step": 2502
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9791577772382069,
+ "learning_rate": 1.1079160585161759e-05,
+ "loss": 0.9191,
+ "step": 2503
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9997639383262091,
+ "learning_rate": 1.107296468906476e-05,
+ "loss": 0.9277,
+ "step": 2504
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7079840333825845,
+ "learning_rate": 1.106676837624064e-05,
+ "loss": 0.8225,
+ "step": 2505
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9006607326467191,
+ "learning_rate": 1.1060571649095972e-05,
+ "loss": 0.9296,
+ "step": 2506
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8842182000095816,
+ "learning_rate": 1.10543745100375e-05,
+ "loss": 0.8679,
+ "step": 2507
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1043890880491394,
+ "learning_rate": 1.1048176961472114e-05,
+ "loss": 0.9272,
+ "step": 2508
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9480601505781148,
+ "learning_rate": 1.1041979005806876e-05,
+ "loss": 0.9394,
+ "step": 2509
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0075560449541396,
+ "learning_rate": 1.1035780645449001e-05,
+ "loss": 0.9319,
+ "step": 2510
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8862951055447905,
+ "learning_rate": 1.1029581882805857e-05,
+ "loss": 0.9331,
+ "step": 2511
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7870628169230721,
+ "learning_rate": 1.1023382720284973e-05,
+ "loss": 0.8908,
+ "step": 2512
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9570861992825871,
+ "learning_rate": 1.1017183160294033e-05,
+ "loss": 0.9369,
+ "step": 2513
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.961174346938788,
+ "learning_rate": 1.1010983205240878e-05,
+ "loss": 0.8855,
+ "step": 2514
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9144345625655905,
+ "learning_rate": 1.1004782857533488e-05,
+ "loss": 0.9364,
+ "step": 2515
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9584325102763894,
+ "learning_rate": 1.099858211958002e-05,
+ "loss": 0.9412,
+ "step": 2516
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9021245823914934,
+ "learning_rate": 1.0992380993788763e-05,
+ "loss": 0.8482,
+ "step": 2517
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1147423454652747,
+ "learning_rate": 1.0986179482568162e-05,
+ "loss": 1.0222,
+ "step": 2518
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9486015078056638,
+ "learning_rate": 1.0979977588326815e-05,
+ "loss": 0.9276,
+ "step": 2519
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1593612070416686,
+ "learning_rate": 1.0973775313473465e-05,
+ "loss": 0.9413,
+ "step": 2520
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8710061882469988,
+ "learning_rate": 1.0967572660417001e-05,
+ "loss": 0.8537,
+ "step": 2521
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8996327546711663,
+ "learning_rate": 1.0961369631566468e-05,
+ "loss": 0.9361,
+ "step": 2522
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8064475456226737,
+ "learning_rate": 1.0955166229331048e-05,
+ "loss": 0.9153,
+ "step": 2523
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8608021853676151,
+ "learning_rate": 1.0948962456120068e-05,
+ "loss": 0.9235,
+ "step": 2524
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8852260339823464,
+ "learning_rate": 1.0942758314343007e-05,
+ "loss": 0.9461,
+ "step": 2525
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8678316373227288,
+ "learning_rate": 1.0936553806409482e-05,
+ "loss": 0.8729,
+ "step": 2526
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8725130974425622,
+ "learning_rate": 1.0930348934729249e-05,
+ "loss": 0.9332,
+ "step": 2527
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8197278256630408,
+ "learning_rate": 1.0924143701712211e-05,
+ "loss": 0.9052,
+ "step": 2528
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9227071384165294,
+ "learning_rate": 1.0917938109768404e-05,
+ "loss": 0.8721,
+ "step": 2529
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9202310669956766,
+ "learning_rate": 1.0911732161308014e-05,
+ "loss": 0.8468,
+ "step": 2530
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9632191462715535,
+ "learning_rate": 1.0905525858741364e-05,
+ "loss": 0.9707,
+ "step": 2531
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0183887138446068,
+ "learning_rate": 1.08993192044789e-05,
+ "loss": 0.9825,
+ "step": 2532
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.02742785347052,
+ "learning_rate": 1.089311220093122e-05,
+ "loss": 0.9257,
+ "step": 2533
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7652254275329015,
+ "learning_rate": 1.0886904850509052e-05,
+ "loss": 0.8632,
+ "step": 2534
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9097855200707109,
+ "learning_rate": 1.0880697155623264e-05,
+ "loss": 0.9146,
+ "step": 2535
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.956441293096091,
+ "learning_rate": 1.0874489118684846e-05,
+ "loss": 0.9407,
+ "step": 2536
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8103964493668271,
+ "learning_rate": 1.086828074210493e-05,
+ "loss": 0.8975,
+ "step": 2537
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8857215415773282,
+ "learning_rate": 1.0862072028294777e-05,
+ "loss": 0.8588,
+ "step": 2538
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8916396986032709,
+ "learning_rate": 1.0855862979665788e-05,
+ "loss": 0.9125,
+ "step": 2539
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0678840230011677,
+ "learning_rate": 1.0849653598629477e-05,
+ "loss": 0.9093,
+ "step": 2540
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7773438338658273,
+ "learning_rate": 1.0843443887597495e-05,
+ "loss": 0.9155,
+ "step": 2541
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9101379389107885,
+ "learning_rate": 1.0837233848981632e-05,
+ "loss": 0.8771,
+ "step": 2542
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8538385117114854,
+ "learning_rate": 1.0831023485193787e-05,
+ "loss": 0.8299,
+ "step": 2543
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9030682333200006,
+ "learning_rate": 1.0824812798645997e-05,
+ "loss": 0.9705,
+ "step": 2544
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9288473783663039,
+ "learning_rate": 1.0818601791750418e-05,
+ "loss": 0.9488,
+ "step": 2545
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8950245714539972,
+ "learning_rate": 1.0812390466919337e-05,
+ "loss": 0.9328,
+ "step": 2546
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9564521059009605,
+ "learning_rate": 1.0806178826565162e-05,
+ "loss": 0.9276,
+ "step": 2547
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8761287097782974,
+ "learning_rate": 1.0799966873100419e-05,
+ "loss": 0.9244,
+ "step": 2548
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0412983702717888,
+ "learning_rate": 1.0793754608937758e-05,
+ "loss": 0.9736,
+ "step": 2549
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9437424331698793,
+ "learning_rate": 1.0787542036489955e-05,
+ "loss": 0.9179,
+ "step": 2550
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7804661163473227,
+ "learning_rate": 1.0781329158169902e-05,
+ "loss": 0.9121,
+ "step": 2551
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8831772952539662,
+ "learning_rate": 1.0775115976390607e-05,
+ "loss": 0.8432,
+ "step": 2552
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9938871865340106,
+ "learning_rate": 1.0768902493565197e-05,
+ "loss": 0.9433,
+ "step": 2553
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8329659565497897,
+ "learning_rate": 1.0762688712106918e-05,
+ "loss": 0.8953,
+ "step": 2554
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8616458461988764,
+ "learning_rate": 1.0756474634429133e-05,
+ "loss": 0.8472,
+ "step": 2555
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.749284698387687,
+ "learning_rate": 1.0750260262945314e-05,
+ "loss": 0.848,
+ "step": 2556
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7900090795422248,
+ "learning_rate": 1.0744045600069055e-05,
+ "loss": 0.8288,
+ "step": 2557
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.90878870144412,
+ "learning_rate": 1.0737830648214063e-05,
+ "loss": 0.9224,
+ "step": 2558
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8691331482683546,
+ "learning_rate": 1.0731615409794144e-05,
+ "loss": 0.8839,
+ "step": 2559
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8732184108501752,
+ "learning_rate": 1.0725399887223234e-05,
+ "loss": 0.8877,
+ "step": 2560
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8291325762736458,
+ "learning_rate": 1.0719184082915364e-05,
+ "loss": 0.8705,
+ "step": 2561
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9860547678378869,
+ "learning_rate": 1.0712967999284682e-05,
+ "loss": 0.8344,
+ "step": 2562
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8049678356130826,
+ "learning_rate": 1.0706751638745448e-05,
+ "loss": 0.9237,
+ "step": 2563
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7731868167259505,
+ "learning_rate": 1.0700535003712023e-05,
+ "loss": 0.8839,
+ "step": 2564
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.86604478722542,
+ "learning_rate": 1.069431809659887e-05,
+ "loss": 0.8888,
+ "step": 2565
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8355266940892986,
+ "learning_rate": 1.068810091982057e-05,
+ "loss": 0.8851,
+ "step": 2566
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8424596228485154,
+ "learning_rate": 1.0681883475791803e-05,
+ "loss": 0.8894,
+ "step": 2567
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9438666012713255,
+ "learning_rate": 1.067566576692735e-05,
+ "loss": 0.9061,
+ "step": 2568
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9137475284130079,
+ "learning_rate": 1.0669447795642103e-05,
+ "loss": 0.9658,
+ "step": 2569
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9177491280420521,
+ "learning_rate": 1.066322956435104e-05,
+ "loss": 0.9652,
+ "step": 2570
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8151440461337732,
+ "learning_rate": 1.065701107546926e-05,
+ "loss": 0.9404,
+ "step": 2571
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9517736721987609,
+ "learning_rate": 1.065079233141195e-05,
+ "loss": 0.9008,
+ "step": 2572
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9214817977511858,
+ "learning_rate": 1.0644573334594395e-05,
+ "loss": 0.9013,
+ "step": 2573
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9935522415670198,
+ "learning_rate": 1.0638354087431986e-05,
+ "loss": 0.9906,
+ "step": 2574
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.831657275252258,
+ "learning_rate": 1.0632134592340204e-05,
+ "loss": 0.8538,
+ "step": 2575
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9208221535422952,
+ "learning_rate": 1.0625914851734632e-05,
+ "loss": 0.9353,
+ "step": 2576
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8362787872462575,
+ "learning_rate": 1.0619694868030943e-05,
+ "loss": 0.8935,
+ "step": 2577
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8724694745513101,
+ "learning_rate": 1.0613474643644907e-05,
+ "loss": 0.942,
+ "step": 2578
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9772096354169073,
+ "learning_rate": 1.0607254180992391e-05,
+ "loss": 0.9321,
+ "step": 2579
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8178722945247205,
+ "learning_rate": 1.0601033482489346e-05,
+ "loss": 0.9227,
+ "step": 2580
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7938438545130856,
+ "learning_rate": 1.0594812550551826e-05,
+ "loss": 0.8659,
+ "step": 2581
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9769194053293457,
+ "learning_rate": 1.058859138759596e-05,
+ "loss": 0.9612,
+ "step": 2582
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9496683326132712,
+ "learning_rate": 1.0582369996037985e-05,
+ "loss": 0.9323,
+ "step": 2583
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8764871153485599,
+ "learning_rate": 1.0576148378294213e-05,
+ "loss": 0.8985,
+ "step": 2584
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9069334600949017,
+ "learning_rate": 1.056992653678105e-05,
+ "loss": 0.8981,
+ "step": 2585
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9099650409820482,
+ "learning_rate": 1.0563704473914986e-05,
+ "loss": 0.9368,
+ "step": 2586
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9646440392923197,
+ "learning_rate": 1.0557482192112603e-05,
+ "loss": 0.9668,
+ "step": 2587
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9896058753441028,
+ "learning_rate": 1.0551259693790556e-05,
+ "loss": 0.913,
+ "step": 2588
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9437713256693829,
+ "learning_rate": 1.0545036981365601e-05,
+ "loss": 0.988,
+ "step": 2589
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8363912318332196,
+ "learning_rate": 1.053881405725456e-05,
+ "loss": 0.8804,
+ "step": 2590
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8074888738043394,
+ "learning_rate": 1.0532590923874349e-05,
+ "loss": 0.8875,
+ "step": 2591
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7917758946751435,
+ "learning_rate": 1.0526367583641958e-05,
+ "loss": 0.9155,
+ "step": 2592
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 1.0004308869341767,
+ "learning_rate": 1.0520144038974468e-05,
+ "loss": 0.9115,
+ "step": 2593
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9334426047016258,
+ "learning_rate": 1.0513920292289021e-05,
+ "loss": 0.9215,
+ "step": 2594
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.971669182888355,
+ "learning_rate": 1.0507696346002857e-05,
+ "loss": 1.002,
+ "step": 2595
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9048044665335145,
+ "learning_rate": 1.0501472202533285e-05,
+ "loss": 0.9214,
+ "step": 2596
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9450349281998583,
+ "learning_rate": 1.0495247864297684e-05,
+ "loss": 0.8992,
+ "step": 2597
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7847922346316105,
+ "learning_rate": 1.0489023333713522e-05,
+ "loss": 0.8048,
+ "step": 2598
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.79524184009825,
+ "learning_rate": 1.0482798613198328e-05,
+ "loss": 0.8964,
+ "step": 2599
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9524976603435144,
+ "learning_rate": 1.047657370516972e-05,
+ "loss": 0.9111,
+ "step": 2600
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9081215324462001,
+ "learning_rate": 1.0470348612045376e-05,
+ "loss": 0.949,
+ "step": 2601
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9045225841688138,
+ "learning_rate": 1.0464123336243049e-05,
+ "loss": 0.8287,
+ "step": 2602
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8380398750759631,
+ "learning_rate": 1.0457897880180566e-05,
+ "loss": 0.9399,
+ "step": 2603
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9087598109748737,
+ "learning_rate": 1.0451672246275826e-05,
+ "loss": 0.9585,
+ "step": 2604
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8496588018443858,
+ "learning_rate": 1.0445446436946788e-05,
+ "loss": 0.8778,
+ "step": 2605
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8673556842396951,
+ "learning_rate": 1.0439220454611486e-05,
+ "loss": 0.916,
+ "step": 2606
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9558596086881778,
+ "learning_rate": 1.0432994301688021e-05,
+ "loss": 0.9003,
+ "step": 2607
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8932203872619231,
+ "learning_rate": 1.0426767980594559e-05,
+ "loss": 0.9011,
+ "step": 2608
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8709678413237031,
+ "learning_rate": 1.0420541493749332e-05,
+ "loss": 0.9188,
+ "step": 2609
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9443307261781778,
+ "learning_rate": 1.0414314843570634e-05,
+ "loss": 0.9224,
+ "step": 2610
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8442843612430706,
+ "learning_rate": 1.0408088032476822e-05,
+ "loss": 0.9342,
+ "step": 2611
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9057892841452813,
+ "learning_rate": 1.0401861062886324e-05,
+ "loss": 0.9421,
+ "step": 2612
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.95722829726723,
+ "learning_rate": 1.0395633937217622e-05,
+ "loss": 0.9237,
+ "step": 2613
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.887245273579422,
+ "learning_rate": 1.0389406657889254e-05,
+ "loss": 0.8691,
+ "step": 2614
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9952294014524894,
+ "learning_rate": 1.0383179227319826e-05,
+ "loss": 0.9472,
+ "step": 2615
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9183848153399197,
+ "learning_rate": 1.0376951647928007e-05,
+ "loss": 0.9701,
+ "step": 2616
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9202787300400367,
+ "learning_rate": 1.0370723922132506e-05,
+ "loss": 0.9518,
+ "step": 2617
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 1.1518615287501688,
+ "learning_rate": 1.036449605235211e-05,
+ "loss": 0.9557,
+ "step": 2618
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9091438334788177,
+ "learning_rate": 1.0358268041005644e-05,
+ "loss": 0.8786,
+ "step": 2619
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9425613409631651,
+ "learning_rate": 1.0352039890511997e-05,
+ "loss": 0.9225,
+ "step": 2620
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9035868714083709,
+ "learning_rate": 1.034581160329012e-05,
+ "loss": 0.8795,
+ "step": 2621
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.969566685701797,
+ "learning_rate": 1.0339583181758997e-05,
+ "loss": 0.9597,
+ "step": 2622
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7752328599278581,
+ "learning_rate": 1.033335462833768e-05,
+ "loss": 0.8672,
+ "step": 2623
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.894538400316396,
+ "learning_rate": 1.0327125945445265e-05,
+ "loss": 0.923,
+ "step": 2624
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0020503476584997,
+ "learning_rate": 1.0320897135500904e-05,
+ "loss": 0.8843,
+ "step": 2625
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9762706053326204,
+ "learning_rate": 1.0314668200923791e-05,
+ "loss": 0.8628,
+ "step": 2626
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7306092357943745,
+ "learning_rate": 1.0308439144133177e-05,
+ "loss": 0.8269,
+ "step": 2627
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8450381002190108,
+ "learning_rate": 1.0302209967548354e-05,
+ "loss": 0.8759,
+ "step": 2628
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0484126440621628,
+ "learning_rate": 1.029598067358866e-05,
+ "loss": 0.9987,
+ "step": 2629
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0162609495770896,
+ "learning_rate": 1.0289751264673485e-05,
+ "loss": 0.9406,
+ "step": 2630
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8857328642856026,
+ "learning_rate": 1.0283521743222256e-05,
+ "loss": 0.8905,
+ "step": 2631
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9613453171218498,
+ "learning_rate": 1.0277292111654447e-05,
+ "loss": 0.8706,
+ "step": 2632
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9585597791516702,
+ "learning_rate": 1.0271062372389582e-05,
+ "loss": 0.9398,
+ "step": 2633
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.02185310005277,
+ "learning_rate": 1.0264832527847212e-05,
+ "loss": 1.015,
+ "step": 2634
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8506999074950125,
+ "learning_rate": 1.0258602580446941e-05,
+ "loss": 0.9413,
+ "step": 2635
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7756019521705919,
+ "learning_rate": 1.0252372532608405e-05,
+ "loss": 0.7947,
+ "step": 2636
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8242268732820904,
+ "learning_rate": 1.024614238675129e-05,
+ "loss": 0.8543,
+ "step": 2637
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9557258859558818,
+ "learning_rate": 1.0239912145295303e-05,
+ "loss": 0.9363,
+ "step": 2638
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.885606252981095,
+ "learning_rate": 1.0233681810660207e-05,
+ "loss": 0.9005,
+ "step": 2639
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.87261949669791,
+ "learning_rate": 1.0227451385265788e-05,
+ "loss": 0.9026,
+ "step": 2640
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8351181251793705,
+ "learning_rate": 1.022122087153187e-05,
+ "loss": 0.9176,
+ "step": 2641
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0067856604494778,
+ "learning_rate": 1.0214990271878319e-05,
+ "loss": 0.9134,
+ "step": 2642
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.910586530408183,
+ "learning_rate": 1.0208759588725016e-05,
+ "loss": 0.9316,
+ "step": 2643
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8404782529309832,
+ "learning_rate": 1.0202528824491899e-05,
+ "loss": 0.8693,
+ "step": 2644
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9355122862456461,
+ "learning_rate": 1.0196297981598921e-05,
+ "loss": 0.9204,
+ "step": 2645
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.874851430526491,
+ "learning_rate": 1.019006706246607e-05,
+ "loss": 0.913,
+ "step": 2646
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9123862236178606,
+ "learning_rate": 1.018383606951336e-05,
+ "loss": 0.8558,
+ "step": 2647
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8083236091623784,
+ "learning_rate": 1.0177605005160837e-05,
+ "loss": 0.8599,
+ "step": 2648
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7662563643188285,
+ "learning_rate": 1.0171373871828578e-05,
+ "loss": 0.85,
+ "step": 2649
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8626881415928633,
+ "learning_rate": 1.0165142671936685e-05,
+ "loss": 0.8551,
+ "step": 2650
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7521555912863335,
+ "learning_rate": 1.0158911407905279e-05,
+ "loss": 0.8779,
+ "step": 2651
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.852450834594269,
+ "learning_rate": 1.0152680082154514e-05,
+ "loss": 0.9091,
+ "step": 2652
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8607550615525535,
+ "learning_rate": 1.0146448697104561e-05,
+ "loss": 0.8538,
+ "step": 2653
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9225008247575006,
+ "learning_rate": 1.0140217255175626e-05,
+ "loss": 0.9383,
+ "step": 2654
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9667517957388785,
+ "learning_rate": 1.013398575878792e-05,
+ "loss": 0.9425,
+ "step": 2655
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9444714811857227,
+ "learning_rate": 1.0127754210361694e-05,
+ "loss": 0.9294,
+ "step": 2656
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9642924515137425,
+ "learning_rate": 1.0121522612317204e-05,
+ "loss": 0.9386,
+ "step": 2657
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8077968807746465,
+ "learning_rate": 1.011529096707473e-05,
+ "loss": 0.8755,
+ "step": 2658
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.018675145764501,
+ "learning_rate": 1.0109059277054574e-05,
+ "loss": 0.8789,
+ "step": 2659
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8959489891860603,
+ "learning_rate": 1.010282754467705e-05,
+ "loss": 0.9555,
+ "step": 2660
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0289885314244605,
+ "learning_rate": 1.0096595772362492e-05,
+ "loss": 0.9455,
+ "step": 2661
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8010121594109141,
+ "learning_rate": 1.0090363962531251e-05,
+ "loss": 0.8189,
+ "step": 2662
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8850716538987777,
+ "learning_rate": 1.0084132117603689e-05,
+ "loss": 0.8895,
+ "step": 2663
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8146922307406169,
+ "learning_rate": 1.0077900240000181e-05,
+ "loss": 0.9185,
+ "step": 2664
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9555548419652233,
+ "learning_rate": 1.0071668332141115e-05,
+ "loss": 0.9544,
+ "step": 2665
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8097655953213143,
+ "learning_rate": 1.0065436396446899e-05,
+ "loss": 0.8509,
+ "step": 2666
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8715681140386908,
+ "learning_rate": 1.0059204435337938e-05,
+ "loss": 0.938,
+ "step": 2667
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9550607483205545,
+ "learning_rate": 1.0052972451234656e-05,
+ "loss": 0.9438,
+ "step": 2668
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7403061380300183,
+ "learning_rate": 1.0046740446557485e-05,
+ "loss": 0.845,
+ "step": 2669
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8548122937206981,
+ "learning_rate": 1.0040508423726865e-05,
+ "loss": 0.9133,
+ "step": 2670
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.822985306694666,
+ "learning_rate": 1.0034276385163238e-05,
+ "loss": 0.8613,
+ "step": 2671
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9792028962147389,
+ "learning_rate": 1.0028044333287056e-05,
+ "loss": 0.9516,
+ "step": 2672
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0057001649907389,
+ "learning_rate": 1.002181227051878e-05,
+ "loss": 0.9627,
+ "step": 2673
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8175201200990784,
+ "learning_rate": 1.0015580199278873e-05,
+ "loss": 0.889,
+ "step": 2674
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9916316391604548,
+ "learning_rate": 1.0009348121987795e-05,
+ "loss": 0.9594,
+ "step": 2675
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8684745974465504,
+ "learning_rate": 1.000311604106601e-05,
+ "loss": 0.9809,
+ "step": 2676
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9155440255234769,
+ "learning_rate": 9.996883958933993e-06,
+ "loss": 0.9064,
+ "step": 2677
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8640568678762447,
+ "learning_rate": 9.99065187801221e-06,
+ "loss": 0.8847,
+ "step": 2678
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9504764124917944,
+ "learning_rate": 9.984419800721132e-06,
+ "loss": 0.9874,
+ "step": 2679
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.968686441097706,
+ "learning_rate": 9.978187729481218e-06,
+ "loss": 0.9961,
+ "step": 2680
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.097545469495621,
+ "learning_rate": 9.971955666712945e-06,
+ "loss": 0.8897,
+ "step": 2681
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9265533664930266,
+ "learning_rate": 9.965723614836764e-06,
+ "loss": 0.8999,
+ "step": 2682
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8529524853563816,
+ "learning_rate": 9.959491576273139e-06,
+ "loss": 0.9192,
+ "step": 2683
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0370520750567518,
+ "learning_rate": 9.95325955344252e-06,
+ "loss": 0.9016,
+ "step": 2684
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9480273223579014,
+ "learning_rate": 9.947027548765347e-06,
+ "loss": 0.8892,
+ "step": 2685
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8972914419458259,
+ "learning_rate": 9.940795564662064e-06,
+ "loss": 0.9388,
+ "step": 2686
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.133889974782131,
+ "learning_rate": 9.934563603553103e-06,
+ "loss": 0.8861,
+ "step": 2687
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7846840406978326,
+ "learning_rate": 9.928331667858886e-06,
+ "loss": 0.9225,
+ "step": 2688
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9160207276009006,
+ "learning_rate": 9.922099759999822e-06,
+ "loss": 0.8547,
+ "step": 2689
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8970412714530798,
+ "learning_rate": 9.915867882396314e-06,
+ "loss": 0.9017,
+ "step": 2690
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9819606853619415,
+ "learning_rate": 9.909636037468754e-06,
+ "loss": 0.8598,
+ "step": 2691
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.959621348155528,
+ "learning_rate": 9.90340422763751e-06,
+ "loss": 0.9305,
+ "step": 2692
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0056031158774708,
+ "learning_rate": 9.897172455322953e-06,
+ "loss": 0.8966,
+ "step": 2693
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.933979224935091,
+ "learning_rate": 9.890940722945429e-06,
+ "loss": 0.9015,
+ "step": 2694
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.111724034217747,
+ "learning_rate": 9.884709032925274e-06,
+ "loss": 0.8763,
+ "step": 2695
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.035387498738903,
+ "learning_rate": 9.878477387682801e-06,
+ "loss": 0.9129,
+ "step": 2696
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9428862283312817,
+ "learning_rate": 9.872245789638308e-06,
+ "loss": 0.8948,
+ "step": 2697
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9925589116215584,
+ "learning_rate": 9.866014241212078e-06,
+ "loss": 0.9153,
+ "step": 2698
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9622771768642312,
+ "learning_rate": 9.859782744824376e-06,
+ "loss": 0.8814,
+ "step": 2699
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9304872266758989,
+ "learning_rate": 9.85355130289544e-06,
+ "loss": 0.961,
+ "step": 2700
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8932655089327036,
+ "learning_rate": 9.84731991784549e-06,
+ "loss": 0.8428,
+ "step": 2701
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8086636779098098,
+ "learning_rate": 9.841088592094726e-06,
+ "loss": 0.8532,
+ "step": 2702
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8452967875903474,
+ "learning_rate": 9.834857328063316e-06,
+ "loss": 0.9471,
+ "step": 2703
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9285138349230967,
+ "learning_rate": 9.828626128171422e-06,
+ "loss": 0.9679,
+ "step": 2704
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7850938005307868,
+ "learning_rate": 9.822394994839164e-06,
+ "loss": 0.8158,
+ "step": 2705
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9142247032151152,
+ "learning_rate": 9.816163930486643e-06,
+ "loss": 0.9181,
+ "step": 2706
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.824365020661094,
+ "learning_rate": 9.809932937533935e-06,
+ "loss": 0.9214,
+ "step": 2707
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7450186719300673,
+ "learning_rate": 9.803702018401084e-06,
+ "loss": 0.8249,
+ "step": 2708
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9515052961042607,
+ "learning_rate": 9.797471175508101e-06,
+ "loss": 0.8825,
+ "step": 2709
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.924805724255183,
+ "learning_rate": 9.791240411274982e-06,
+ "loss": 0.9015,
+ "step": 2710
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.175898803493173,
+ "learning_rate": 9.785009728121686e-06,
+ "loss": 0.9106,
+ "step": 2711
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9479481777049079,
+ "learning_rate": 9.778779128468133e-06,
+ "loss": 0.9555,
+ "step": 2712
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0911913695730804,
+ "learning_rate": 9.772548614734217e-06,
+ "loss": 0.9524,
+ "step": 2713
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.87388552824401,
+ "learning_rate": 9.766318189339798e-06,
+ "loss": 0.846,
+ "step": 2714
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9989366645175373,
+ "learning_rate": 9.760087854704697e-06,
+ "loss": 0.9321,
+ "step": 2715
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8908420957462533,
+ "learning_rate": 9.753857613248714e-06,
+ "loss": 0.877,
+ "step": 2716
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8544682850900619,
+ "learning_rate": 9.747627467391596e-06,
+ "loss": 0.9285,
+ "step": 2717
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0050333585009792,
+ "learning_rate": 9.741397419553062e-06,
+ "loss": 0.9874,
+ "step": 2718
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8338996086996195,
+ "learning_rate": 9.735167472152793e-06,
+ "loss": 0.8951,
+ "step": 2719
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.957417706634571,
+ "learning_rate": 9.728937627610425e-06,
+ "loss": 0.9587,
+ "step": 2720
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7800515164985007,
+ "learning_rate": 9.722707888345553e-06,
+ "loss": 0.8651,
+ "step": 2721
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8038472144653551,
+ "learning_rate": 9.716478256777749e-06,
+ "loss": 0.8878,
+ "step": 2722
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.912356367201173,
+ "learning_rate": 9.710248735326519e-06,
+ "loss": 0.9643,
+ "step": 2723
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9406352227053918,
+ "learning_rate": 9.704019326411344e-06,
+ "loss": 0.9287,
+ "step": 2724
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8698506239978246,
+ "learning_rate": 9.697790032451651e-06,
+ "loss": 0.8895,
+ "step": 2725
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0442017747831227,
+ "learning_rate": 9.691560855866826e-06,
+ "loss": 0.9219,
+ "step": 2726
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9693225905602304,
+ "learning_rate": 9.685331799076208e-06,
+ "loss": 0.9459,
+ "step": 2727
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9954073787456973,
+ "learning_rate": 9.6791028644991e-06,
+ "loss": 0.9448,
+ "step": 2728
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.779641152911498,
+ "learning_rate": 9.672874054554738e-06,
+ "loss": 0.8448,
+ "step": 2729
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8927948634318709,
+ "learning_rate": 9.666645371662324e-06,
+ "loss": 0.9204,
+ "step": 2730
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9038143111408246,
+ "learning_rate": 9.660416818241007e-06,
+ "loss": 0.9044,
+ "step": 2731
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9075975682196729,
+ "learning_rate": 9.654188396709882e-06,
+ "loss": 0.8678,
+ "step": 2732
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9432930039421822,
+ "learning_rate": 9.647960109488003e-06,
+ "loss": 0.8383,
+ "step": 2733
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8710647976986235,
+ "learning_rate": 9.64173195899436e-06,
+ "loss": 0.9381,
+ "step": 2734
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9918346181120583,
+ "learning_rate": 9.635503947647894e-06,
+ "loss": 0.8816,
+ "step": 2735
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9295134401466785,
+ "learning_rate": 9.629276077867497e-06,
+ "loss": 0.9255,
+ "step": 2736
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9859454340264127,
+ "learning_rate": 9.623048352071998e-06,
+ "loss": 0.935,
+ "step": 2737
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.013292235329241,
+ "learning_rate": 9.616820772680174e-06,
+ "loss": 0.9863,
+ "step": 2738
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.120477410813826,
+ "learning_rate": 9.610593342110746e-06,
+ "loss": 0.9324,
+ "step": 2739
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9193687922594248,
+ "learning_rate": 9.604366062782381e-06,
+ "loss": 0.8787,
+ "step": 2740
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9159267693554459,
+ "learning_rate": 9.598138937113677e-06,
+ "loss": 0.908,
+ "step": 2741
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9580787692929643,
+ "learning_rate": 9.59191196752318e-06,
+ "loss": 1.0209,
+ "step": 2742
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8277073582461278,
+ "learning_rate": 9.58568515642937e-06,
+ "loss": 0.933,
+ "step": 2743
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8244172635159756,
+ "learning_rate": 9.579458506250668e-06,
+ "loss": 0.936,
+ "step": 2744
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8795723489093242,
+ "learning_rate": 9.573232019405441e-06,
+ "loss": 0.9406,
+ "step": 2745
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9621544806179235,
+ "learning_rate": 9.567005698311982e-06,
+ "loss": 1.0185,
+ "step": 2746
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8621218780247002,
+ "learning_rate": 9.560779545388517e-06,
+ "loss": 0.8546,
+ "step": 2747
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.966305821392136,
+ "learning_rate": 9.554553563053217e-06,
+ "loss": 0.9355,
+ "step": 2748
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8442369405063994,
+ "learning_rate": 9.548327753724181e-06,
+ "loss": 0.8634,
+ "step": 2749
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8642247794460316,
+ "learning_rate": 9.542102119819436e-06,
+ "loss": 0.9376,
+ "step": 2750
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.893929431327071,
+ "learning_rate": 9.535876663756955e-06,
+ "loss": 0.9199,
+ "step": 2751
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8582812650798772,
+ "learning_rate": 9.529651387954628e-06,
+ "loss": 0.8884,
+ "step": 2752
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9921949317796473,
+ "learning_rate": 9.523426294830284e-06,
+ "loss": 0.9579,
+ "step": 2753
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.869543213774848,
+ "learning_rate": 9.517201386801675e-06,
+ "loss": 0.8638,
+ "step": 2754
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9393754312841202,
+ "learning_rate": 9.510976666286484e-06,
+ "loss": 0.9272,
+ "step": 2755
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9466622229984474,
+ "learning_rate": 9.504752135702318e-06,
+ "loss": 0.9234,
+ "step": 2756
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8885316798429939,
+ "learning_rate": 9.498527797466718e-06,
+ "loss": 0.9864,
+ "step": 2757
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8782502025304176,
+ "learning_rate": 9.492303653997146e-06,
+ "loss": 0.9173,
+ "step": 2758
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9152491284120332,
+ "learning_rate": 9.48607970771098e-06,
+ "loss": 0.9389,
+ "step": 2759
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.832979471232123,
+ "learning_rate": 9.479855961025538e-06,
+ "loss": 0.8709,
+ "step": 2760
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.186545412621472,
+ "learning_rate": 9.473632416358045e-06,
+ "loss": 0.9639,
+ "step": 2761
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8884032453897417,
+ "learning_rate": 9.467409076125653e-06,
+ "loss": 0.847,
+ "step": 2762
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8006699169210758,
+ "learning_rate": 9.461185942745443e-06,
+ "loss": 0.8459,
+ "step": 2763
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.7766186551333335,
+ "learning_rate": 9.454963018634402e-06,
+ "loss": 0.8324,
+ "step": 2764
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9237476035546434,
+ "learning_rate": 9.448740306209447e-06,
+ "loss": 0.9729,
+ "step": 2765
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.130076465150555,
+ "learning_rate": 9.442517807887402e-06,
+ "loss": 0.9291,
+ "step": 2766
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9274629901179156,
+ "learning_rate": 9.436295526085016e-06,
+ "loss": 0.8956,
+ "step": 2767
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9466712361262105,
+ "learning_rate": 9.430073463218952e-06,
+ "loss": 0.8847,
+ "step": 2768
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9150813412542628,
+ "learning_rate": 9.423851621705789e-06,
+ "loss": 0.9804,
+ "step": 2769
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8742599731983802,
+ "learning_rate": 9.41763000396202e-06,
+ "loss": 0.8676,
+ "step": 2770
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9846363309688085,
+ "learning_rate": 9.411408612404043e-06,
+ "loss": 0.9437,
+ "step": 2771
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9531542175536194,
+ "learning_rate": 9.40518744944818e-06,
+ "loss": 0.9508,
+ "step": 2772
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8997787513870119,
+ "learning_rate": 9.398966517510654e-06,
+ "loss": 0.9235,
+ "step": 2773
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.664104262609531,
+ "learning_rate": 9.39274581900761e-06,
+ "loss": 0.8186,
+ "step": 2774
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9201932350458625,
+ "learning_rate": 9.386525356355095e-06,
+ "loss": 0.8796,
+ "step": 2775
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9402996802264322,
+ "learning_rate": 9.380305131969059e-06,
+ "loss": 0.9598,
+ "step": 2776
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8955461641801926,
+ "learning_rate": 9.374085148265372e-06,
+ "loss": 0.9106,
+ "step": 2777
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.0874745595922153,
+ "learning_rate": 9.3678654076598e-06,
+ "loss": 0.9892,
+ "step": 2778
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.903339872921699,
+ "learning_rate": 9.361645912568015e-06,
+ "loss": 0.8753,
+ "step": 2779
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9904002465946561,
+ "learning_rate": 9.355426665405607e-06,
+ "loss": 0.9402,
+ "step": 2780
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8469067459385102,
+ "learning_rate": 9.349207668588053e-06,
+ "loss": 0.8425,
+ "step": 2781
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0114768402522494,
+ "learning_rate": 9.342988924530742e-06,
+ "loss": 0.9161,
+ "step": 2782
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9103410534742161,
+ "learning_rate": 9.336770435648963e-06,
+ "loss": 0.9082,
+ "step": 2783
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9699837672709782,
+ "learning_rate": 9.330552204357904e-06,
+ "loss": 0.9396,
+ "step": 2784
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8517617486881591,
+ "learning_rate": 9.32433423307265e-06,
+ "loss": 0.8949,
+ "step": 2785
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8384103194268664,
+ "learning_rate": 9.318116524208198e-06,
+ "loss": 0.964,
+ "step": 2786
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8258252256435029,
+ "learning_rate": 9.311899080179433e-06,
+ "loss": 0.8494,
+ "step": 2787
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9347155651432546,
+ "learning_rate": 9.305681903401133e-06,
+ "loss": 0.8921,
+ "step": 2788
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8955426782634741,
+ "learning_rate": 9.299464996287984e-06,
+ "loss": 0.8505,
+ "step": 2789
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8799098838384357,
+ "learning_rate": 9.293248361254557e-06,
+ "loss": 0.9311,
+ "step": 2790
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9514760362361673,
+ "learning_rate": 9.287032000715318e-06,
+ "loss": 0.8585,
+ "step": 2791
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.2196833037454613,
+ "learning_rate": 9.28081591708464e-06,
+ "loss": 0.8586,
+ "step": 2792
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8707921410483079,
+ "learning_rate": 9.27460011277677e-06,
+ "loss": 0.9048,
+ "step": 2793
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9470024633107743,
+ "learning_rate": 9.268384590205858e-06,
+ "loss": 0.9016,
+ "step": 2794
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.230101597217662,
+ "learning_rate": 9.262169351785944e-06,
+ "loss": 0.9577,
+ "step": 2795
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7966496630210943,
+ "learning_rate": 9.255954399930948e-06,
+ "loss": 0.8503,
+ "step": 2796
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.960131248334068,
+ "learning_rate": 9.249739737054686e-06,
+ "loss": 0.9492,
+ "step": 2797
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.1034537795974488,
+ "learning_rate": 9.24352536557087e-06,
+ "loss": 0.9407,
+ "step": 2798
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9650956271049898,
+ "learning_rate": 9.237311287893086e-06,
+ "loss": 0.9301,
+ "step": 2799
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9277231690081373,
+ "learning_rate": 9.231097506434808e-06,
+ "loss": 0.8886,
+ "step": 2800
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9053117564958411,
+ "learning_rate": 9.224884023609398e-06,
+ "loss": 0.914,
+ "step": 2801
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8328271726723444,
+ "learning_rate": 9.218670841830098e-06,
+ "loss": 0.8446,
+ "step": 2802
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0950860824950357,
+ "learning_rate": 9.212457963510045e-06,
+ "loss": 0.9847,
+ "step": 2803
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9108256159980279,
+ "learning_rate": 9.206245391062243e-06,
+ "loss": 0.9718,
+ "step": 2804
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8408923512843837,
+ "learning_rate": 9.200033126899585e-06,
+ "loss": 0.9097,
+ "step": 2805
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9072157323548091,
+ "learning_rate": 9.193821173434843e-06,
+ "loss": 0.9807,
+ "step": 2806
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.794629396119892,
+ "learning_rate": 9.187609533080668e-06,
+ "loss": 0.8434,
+ "step": 2807
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.881701854743868,
+ "learning_rate": 9.181398208249583e-06,
+ "loss": 0.9036,
+ "step": 2808
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0559950308849337,
+ "learning_rate": 9.175187201354005e-06,
+ "loss": 0.9313,
+ "step": 2809
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8293751570171252,
+ "learning_rate": 9.168976514806216e-06,
+ "loss": 0.86,
+ "step": 2810
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9534488534755416,
+ "learning_rate": 9.162766151018372e-06,
+ "loss": 0.9765,
+ "step": 2811
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8627872331921922,
+ "learning_rate": 9.156556112402508e-06,
+ "loss": 0.9373,
+ "step": 2812
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9603261805794119,
+ "learning_rate": 9.150346401370528e-06,
+ "loss": 0.9306,
+ "step": 2813
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7963613769807456,
+ "learning_rate": 9.144137020334214e-06,
+ "loss": 0.895,
+ "step": 2814
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9154259134690524,
+ "learning_rate": 9.137927971705223e-06,
+ "loss": 0.922,
+ "step": 2815
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9419590483955624,
+ "learning_rate": 9.131719257895074e-06,
+ "loss": 0.9185,
+ "step": 2816
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9121695497112485,
+ "learning_rate": 9.125510881315159e-06,
+ "loss": 0.835,
+ "step": 2817
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9862344867334731,
+ "learning_rate": 9.119302844376741e-06,
+ "loss": 0.8965,
+ "step": 2818
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0839036206932122,
+ "learning_rate": 9.113095149490951e-06,
+ "loss": 0.9146,
+ "step": 2819
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8693606257602265,
+ "learning_rate": 9.106887799068782e-06,
+ "loss": 0.9378,
+ "step": 2820
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9156228829443188,
+ "learning_rate": 9.100680795521104e-06,
+ "loss": 0.9269,
+ "step": 2821
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7762666934128619,
+ "learning_rate": 9.09447414125864e-06,
+ "loss": 0.7938,
+ "step": 2822
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0825736731745133,
+ "learning_rate": 9.088267838691987e-06,
+ "loss": 0.8761,
+ "step": 2823
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8753577519078445,
+ "learning_rate": 9.0820618902316e-06,
+ "loss": 0.8891,
+ "step": 2824
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8354771501831556,
+ "learning_rate": 9.075856298287796e-06,
+ "loss": 0.8822,
+ "step": 2825
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.119267435279118,
+ "learning_rate": 9.069651065270753e-06,
+ "loss": 0.9486,
+ "step": 2826
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.93763987645099,
+ "learning_rate": 9.06344619359052e-06,
+ "loss": 0.9732,
+ "step": 2827
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9530326312872509,
+ "learning_rate": 9.057241685656995e-06,
+ "loss": 0.9508,
+ "step": 2828
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9085354026670516,
+ "learning_rate": 9.051037543879933e-06,
+ "loss": 0.8433,
+ "step": 2829
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8443760752540921,
+ "learning_rate": 9.044833770668957e-06,
+ "loss": 0.8847,
+ "step": 2830
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.048973249716788,
+ "learning_rate": 9.038630368433537e-06,
+ "loss": 0.8257,
+ "step": 2831
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.179033435596135,
+ "learning_rate": 9.032427339583e-06,
+ "loss": 0.9361,
+ "step": 2832
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8288263919061417,
+ "learning_rate": 9.026224686526539e-06,
+ "loss": 0.9023,
+ "step": 2833
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0334949608567428,
+ "learning_rate": 9.020022411673186e-06,
+ "loss": 0.9181,
+ "step": 2834
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9239114578599628,
+ "learning_rate": 9.013820517431841e-06,
+ "loss": 0.8856,
+ "step": 2835
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9794833723909543,
+ "learning_rate": 9.00761900621124e-06,
+ "loss": 0.8985,
+ "step": 2836
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8084656437114732,
+ "learning_rate": 9.00141788041998e-06,
+ "loss": 0.8729,
+ "step": 2837
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9206779727809926,
+ "learning_rate": 8.99521714246651e-06,
+ "loss": 0.9039,
+ "step": 2838
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9466698208250063,
+ "learning_rate": 8.989016794759127e-06,
+ "loss": 0.947,
+ "step": 2839
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8747635898786159,
+ "learning_rate": 8.98281683970597e-06,
+ "loss": 0.9588,
+ "step": 2840
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9499809821242415,
+ "learning_rate": 8.97661727971503e-06,
+ "loss": 0.9875,
+ "step": 2841
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9813451295934587,
+ "learning_rate": 8.970418117194146e-06,
+ "loss": 0.9868,
+ "step": 2842
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8319625168492422,
+ "learning_rate": 8.964219354550999e-06,
+ "loss": 0.8635,
+ "step": 2843
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9634967464584698,
+ "learning_rate": 8.958020994193124e-06,
+ "loss": 0.9198,
+ "step": 2844
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8756053522366924,
+ "learning_rate": 8.951823038527887e-06,
+ "loss": 0.8431,
+ "step": 2845
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.813234190508048,
+ "learning_rate": 8.945625489962503e-06,
+ "loss": 0.9237,
+ "step": 2846
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8894422679044989,
+ "learning_rate": 8.93942835090403e-06,
+ "loss": 0.8921,
+ "step": 2847
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9217423883117492,
+ "learning_rate": 8.933231623759365e-06,
+ "loss": 0.9487,
+ "step": 2848
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9146171666403465,
+ "learning_rate": 8.927035310935241e-06,
+ "loss": 0.9024,
+ "step": 2849
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8417513080210457,
+ "learning_rate": 8.920839414838243e-06,
+ "loss": 0.9186,
+ "step": 2850
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.2267848524369835,
+ "learning_rate": 8.914643937874778e-06,
+ "loss": 0.9407,
+ "step": 2851
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8389995837398306,
+ "learning_rate": 8.908448882451104e-06,
+ "loss": 0.8918,
+ "step": 2852
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9433767153496767,
+ "learning_rate": 8.902254250973306e-06,
+ "loss": 0.9566,
+ "step": 2853
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9389439152504334,
+ "learning_rate": 8.896060045847305e-06,
+ "loss": 0.9269,
+ "step": 2854
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.844402795827549,
+ "learning_rate": 8.88986626947886e-06,
+ "loss": 0.8804,
+ "step": 2855
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0600064630310688,
+ "learning_rate": 8.883672924273566e-06,
+ "loss": 0.9598,
+ "step": 2856
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.6784829967907909,
+ "learning_rate": 8.877480012636847e-06,
+ "loss": 0.7513,
+ "step": 2857
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0334544574713502,
+ "learning_rate": 8.871287536973954e-06,
+ "loss": 0.8995,
+ "step": 2858
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8453952904110601,
+ "learning_rate": 8.865095499689978e-06,
+ "loss": 0.8863,
+ "step": 2859
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8539837508497696,
+ "learning_rate": 8.85890390318983e-06,
+ "loss": 0.9112,
+ "step": 2860
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8926906415651625,
+ "learning_rate": 8.852712749878255e-06,
+ "loss": 0.897,
+ "step": 2861
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9204825004812238,
+ "learning_rate": 8.846522042159833e-06,
+ "loss": 0.9986,
+ "step": 2862
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.744174296794978,
+ "learning_rate": 8.840331782438954e-06,
+ "loss": 0.7873,
+ "step": 2863
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.1747498673881946,
+ "learning_rate": 8.83414197311985e-06,
+ "loss": 0.8873,
+ "step": 2864
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9157770776285465,
+ "learning_rate": 8.82795261660657e-06,
+ "loss": 0.9341,
+ "step": 2865
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9074716814296028,
+ "learning_rate": 8.821763715302986e-06,
+ "loss": 0.883,
+ "step": 2866
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8922576125374475,
+ "learning_rate": 8.815575271612798e-06,
+ "loss": 0.9238,
+ "step": 2867
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9308080809460032,
+ "learning_rate": 8.809387287939528e-06,
+ "loss": 0.934,
+ "step": 2868
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9147273764859372,
+ "learning_rate": 8.803199766686517e-06,
+ "loss": 0.8992,
+ "step": 2869
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.7531981921776345,
+ "learning_rate": 8.797012710256923e-06,
+ "loss": 0.8735,
+ "step": 2870
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0309196915368306,
+ "learning_rate": 8.790826121053732e-06,
+ "loss": 0.8937,
+ "step": 2871
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9070125605631589,
+ "learning_rate": 8.784640001479741e-06,
+ "loss": 0.9289,
+ "step": 2872
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8559782464324585,
+ "learning_rate": 8.77845435393757e-06,
+ "loss": 0.8791,
+ "step": 2873
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8178121579699371,
+ "learning_rate": 8.772269180829653e-06,
+ "loss": 0.8856,
+ "step": 2874
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9313980584987472,
+ "learning_rate": 8.766084484558237e-06,
+ "loss": 0.9493,
+ "step": 2875
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8333348349809776,
+ "learning_rate": 8.759900267525393e-06,
+ "loss": 0.8542,
+ "step": 2876
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9603039279388584,
+ "learning_rate": 8.753716532132992e-06,
+ "loss": 0.9992,
+ "step": 2877
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.950029970634691,
+ "learning_rate": 8.747533280782725e-06,
+ "loss": 0.8652,
+ "step": 2878
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9773336212744834,
+ "learning_rate": 8.741350515876103e-06,
+ "loss": 0.8776,
+ "step": 2879
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8276593223512142,
+ "learning_rate": 8.73516823981444e-06,
+ "loss": 0.9077,
+ "step": 2880
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9034896037267094,
+ "learning_rate": 8.728986454998858e-06,
+ "loss": 0.9058,
+ "step": 2881
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9392772165873345,
+ "learning_rate": 8.72280516383029e-06,
+ "loss": 0.9747,
+ "step": 2882
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8815737209828033,
+ "learning_rate": 8.716624368709477e-06,
+ "loss": 0.875,
+ "step": 2883
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9606247786553922,
+ "learning_rate": 8.71044407203697e-06,
+ "loss": 0.991,
+ "step": 2884
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9446559513437827,
+ "learning_rate": 8.70426427621313e-06,
+ "loss": 0.8921,
+ "step": 2885
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8854582313261097,
+ "learning_rate": 8.698084983638111e-06,
+ "loss": 0.943,
+ "step": 2886
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9068908634924546,
+ "learning_rate": 8.691906196711884e-06,
+ "loss": 0.8308,
+ "step": 2887
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8362728007947356,
+ "learning_rate": 8.685727917834218e-06,
+ "loss": 0.8482,
+ "step": 2888
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9741735340286811,
+ "learning_rate": 8.679550149404685e-06,
+ "loss": 0.9479,
+ "step": 2889
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8881434627629128,
+ "learning_rate": 8.673372893822653e-06,
+ "loss": 0.9318,
+ "step": 2890
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8344551673873453,
+ "learning_rate": 8.667196153487308e-06,
+ "loss": 0.9383,
+ "step": 2891
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8569145363538877,
+ "learning_rate": 8.661019930797615e-06,
+ "loss": 0.9245,
+ "step": 2892
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9448224962204009,
+ "learning_rate": 8.654844228152355e-06,
+ "loss": 0.8776,
+ "step": 2893
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.787582574984948,
+ "learning_rate": 8.648669047950097e-06,
+ "loss": 0.9104,
+ "step": 2894
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0189834574150762,
+ "learning_rate": 8.642494392589206e-06,
+ "loss": 0.935,
+ "step": 2895
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9670673184765097,
+ "learning_rate": 8.63632026446785e-06,
+ "loss": 0.9096,
+ "step": 2896
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9127132772167057,
+ "learning_rate": 8.630146665983993e-06,
+ "loss": 0.9214,
+ "step": 2897
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.01447795228686,
+ "learning_rate": 8.623973599535385e-06,
+ "loss": 0.9991,
+ "step": 2898
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.7884176888726323,
+ "learning_rate": 8.617801067519575e-06,
+ "loss": 0.8353,
+ "step": 2899
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9576881407108104,
+ "learning_rate": 8.611629072333905e-06,
+ "loss": 0.9054,
+ "step": 2900
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0163616995408742,
+ "learning_rate": 8.605457616375503e-06,
+ "loss": 0.9164,
+ "step": 2901
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9594349638982683,
+ "learning_rate": 8.599286702041292e-06,
+ "loss": 0.934,
+ "step": 2902
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8551254277566125,
+ "learning_rate": 8.593116331727987e-06,
+ "loss": 0.7898,
+ "step": 2903
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8816328371922619,
+ "learning_rate": 8.586946507832088e-06,
+ "loss": 0.9228,
+ "step": 2904
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9348980190557928,
+ "learning_rate": 8.580777232749883e-06,
+ "loss": 0.8273,
+ "step": 2905
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9957423487884581,
+ "learning_rate": 8.574608508877448e-06,
+ "loss": 0.8281,
+ "step": 2906
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9969978056295261,
+ "learning_rate": 8.568440338610638e-06,
+ "loss": 0.976,
+ "step": 2907
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.876867638214715,
+ "learning_rate": 8.562272724345108e-06,
+ "loss": 0.9308,
+ "step": 2908
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9341130376137358,
+ "learning_rate": 8.556105668476287e-06,
+ "loss": 0.9055,
+ "step": 2909
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8886772995301155,
+ "learning_rate": 8.549939173399385e-06,
+ "loss": 0.9242,
+ "step": 2910
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.145987576882531,
+ "learning_rate": 8.5437732415094e-06,
+ "loss": 0.9278,
+ "step": 2911
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.001135534362951,
+ "learning_rate": 8.537607875201106e-06,
+ "loss": 0.9237,
+ "step": 2912
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.00581843353021,
+ "learning_rate": 8.531443076869058e-06,
+ "loss": 0.8819,
+ "step": 2913
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.917057557817771,
+ "learning_rate": 8.525278848907603e-06,
+ "loss": 0.9391,
+ "step": 2914
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8010704739390195,
+ "learning_rate": 8.51911519371085e-06,
+ "loss": 0.8336,
+ "step": 2915
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9543683443874903,
+ "learning_rate": 8.512952113672689e-06,
+ "loss": 0.9527,
+ "step": 2916
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9836839930871281,
+ "learning_rate": 8.506789611186794e-06,
+ "loss": 1.0222,
+ "step": 2917
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9246133369200561,
+ "learning_rate": 8.500627688646607e-06,
+ "loss": 0.8902,
+ "step": 2918
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9183078537742924,
+ "learning_rate": 8.494466348445345e-06,
+ "loss": 0.9479,
+ "step": 2919
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8530193745001196,
+ "learning_rate": 8.48830559297601e-06,
+ "loss": 0.9091,
+ "step": 2920
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9629520731309305,
+ "learning_rate": 8.48214542463136e-06,
+ "loss": 0.9364,
+ "step": 2921
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9150014733977437,
+ "learning_rate": 8.475985845803938e-06,
+ "loss": 0.9356,
+ "step": 2922
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8261063375868216,
+ "learning_rate": 8.469826858886054e-06,
+ "loss": 0.9337,
+ "step": 2923
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.1234486881091612,
+ "learning_rate": 8.463668466269785e-06,
+ "loss": 0.9349,
+ "step": 2924
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.928738141263137,
+ "learning_rate": 8.457510670346976e-06,
+ "loss": 0.9074,
+ "step": 2925
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9477202246359326,
+ "learning_rate": 8.451353473509254e-06,
+ "loss": 0.926,
+ "step": 2926
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8883431515706409,
+ "learning_rate": 8.445196878147997e-06,
+ "loss": 0.8463,
+ "step": 2927
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0376361772455147,
+ "learning_rate": 8.439040886654354e-06,
+ "loss": 0.9228,
+ "step": 2928
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9247536779865697,
+ "learning_rate": 8.432885501419248e-06,
+ "loss": 0.9573,
+ "step": 2929
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9317466580032667,
+ "learning_rate": 8.426730724833354e-06,
+ "loss": 0.906,
+ "step": 2930
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.82375627690957,
+ "learning_rate": 8.420576559287112e-06,
+ "loss": 0.8826,
+ "step": 2931
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9526502240708186,
+ "learning_rate": 8.414423007170742e-06,
+ "loss": 0.9399,
+ "step": 2932
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8863593158901537,
+ "learning_rate": 8.408270070874201e-06,
+ "loss": 0.9036,
+ "step": 2933
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9542485239121139,
+ "learning_rate": 8.402117752787225e-06,
+ "loss": 0.8611,
+ "step": 2934
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8041770339354015,
+ "learning_rate": 8.395966055299302e-06,
+ "loss": 0.9374,
+ "step": 2935
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9375165105895691,
+ "learning_rate": 8.389814980799679e-06,
+ "loss": 0.9006,
+ "step": 2936
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8974970160694713,
+ "learning_rate": 8.38366453167736e-06,
+ "loss": 0.9241,
+ "step": 2937
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8633372528156041,
+ "learning_rate": 8.377514710321117e-06,
+ "loss": 0.8905,
+ "step": 2938
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.2666441679371878,
+ "learning_rate": 8.371365519119463e-06,
+ "loss": 0.8391,
+ "step": 2939
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.820787530828404,
+ "learning_rate": 8.365216960460675e-06,
+ "loss": 0.8962,
+ "step": 2940
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9517866276180234,
+ "learning_rate": 8.359069036732781e-06,
+ "loss": 0.8873,
+ "step": 2941
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.772823993517953,
+ "learning_rate": 8.352921750323562e-06,
+ "loss": 0.8076,
+ "step": 2942
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8592855535633733,
+ "learning_rate": 8.346775103620559e-06,
+ "loss": 0.8369,
+ "step": 2943
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.926525611699989,
+ "learning_rate": 8.340629099011057e-06,
+ "loss": 0.9258,
+ "step": 2944
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8708002316010192,
+ "learning_rate": 8.33448373888209e-06,
+ "loss": 0.91,
+ "step": 2945
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8906604500719437,
+ "learning_rate": 8.328339025620449e-06,
+ "loss": 0.9044,
+ "step": 2946
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8196381043263796,
+ "learning_rate": 8.322194961612668e-06,
+ "loss": 0.8912,
+ "step": 2947
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0369291216248417,
+ "learning_rate": 8.316051549245026e-06,
+ "loss": 0.8968,
+ "step": 2948
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9201713446981273,
+ "learning_rate": 8.309908790903562e-06,
+ "loss": 0.8893,
+ "step": 2949
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0316530698872768,
+ "learning_rate": 8.303766688974047e-06,
+ "loss": 0.8754,
+ "step": 2950
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9670015715635107,
+ "learning_rate": 8.297625245842006e-06,
+ "loss": 0.8962,
+ "step": 2951
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.127719573890693,
+ "learning_rate": 8.291484463892703e-06,
+ "loss": 0.9977,
+ "step": 2952
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8423958904618566,
+ "learning_rate": 8.285344345511147e-06,
+ "loss": 0.82,
+ "step": 2953
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9449759772993279,
+ "learning_rate": 8.279204893082083e-06,
+ "loss": 0.9218,
+ "step": 2954
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9476765061050989,
+ "learning_rate": 8.273066108990017e-06,
+ "loss": 0.9065,
+ "step": 2955
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8195802129234587,
+ "learning_rate": 8.266927995619175e-06,
+ "loss": 0.8263,
+ "step": 2956
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0156084747070426,
+ "learning_rate": 8.260790555353526e-06,
+ "loss": 0.9908,
+ "step": 2957
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9602270068068365,
+ "learning_rate": 8.254653790576787e-06,
+ "loss": 0.9454,
+ "step": 2958
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8994601338336368,
+ "learning_rate": 8.248517703672405e-06,
+ "loss": 0.8763,
+ "step": 2959
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0717303600310182,
+ "learning_rate": 8.242382297023558e-06,
+ "loss": 0.8798,
+ "step": 2960
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9225476897467404,
+ "learning_rate": 8.23624757301318e-06,
+ "loss": 0.9841,
+ "step": 2961
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8722961671024559,
+ "learning_rate": 8.230113534023917e-06,
+ "loss": 0.9688,
+ "step": 2962
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8820786767795896,
+ "learning_rate": 8.223980182438167e-06,
+ "loss": 0.9081,
+ "step": 2963
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.915948991827674,
+ "learning_rate": 8.217847520638049e-06,
+ "loss": 0.9125,
+ "step": 2964
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7711659906220023,
+ "learning_rate": 8.211715551005414e-06,
+ "loss": 0.87,
+ "step": 2965
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7208004616960314,
+ "learning_rate": 8.205584275921854e-06,
+ "loss": 0.8562,
+ "step": 2966
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8564172490588439,
+ "learning_rate": 8.199453697768686e-06,
+ "loss": 0.9142,
+ "step": 2967
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8332655873707439,
+ "learning_rate": 8.193323818926955e-06,
+ "loss": 0.8856,
+ "step": 2968
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9321441739062402,
+ "learning_rate": 8.187194641777432e-06,
+ "loss": 0.9501,
+ "step": 2969
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8628057416575993,
+ "learning_rate": 8.181066168700622e-06,
+ "loss": 0.9378,
+ "step": 2970
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9743743273340152,
+ "learning_rate": 8.174938402076754e-06,
+ "loss": 0.8136,
+ "step": 2971
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9026157758169897,
+ "learning_rate": 8.168811344285776e-06,
+ "loss": 0.8641,
+ "step": 2972
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8727927232715033,
+ "learning_rate": 8.162684997707374e-06,
+ "loss": 0.8211,
+ "step": 2973
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9911478830586278,
+ "learning_rate": 8.156559364720947e-06,
+ "loss": 1.0118,
+ "step": 2974
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7843381099959617,
+ "learning_rate": 8.150434447705623e-06,
+ "loss": 0.8707,
+ "step": 2975
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8063504227265889,
+ "learning_rate": 8.144310249040246e-06,
+ "loss": 0.8908,
+ "step": 2976
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9773390991304581,
+ "learning_rate": 8.138186771103382e-06,
+ "loss": 0.8714,
+ "step": 2977
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7600985299663205,
+ "learning_rate": 8.132064016273325e-06,
+ "loss": 0.8824,
+ "step": 2978
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9792340697758263,
+ "learning_rate": 8.12594198692808e-06,
+ "loss": 0.9535,
+ "step": 2979
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9187429072935889,
+ "learning_rate": 8.119820685445373e-06,
+ "loss": 0.9276,
+ "step": 2980
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9969402451636438,
+ "learning_rate": 8.113700114202647e-06,
+ "loss": 0.9076,
+ "step": 2981
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.082801295447143,
+ "learning_rate": 8.107580275577059e-06,
+ "loss": 0.8977,
+ "step": 2982
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7502599206190357,
+ "learning_rate": 8.101461171945483e-06,
+ "loss": 0.8441,
+ "step": 2983
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.850821292645756,
+ "learning_rate": 8.095342805684516e-06,
+ "loss": 0.9256,
+ "step": 2984
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8836354861206998,
+ "learning_rate": 8.089225179170454e-06,
+ "loss": 0.8751,
+ "step": 2985
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8600873590187288,
+ "learning_rate": 8.083108294779313e-06,
+ "loss": 0.845,
+ "step": 2986
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8430653652994249,
+ "learning_rate": 8.076992154886826e-06,
+ "loss": 0.8971,
+ "step": 2987
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.844586736484448,
+ "learning_rate": 8.070876761868426e-06,
+ "loss": 0.886,
+ "step": 2988
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8914725901338992,
+ "learning_rate": 8.064762118099258e-06,
+ "loss": 0.8982,
+ "step": 2989
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9809066551606824,
+ "learning_rate": 8.058648225954188e-06,
+ "loss": 0.9422,
+ "step": 2990
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.926556390850543,
+ "learning_rate": 8.052535087807774e-06,
+ "loss": 0.9251,
+ "step": 2991
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9913252703493083,
+ "learning_rate": 8.046422706034294e-06,
+ "loss": 0.901,
+ "step": 2992
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7895189537186987,
+ "learning_rate": 8.040311083007725e-06,
+ "loss": 0.8319,
+ "step": 2993
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9064662414334193,
+ "learning_rate": 8.034200221101746e-06,
+ "loss": 0.908,
+ "step": 2994
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9671581149740855,
+ "learning_rate": 8.028090122689747e-06,
+ "loss": 0.9788,
+ "step": 2995
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8589025217308112,
+ "learning_rate": 8.021980790144828e-06,
+ "loss": 0.9424,
+ "step": 2996
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8812776212789514,
+ "learning_rate": 8.015872225839776e-06,
+ "loss": 0.9343,
+ "step": 2997
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9210985637272886,
+ "learning_rate": 8.009764432147086e-06,
+ "loss": 0.8646,
+ "step": 2998
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8524624418946697,
+ "learning_rate": 8.003657411438961e-06,
+ "loss": 0.8807,
+ "step": 2999
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9322844937972518,
+ "learning_rate": 7.997551166087293e-06,
+ "loss": 0.9728,
+ "step": 3000
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8987802060488909,
+ "learning_rate": 7.991445698463672e-06,
+ "loss": 0.9034,
+ "step": 3001
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8033725920340877,
+ "learning_rate": 7.985341010939402e-06,
+ "loss": 0.8597,
+ "step": 3002
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9193113456445283,
+ "learning_rate": 7.979237105885467e-06,
+ "loss": 0.9123,
+ "step": 3003
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8034726161218648,
+ "learning_rate": 7.973133985672558e-06,
+ "loss": 0.8147,
+ "step": 3004
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8906403744671403,
+ "learning_rate": 7.967031652671051e-06,
+ "loss": 0.8896,
+ "step": 3005
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.835282740289833,
+ "learning_rate": 7.960930109251023e-06,
+ "loss": 0.8467,
+ "step": 3006
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.6972517600203672,
+ "learning_rate": 7.954829357782243e-06,
+ "loss": 0.8043,
+ "step": 3007
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8733693253852634,
+ "learning_rate": 7.948729400634178e-06,
+ "loss": 0.8672,
+ "step": 3008
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9411510298654441,
+ "learning_rate": 7.942630240175977e-06,
+ "loss": 0.8477,
+ "step": 3009
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9889369274243794,
+ "learning_rate": 7.936531878776484e-06,
+ "loss": 0.8682,
+ "step": 3010
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8348359655805895,
+ "learning_rate": 7.930434318804229e-06,
+ "loss": 0.9156,
+ "step": 3011
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.950131889080997,
+ "learning_rate": 7.924337562627435e-06,
+ "loss": 0.9302,
+ "step": 3012
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9669111851343594,
+ "learning_rate": 7.918241612614016e-06,
+ "loss": 0.9828,
+ "step": 3013
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7858071556014813,
+ "learning_rate": 7.91214647113157e-06,
+ "loss": 0.8712,
+ "step": 3014
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.921558614766626,
+ "learning_rate": 7.906052140547373e-06,
+ "loss": 0.8532,
+ "step": 3015
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8645095232352292,
+ "learning_rate": 7.899958623228398e-06,
+ "loss": 0.8658,
+ "step": 3016
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9669464202780016,
+ "learning_rate": 7.893865921541294e-06,
+ "loss": 0.9128,
+ "step": 3017
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9523586546091055,
+ "learning_rate": 7.887774037852395e-06,
+ "loss": 0.8839,
+ "step": 3018
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9974613025553319,
+ "learning_rate": 7.881682974527723e-06,
+ "loss": 0.9103,
+ "step": 3019
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8878953765185238,
+ "learning_rate": 7.875592733932972e-06,
+ "loss": 0.8983,
+ "step": 3020
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7812720230097898,
+ "learning_rate": 7.869503318433529e-06,
+ "loss": 0.8616,
+ "step": 3021
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9254796511666435,
+ "learning_rate": 7.863414730394444e-06,
+ "loss": 0.9434,
+ "step": 3022
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.833929544183009,
+ "learning_rate": 7.857326972180455e-06,
+ "loss": 0.886,
+ "step": 3023
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9328700838673941,
+ "learning_rate": 7.85124004615598e-06,
+ "loss": 0.9408,
+ "step": 3024
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9465670044584954,
+ "learning_rate": 7.845153954685114e-06,
+ "loss": 0.9217,
+ "step": 3025
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8522265655994306,
+ "learning_rate": 7.839068700131623e-06,
+ "loss": 0.932,
+ "step": 3026
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8937138277620927,
+ "learning_rate": 7.832984284858946e-06,
+ "loss": 0.887,
+ "step": 3027
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9977556779432588,
+ "learning_rate": 7.826900711230204e-06,
+ "loss": 0.9264,
+ "step": 3028
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8606018880527176,
+ "learning_rate": 7.820817981608185e-06,
+ "loss": 0.9251,
+ "step": 3029
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8530278334154929,
+ "learning_rate": 7.814736098355348e-06,
+ "loss": 0.8695,
+ "step": 3030
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8567011058642134,
+ "learning_rate": 7.808655063833832e-06,
+ "loss": 0.9329,
+ "step": 3031
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9280510205178197,
+ "learning_rate": 7.802574880405438e-06,
+ "loss": 0.9373,
+ "step": 3032
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9848501247592024,
+ "learning_rate": 7.79649555043164e-06,
+ "loss": 0.9181,
+ "step": 3033
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 1.0297492133153854,
+ "learning_rate": 7.790417076273581e-06,
+ "loss": 0.8964,
+ "step": 3034
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8697680836383845,
+ "learning_rate": 7.784339460292065e-06,
+ "loss": 0.8573,
+ "step": 3035
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.842781609719924,
+ "learning_rate": 7.77826270484757e-06,
+ "loss": 0.9054,
+ "step": 3036
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.740688518386285,
+ "learning_rate": 7.772186812300244e-06,
+ "loss": 0.7684,
+ "step": 3037
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8696552210515258,
+ "learning_rate": 7.766111785009888e-06,
+ "loss": 0.9298,
+ "step": 3038
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9294178462954537,
+ "learning_rate": 7.760037625335973e-06,
+ "loss": 0.8719,
+ "step": 3039
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9869254713552199,
+ "learning_rate": 7.753964335637634e-06,
+ "loss": 0.9393,
+ "step": 3040
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8975835600692645,
+ "learning_rate": 7.747891918273668e-06,
+ "loss": 0.9443,
+ "step": 3041
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8518753300133183,
+ "learning_rate": 7.741820375602524e-06,
+ "loss": 0.8875,
+ "step": 3042
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8208267461969093,
+ "learning_rate": 7.735749709982329e-06,
+ "loss": 0.8864,
+ "step": 3043
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0689326764845735,
+ "learning_rate": 7.729679923770855e-06,
+ "loss": 0.8713,
+ "step": 3044
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.895283749784096,
+ "learning_rate": 7.723611019325538e-06,
+ "loss": 0.8723,
+ "step": 3045
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9888518115529736,
+ "learning_rate": 7.71754299900347e-06,
+ "loss": 0.9657,
+ "step": 3046
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9619888771560674,
+ "learning_rate": 7.7114758651614e-06,
+ "loss": 0.8995,
+ "step": 3047
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.867741561085671,
+ "learning_rate": 7.705409620155733e-06,
+ "loss": 0.9001,
+ "step": 3048
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0893561569683614,
+ "learning_rate": 7.699344266342529e-06,
+ "loss": 1.0243,
+ "step": 3049
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0069293978702367,
+ "learning_rate": 7.693279806077504e-06,
+ "loss": 0.9075,
+ "step": 3050
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9863133056396031,
+ "learning_rate": 7.68721624171602e-06,
+ "loss": 0.8428,
+ "step": 3051
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.7953276992499636,
+ "learning_rate": 7.681153575613098e-06,
+ "loss": 0.8109,
+ "step": 3052
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8845937432453769,
+ "learning_rate": 7.675091810123404e-06,
+ "loss": 0.9383,
+ "step": 3053
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.938155446305834,
+ "learning_rate": 7.669030947601265e-06,
+ "loss": 0.9052,
+ "step": 3054
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8182078771237827,
+ "learning_rate": 7.662970990400647e-06,
+ "loss": 0.8712,
+ "step": 3055
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8933854877025341,
+ "learning_rate": 7.656911940875163e-06,
+ "loss": 0.9474,
+ "step": 3056
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9449257667933663,
+ "learning_rate": 7.650853801378084e-06,
+ "loss": 0.8568,
+ "step": 3057
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0495579740142698,
+ "learning_rate": 7.644796574262322e-06,
+ "loss": 0.8806,
+ "step": 3058
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8508073914566813,
+ "learning_rate": 7.638740261880423e-06,
+ "loss": 0.8947,
+ "step": 3059
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9335694394677336,
+ "learning_rate": 7.632684866584606e-06,
+ "loss": 0.8983,
+ "step": 3060
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9733709525549523,
+ "learning_rate": 7.626630390726704e-06,
+ "loss": 0.9256,
+ "step": 3061
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.863455601871164,
+ "learning_rate": 7.620576836658212e-06,
+ "loss": 0.9206,
+ "step": 3062
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9586497646142664,
+ "learning_rate": 7.61452420673026e-06,
+ "loss": 0.8627,
+ "step": 3063
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8301599479065566,
+ "learning_rate": 7.608472503293615e-06,
+ "loss": 0.8164,
+ "step": 3064
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8892417177178197,
+ "learning_rate": 7.60242172869869e-06,
+ "loss": 0.9191,
+ "step": 3065
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8788846823907092,
+ "learning_rate": 7.596371885295542e-06,
+ "loss": 0.9064,
+ "step": 3066
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8370510726959535,
+ "learning_rate": 7.590322975433857e-06,
+ "loss": 0.8804,
+ "step": 3067
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9979788275045329,
+ "learning_rate": 7.584275001462961e-06,
+ "loss": 1.0111,
+ "step": 3068
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0235967334786231,
+ "learning_rate": 7.578227965731819e-06,
+ "loss": 0.8809,
+ "step": 3069
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.85254860792901,
+ "learning_rate": 7.572181870589028e-06,
+ "loss": 0.9018,
+ "step": 3070
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9517331058693642,
+ "learning_rate": 7.566136718382821e-06,
+ "loss": 0.8162,
+ "step": 3071
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9077953953883825,
+ "learning_rate": 7.560092511461069e-06,
+ "loss": 0.9436,
+ "step": 3072
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8866841954375853,
+ "learning_rate": 7.55404925217127e-06,
+ "loss": 0.9202,
+ "step": 3073
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.6935749539849467,
+ "learning_rate": 7.548006942860557e-06,
+ "loss": 0.8192,
+ "step": 3074
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0514487012201703,
+ "learning_rate": 7.541965585875695e-06,
+ "loss": 0.9739,
+ "step": 3075
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0378129702046215,
+ "learning_rate": 7.535925183563073e-06,
+ "loss": 0.8681,
+ "step": 3076
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9129414279586064,
+ "learning_rate": 7.529885738268714e-06,
+ "loss": 0.9303,
+ "step": 3077
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0734826745723056,
+ "learning_rate": 7.523847252338274e-06,
+ "loss": 0.8529,
+ "step": 3078
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9364323229864077,
+ "learning_rate": 7.51780972811703e-06,
+ "loss": 0.9117,
+ "step": 3079
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9019671873323886,
+ "learning_rate": 7.511773167949885e-06,
+ "loss": 0.8917,
+ "step": 3080
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.058335285683823,
+ "learning_rate": 7.5057375741813685e-06,
+ "loss": 0.964,
+ "step": 3081
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8754304852426065,
+ "learning_rate": 7.499702949155634e-06,
+ "loss": 0.8679,
+ "step": 3082
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8626888810903242,
+ "learning_rate": 7.493669295216467e-06,
+ "loss": 0.8742,
+ "step": 3083
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9446940901693937,
+ "learning_rate": 7.487636614707265e-06,
+ "loss": 0.9437,
+ "step": 3084
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.913647491487289,
+ "learning_rate": 7.48160490997105e-06,
+ "loss": 0.9122,
+ "step": 3085
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9817417389844753,
+ "learning_rate": 7.475574183350471e-06,
+ "loss": 0.9347,
+ "step": 3086
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9144861253958118,
+ "learning_rate": 7.46954443718779e-06,
+ "loss": 0.9046,
+ "step": 3087
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9404858654589225,
+ "learning_rate": 7.463515673824888e-06,
+ "loss": 0.938,
+ "step": 3088
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.1091536549587875,
+ "learning_rate": 7.457487895603273e-06,
+ "loss": 0.8852,
+ "step": 3089
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9086534849901398,
+ "learning_rate": 7.451461104864061e-06,
+ "loss": 0.9179,
+ "step": 3090
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8755976989663699,
+ "learning_rate": 7.44543530394799e-06,
+ "loss": 0.8824,
+ "step": 3091
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.090908089694531,
+ "learning_rate": 7.439410495195411e-06,
+ "loss": 0.9011,
+ "step": 3092
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8955078572197323,
+ "learning_rate": 7.433386680946288e-06,
+ "loss": 0.9086,
+ "step": 3093
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8913704610752187,
+ "learning_rate": 7.427363863540202e-06,
+ "loss": 0.8652,
+ "step": 3094
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8465243680237361,
+ "learning_rate": 7.421342045316351e-06,
+ "loss": 0.9402,
+ "step": 3095
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0618151973993595,
+ "learning_rate": 7.415321228613534e-06,
+ "loss": 0.9194,
+ "step": 3096
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8799346592072748,
+ "learning_rate": 7.409301415770168e-06,
+ "loss": 0.7974,
+ "step": 3097
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.87647071998454,
+ "learning_rate": 7.403282609124281e-06,
+ "loss": 0.8938,
+ "step": 3098
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9524293226024881,
+ "learning_rate": 7.397264811013507e-06,
+ "loss": 0.9343,
+ "step": 3099
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9717560944156921,
+ "learning_rate": 7.391248023775084e-06,
+ "loss": 0.8794,
+ "step": 3100
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0427302635252145,
+ "learning_rate": 7.385232249745873e-06,
+ "loss": 0.9443,
+ "step": 3101
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.820156355881189,
+ "learning_rate": 7.379217491262325e-06,
+ "loss": 0.8806,
+ "step": 3102
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9267111425838447,
+ "learning_rate": 7.373203750660505e-06,
+ "loss": 0.9299,
+ "step": 3103
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9198919699398267,
+ "learning_rate": 7.36719103027608e-06,
+ "loss": 0.9293,
+ "step": 3104
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7851749355372527,
+ "learning_rate": 7.361179332444318e-06,
+ "loss": 0.8619,
+ "step": 3105
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.198117464191844,
+ "learning_rate": 7.355168659500094e-06,
+ "loss": 0.958,
+ "step": 3106
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8481077581272879,
+ "learning_rate": 7.3491590137778915e-06,
+ "loss": 0.8884,
+ "step": 3107
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9082379189614874,
+ "learning_rate": 7.343150397611782e-06,
+ "loss": 0.8709,
+ "step": 3108
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9828006656871224,
+ "learning_rate": 7.3371428133354435e-06,
+ "loss": 0.8861,
+ "step": 3109
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8277180308231091,
+ "learning_rate": 7.33113626328215e-06,
+ "loss": 0.8016,
+ "step": 3110
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9207310049046246,
+ "learning_rate": 7.325130749784781e-06,
+ "loss": 0.9476,
+ "step": 3111
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.038531133366016,
+ "learning_rate": 7.3191262751758005e-06,
+ "loss": 0.8901,
+ "step": 3112
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9745778005106581,
+ "learning_rate": 7.3131228417872905e-06,
+ "loss": 0.8654,
+ "step": 3113
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8525421710847984,
+ "learning_rate": 7.307120451950902e-06,
+ "loss": 0.8697,
+ "step": 3114
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8969369945071651,
+ "learning_rate": 7.301119107997905e-06,
+ "loss": 0.9302,
+ "step": 3115
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8704404473958959,
+ "learning_rate": 7.295118812259145e-06,
+ "loss": 0.8736,
+ "step": 3116
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8737931679431764,
+ "learning_rate": 7.289119567065068e-06,
+ "loss": 0.9358,
+ "step": 3117
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.1063117572079268,
+ "learning_rate": 7.2831213747457155e-06,
+ "loss": 0.9179,
+ "step": 3118
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8319200429675896,
+ "learning_rate": 7.2771242376307125e-06,
+ "loss": 0.8865,
+ "step": 3119
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9099261295844384,
+ "learning_rate": 7.271128158049283e-06,
+ "loss": 0.8986,
+ "step": 3120
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9763697004734214,
+ "learning_rate": 7.2651331383302326e-06,
+ "loss": 0.9185,
+ "step": 3121
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8296121035233821,
+ "learning_rate": 7.2591391808019555e-06,
+ "loss": 0.8959,
+ "step": 3122
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7389510031745331,
+ "learning_rate": 7.253146287792434e-06,
+ "loss": 0.8506,
+ "step": 3123
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0449629118250001,
+ "learning_rate": 7.247154461629248e-06,
+ "loss": 0.8943,
+ "step": 3124
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8519819117041794,
+ "learning_rate": 7.241163704639547e-06,
+ "loss": 0.8991,
+ "step": 3125
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.821426699566402,
+ "learning_rate": 7.235174019150071e-06,
+ "loss": 0.8482,
+ "step": 3126
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8530305036432776,
+ "learning_rate": 7.229185407487149e-06,
+ "loss": 0.8998,
+ "step": 3127
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8929154538055173,
+ "learning_rate": 7.2231978719766884e-06,
+ "loss": 0.8968,
+ "step": 3128
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9565356056159762,
+ "learning_rate": 7.217211414944171e-06,
+ "loss": 0.9264,
+ "step": 3129
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8534274162064641,
+ "learning_rate": 7.2112260387146784e-06,
+ "loss": 0.8953,
+ "step": 3130
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8153820143829157,
+ "learning_rate": 7.2052417456128565e-06,
+ "loss": 0.8829,
+ "step": 3131
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8380144354016602,
+ "learning_rate": 7.199258537962936e-06,
+ "loss": 0.8948,
+ "step": 3132
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.798316857097433,
+ "learning_rate": 7.193276418088729e-06,
+ "loss": 0.8475,
+ "step": 3133
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8360752817717011,
+ "learning_rate": 7.187295388313618e-06,
+ "loss": 0.8671,
+ "step": 3134
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9924444575496794,
+ "learning_rate": 7.181315450960562e-06,
+ "loss": 0.9016,
+ "step": 3135
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9045718384715163,
+ "learning_rate": 7.175336608352113e-06,
+ "loss": 0.931,
+ "step": 3136
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.783770071965731,
+ "learning_rate": 7.169358862810374e-06,
+ "loss": 0.899,
+ "step": 3137
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7438253485129284,
+ "learning_rate": 7.163382216657033e-06,
+ "loss": 0.7635,
+ "step": 3138
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9292054173618102,
+ "learning_rate": 7.1574066722133565e-06,
+ "loss": 0.9126,
+ "step": 3139
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8290249820889951,
+ "learning_rate": 7.151432231800173e-06,
+ "loss": 0.8473,
+ "step": 3140
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8724940677184712,
+ "learning_rate": 7.145458897737882e-06,
+ "loss": 0.8825,
+ "step": 3141
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.319161629121295,
+ "learning_rate": 7.139486672346466e-06,
+ "loss": 0.9209,
+ "step": 3142
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8820725742097981,
+ "learning_rate": 7.133515557945463e-06,
+ "loss": 0.9676,
+ "step": 3143
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8919944894918395,
+ "learning_rate": 7.12754555685399e-06,
+ "loss": 0.953,
+ "step": 3144
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.915830556250263,
+ "learning_rate": 7.121576671390722e-06,
+ "loss": 0.8791,
+ "step": 3145
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7867023519579889,
+ "learning_rate": 7.115608903873905e-06,
+ "loss": 0.8592,
+ "step": 3146
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7557530543214772,
+ "learning_rate": 7.109642256621353e-06,
+ "loss": 0.8424,
+ "step": 3147
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9538309972611474,
+ "learning_rate": 7.103676731950443e-06,
+ "loss": 0.9423,
+ "step": 3148
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9305136950405283,
+ "learning_rate": 7.0977123321781176e-06,
+ "loss": 0.9213,
+ "step": 3149
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.898409886169043,
+ "learning_rate": 7.091749059620881e-06,
+ "loss": 0.9482,
+ "step": 3150
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9266014619179291,
+ "learning_rate": 7.0857869165947945e-06,
+ "loss": 0.8275,
+ "step": 3151
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7812282045125521,
+ "learning_rate": 7.079825905415491e-06,
+ "loss": 0.878,
+ "step": 3152
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.867731346965853,
+ "learning_rate": 7.073866028398153e-06,
+ "loss": 0.9008,
+ "step": 3153
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.056823648953414,
+ "learning_rate": 7.067907287857535e-06,
+ "loss": 0.86,
+ "step": 3154
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8386302063426734,
+ "learning_rate": 7.061949686107938e-06,
+ "loss": 0.8657,
+ "step": 3155
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8346374978363387,
+ "learning_rate": 7.0559932254632315e-06,
+ "loss": 0.913,
+ "step": 3156
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.942382013735655,
+ "learning_rate": 7.0500379082368305e-06,
+ "loss": 0.8709,
+ "step": 3157
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.095435759704348,
+ "learning_rate": 7.044083736741711e-06,
+ "loss": 0.8939,
+ "step": 3158
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8697809001392831,
+ "learning_rate": 7.03813071329041e-06,
+ "loss": 0.9055,
+ "step": 3159
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9794621291830303,
+ "learning_rate": 7.032178840195009e-06,
+ "loss": 0.8622,
+ "step": 3160
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.930007138571353,
+ "learning_rate": 7.026228119767149e-06,
+ "loss": 0.9294,
+ "step": 3161
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8598086834220618,
+ "learning_rate": 7.020278554318023e-06,
+ "loss": 0.8498,
+ "step": 3162
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.226756798789789,
+ "learning_rate": 7.014330146158367e-06,
+ "loss": 0.9039,
+ "step": 3163
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.989943171604295,
+ "learning_rate": 7.008382897598477e-06,
+ "loss": 0.9167,
+ "step": 3164
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9308190832090322,
+ "learning_rate": 7.002436810948201e-06,
+ "loss": 0.8719,
+ "step": 3165
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9420845340418191,
+ "learning_rate": 6.996491888516927e-06,
+ "loss": 0.9497,
+ "step": 3166
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9185284293256382,
+ "learning_rate": 6.990548132613592e-06,
+ "loss": 0.9822,
+ "step": 3167
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7829390437932643,
+ "learning_rate": 6.984605545546686e-06,
+ "loss": 0.9004,
+ "step": 3168
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9761632074396405,
+ "learning_rate": 6.978664129624241e-06,
+ "loss": 0.9686,
+ "step": 3169
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9062920352008544,
+ "learning_rate": 6.972723887153828e-06,
+ "loss": 0.8849,
+ "step": 3170
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8616495007297676,
+ "learning_rate": 6.9667848204425785e-06,
+ "loss": 0.8719,
+ "step": 3171
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8964126320308036,
+ "learning_rate": 6.960846931797152e-06,
+ "loss": 0.8857,
+ "step": 3172
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9264946122652956,
+ "learning_rate": 6.9549102235237565e-06,
+ "loss": 0.9398,
+ "step": 3173
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9094363996417679,
+ "learning_rate": 6.948974697928144e-06,
+ "loss": 0.8851,
+ "step": 3174
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9513388081881399,
+ "learning_rate": 6.943040357315598e-06,
+ "loss": 0.9803,
+ "step": 3175
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9433967810474849,
+ "learning_rate": 6.9371072039909515e-06,
+ "loss": 0.8724,
+ "step": 3176
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8608695133016676,
+ "learning_rate": 6.931175240258576e-06,
+ "loss": 0.9292,
+ "step": 3177
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9530960651699819,
+ "learning_rate": 6.9252444684223765e-06,
+ "loss": 0.8737,
+ "step": 3178
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.0395046231050402,
+ "learning_rate": 6.919314890785793e-06,
+ "loss": 0.9723,
+ "step": 3179
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8458885636267541,
+ "learning_rate": 6.913386509651807e-06,
+ "loss": 0.9264,
+ "step": 3180
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8955067088058976,
+ "learning_rate": 6.907459327322934e-06,
+ "loss": 0.9081,
+ "step": 3181
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7963619009041655,
+ "learning_rate": 6.90153334610122e-06,
+ "loss": 0.7736,
+ "step": 3182
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8721799260952391,
+ "learning_rate": 6.895608568288255e-06,
+ "loss": 0.8856,
+ "step": 3183
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.152388337452677,
+ "learning_rate": 6.889684996185148e-06,
+ "loss": 0.9011,
+ "step": 3184
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.875052852556438,
+ "learning_rate": 6.88376263209255e-06,
+ "loss": 0.853,
+ "step": 3185
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8642041460444178,
+ "learning_rate": 6.877841478310639e-06,
+ "loss": 0.9209,
+ "step": 3186
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9067189008349694,
+ "learning_rate": 6.871921537139117e-06,
+ "loss": 0.8886,
+ "step": 3187
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8949740080122288,
+ "learning_rate": 6.866002810877224e-06,
+ "loss": 0.8575,
+ "step": 3188
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8072031212579807,
+ "learning_rate": 6.860085301823729e-06,
+ "loss": 0.9466,
+ "step": 3189
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8708895794962733,
+ "learning_rate": 6.854169012276923e-06,
+ "loss": 0.8599,
+ "step": 3190
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9153443616477932,
+ "learning_rate": 6.848253944534622e-06,
+ "loss": 0.9016,
+ "step": 3191
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9449029470758108,
+ "learning_rate": 6.84234010089417e-06,
+ "loss": 0.7901,
+ "step": 3192
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.053527491468743,
+ "learning_rate": 6.836427483652436e-06,
+ "loss": 0.9721,
+ "step": 3193
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8664564982897314,
+ "learning_rate": 6.830516095105817e-06,
+ "loss": 0.9024,
+ "step": 3194
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8810373719043834,
+ "learning_rate": 6.824605937550224e-06,
+ "loss": 0.9008,
+ "step": 3195
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7886223100754801,
+ "learning_rate": 6.818697013281093e-06,
+ "loss": 0.846,
+ "step": 3196
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.812628989973114,
+ "learning_rate": 6.8127893245933864e-06,
+ "loss": 0.8481,
+ "step": 3197
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.866900863830601,
+ "learning_rate": 6.806882873781579e-06,
+ "loss": 0.8875,
+ "step": 3198
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8943349204273193,
+ "learning_rate": 6.800977663139666e-06,
+ "loss": 0.952,
+ "step": 3199
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8564433978090882,
+ "learning_rate": 6.795073694961171e-06,
+ "loss": 0.8304,
+ "step": 3200
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8315503097138466,
+ "learning_rate": 6.789170971539119e-06,
+ "loss": 0.8363,
+ "step": 3201
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8096126777115353,
+ "learning_rate": 6.783269495166066e-06,
+ "loss": 0.8559,
+ "step": 3202
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9747919050753229,
+ "learning_rate": 6.777369268134076e-06,
+ "loss": 0.9068,
+ "step": 3203
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8585355916807423,
+ "learning_rate": 6.771470292734723e-06,
+ "loss": 0.8832,
+ "step": 3204
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8890707984729356,
+ "learning_rate": 6.7655725712591055e-06,
+ "loss": 0.8589,
+ "step": 3205
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0661202486493144,
+ "learning_rate": 6.759676105997834e-06,
+ "loss": 0.9119,
+ "step": 3206
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9405660104630943,
+ "learning_rate": 6.753780899241027e-06,
+ "loss": 0.9044,
+ "step": 3207
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8367646811805161,
+ "learning_rate": 6.747886953278311e-06,
+ "loss": 0.9263,
+ "step": 3208
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9893393958324957,
+ "learning_rate": 6.741994270398826e-06,
+ "loss": 0.9209,
+ "step": 3209
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9350973298039797,
+ "learning_rate": 6.736102852891227e-06,
+ "loss": 0.8402,
+ "step": 3210
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.866409124933096,
+ "learning_rate": 6.730212703043666e-06,
+ "loss": 0.9116,
+ "step": 3211
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9178950934585832,
+ "learning_rate": 6.7243238231438176e-06,
+ "loss": 0.8705,
+ "step": 3212
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8149867710860199,
+ "learning_rate": 6.718436215478849e-06,
+ "loss": 0.8652,
+ "step": 3213
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0009053893488602,
+ "learning_rate": 6.712549882335442e-06,
+ "loss": 0.8752,
+ "step": 3214
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9409873711131781,
+ "learning_rate": 6.70666482599978e-06,
+ "loss": 0.9029,
+ "step": 3215
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9812311940038729,
+ "learning_rate": 6.7007810487575475e-06,
+ "loss": 0.8897,
+ "step": 3216
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8806357632510843,
+ "learning_rate": 6.694898552893941e-06,
+ "loss": 0.9084,
+ "step": 3217
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8468073305406082,
+ "learning_rate": 6.6890173406936485e-06,
+ "loss": 0.7731,
+ "step": 3218
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.1001199693783135,
+ "learning_rate": 6.683137414440872e-06,
+ "loss": 0.96,
+ "step": 3219
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8498287756216577,
+ "learning_rate": 6.677258776419304e-06,
+ "loss": 0.845,
+ "step": 3220
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8062988634178215,
+ "learning_rate": 6.671381428912138e-06,
+ "loss": 0.9022,
+ "step": 3221
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9057542872573875,
+ "learning_rate": 6.66550537420207e-06,
+ "loss": 0.9051,
+ "step": 3222
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8990148419123214,
+ "learning_rate": 6.659630614571287e-06,
+ "loss": 0.8986,
+ "step": 3223
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8680757495365716,
+ "learning_rate": 6.653757152301488e-06,
+ "loss": 0.906,
+ "step": 3224
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.7594808796323015,
+ "learning_rate": 6.647884989673849e-06,
+ "loss": 0.8297,
+ "step": 3225
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8225326600006523,
+ "learning_rate": 6.642014128969055e-06,
+ "loss": 0.8706,
+ "step": 3226
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8995796233995702,
+ "learning_rate": 6.63614457246728e-06,
+ "loss": 0.9397,
+ "step": 3227
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9165732231002232,
+ "learning_rate": 6.630276322448188e-06,
+ "loss": 0.8998,
+ "step": 3228
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.908972191513797,
+ "learning_rate": 6.624409381190946e-06,
+ "loss": 0.9211,
+ "step": 3229
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.001488427731204,
+ "learning_rate": 6.618543750974202e-06,
+ "loss": 0.8943,
+ "step": 3230
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9928219338627889,
+ "learning_rate": 6.6126794340761025e-06,
+ "loss": 0.8631,
+ "step": 3231
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8930406631901596,
+ "learning_rate": 6.606816432774279e-06,
+ "loss": 0.9568,
+ "step": 3232
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.113876608551708,
+ "learning_rate": 6.600954749345851e-06,
+ "loss": 0.9144,
+ "step": 3233
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.895346908663339,
+ "learning_rate": 6.595094386067428e-06,
+ "loss": 0.9374,
+ "step": 3234
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8311582516467093,
+ "learning_rate": 6.589235345215117e-06,
+ "loss": 0.8193,
+ "step": 3235
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9798043650661699,
+ "learning_rate": 6.583377629064494e-06,
+ "loss": 0.9819,
+ "step": 3236
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8707960089295761,
+ "learning_rate": 6.5775212398906295e-06,
+ "loss": 0.907,
+ "step": 3237
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.100050827008296,
+ "learning_rate": 6.571666179968079e-06,
+ "loss": 0.9208,
+ "step": 3238
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9073346495015958,
+ "learning_rate": 6.565812451570881e-06,
+ "loss": 0.9239,
+ "step": 3239
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9669736853429974,
+ "learning_rate": 6.5599600569725495e-06,
+ "loss": 0.9053,
+ "step": 3240
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9335190633902349,
+ "learning_rate": 6.554108998446096e-06,
+ "loss": 0.9217,
+ "step": 3241
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8321186344952952,
+ "learning_rate": 6.548259278263999e-06,
+ "loss": 0.8223,
+ "step": 3242
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8754883224712815,
+ "learning_rate": 6.542410898698226e-06,
+ "loss": 0.9157,
+ "step": 3243
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8349280795664766,
+ "learning_rate": 6.536563862020218e-06,
+ "loss": 0.8593,
+ "step": 3244
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0830970781978273,
+ "learning_rate": 6.530718170500896e-06,
+ "loss": 0.9515,
+ "step": 3245
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9038972483968136,
+ "learning_rate": 6.524873826410658e-06,
+ "loss": 0.8754,
+ "step": 3246
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8756406283704826,
+ "learning_rate": 6.519030832019383e-06,
+ "loss": 0.9035,
+ "step": 3247
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8863111467130707,
+ "learning_rate": 6.513189189596422e-06,
+ "loss": 0.8736,
+ "step": 3248
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8878081775063752,
+ "learning_rate": 6.507348901410604e-06,
+ "loss": 0.8879,
+ "step": 3249
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9322607572611155,
+ "learning_rate": 6.501509969730224e-06,
+ "loss": 0.9829,
+ "step": 3250
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9229288939212942,
+ "learning_rate": 6.495672396823061e-06,
+ "loss": 0.8361,
+ "step": 3251
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7614400839920861,
+ "learning_rate": 6.489836184956353e-06,
+ "loss": 0.8946,
+ "step": 3252
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.962302176364048,
+ "learning_rate": 6.484001336396828e-06,
+ "loss": 0.8738,
+ "step": 3253
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9255266287091252,
+ "learning_rate": 6.478167853410668e-06,
+ "loss": 0.8776,
+ "step": 3254
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9594207264288369,
+ "learning_rate": 6.472335738263534e-06,
+ "loss": 0.8925,
+ "step": 3255
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8826277693067501,
+ "learning_rate": 6.466504993220548e-06,
+ "loss": 0.8854,
+ "step": 3256
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0043926962250158,
+ "learning_rate": 6.460675620546305e-06,
+ "loss": 0.9604,
+ "step": 3257
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8808295764027383,
+ "learning_rate": 6.454847622504867e-06,
+ "loss": 0.8862,
+ "step": 3258
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8615785343420977,
+ "learning_rate": 6.4490210013597635e-06,
+ "loss": 0.8996,
+ "step": 3259
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8923283004057868,
+ "learning_rate": 6.4431957593739845e-06,
+ "loss": 0.8885,
+ "step": 3260
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9501948584509429,
+ "learning_rate": 6.4373718988099896e-06,
+ "loss": 0.8947,
+ "step": 3261
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0034537547985931,
+ "learning_rate": 6.431549421929694e-06,
+ "loss": 0.9398,
+ "step": 3262
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7991235447346864,
+ "learning_rate": 6.4257283309944804e-06,
+ "loss": 0.7453,
+ "step": 3263
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0237356481445665,
+ "learning_rate": 6.419908628265203e-06,
+ "loss": 0.8621,
+ "step": 3264
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9270513668056444,
+ "learning_rate": 6.414090316002161e-06,
+ "loss": 0.9018,
+ "step": 3265
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8479982940713884,
+ "learning_rate": 6.4082733964651166e-06,
+ "loss": 0.7733,
+ "step": 3266
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9594350472570427,
+ "learning_rate": 6.4024578719133e-06,
+ "loss": 0.9283,
+ "step": 3267
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7872219892547018,
+ "learning_rate": 6.396643744605391e-06,
+ "loss": 0.8897,
+ "step": 3268
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.981772336992416,
+ "learning_rate": 6.390831016799527e-06,
+ "loss": 0.903,
+ "step": 3269
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9760894652545553,
+ "learning_rate": 6.385019690753311e-06,
+ "loss": 0.9394,
+ "step": 3270
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9163084362814138,
+ "learning_rate": 6.379209768723791e-06,
+ "loss": 0.9802,
+ "step": 3271
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9129707109450066,
+ "learning_rate": 6.373401252967475e-06,
+ "loss": 0.8756,
+ "step": 3272
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9346634296238862,
+ "learning_rate": 6.367594145740324e-06,
+ "loss": 0.8876,
+ "step": 3273
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8573209127722409,
+ "learning_rate": 6.361788449297748e-06,
+ "loss": 0.9411,
+ "step": 3274
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.1461716331777836,
+ "learning_rate": 6.355984165894613e-06,
+ "loss": 0.9323,
+ "step": 3275
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.124717892477342,
+ "learning_rate": 6.350181297785242e-06,
+ "loss": 0.9554,
+ "step": 3276
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8731406719377184,
+ "learning_rate": 6.344379847223398e-06,
+ "loss": 0.9253,
+ "step": 3277
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8681069935684773,
+ "learning_rate": 6.338579816462298e-06,
+ "loss": 0.86,
+ "step": 3278
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9529648625777252,
+ "learning_rate": 6.332781207754605e-06,
+ "loss": 0.968,
+ "step": 3279
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0169042732983236,
+ "learning_rate": 6.326984023352435e-06,
+ "loss": 0.9259,
+ "step": 3280
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9291263889801283,
+ "learning_rate": 6.321188265507342e-06,
+ "loss": 0.8896,
+ "step": 3281
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8652796633882472,
+ "learning_rate": 6.31539393647034e-06,
+ "loss": 0.9151,
+ "step": 3282
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0989508823272882,
+ "learning_rate": 6.309601038491874e-06,
+ "loss": 0.9179,
+ "step": 3283
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.80919985324805,
+ "learning_rate": 6.303809573821842e-06,
+ "loss": 0.8538,
+ "step": 3284
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7817323101895023,
+ "learning_rate": 6.298019544709579e-06,
+ "loss": 0.8658,
+ "step": 3285
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.825467080869915,
+ "learning_rate": 6.292230953403866e-06,
+ "loss": 0.8759,
+ "step": 3286
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8808874075038633,
+ "learning_rate": 6.286443802152926e-06,
+ "loss": 0.8605,
+ "step": 3287
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8842098272447734,
+ "learning_rate": 6.280658093204422e-06,
+ "loss": 0.8883,
+ "step": 3288
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9321305545714378,
+ "learning_rate": 6.274873828805459e-06,
+ "loss": 0.9111,
+ "step": 3289
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8335970611850612,
+ "learning_rate": 6.269091011202576e-06,
+ "loss": 0.8916,
+ "step": 3290
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0056988995508562,
+ "learning_rate": 6.263309642641751e-06,
+ "loss": 0.9146,
+ "step": 3291
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9366802953362328,
+ "learning_rate": 6.257529725368405e-06,
+ "loss": 0.871,
+ "step": 3292
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9835642456560029,
+ "learning_rate": 6.251751261627386e-06,
+ "loss": 0.9502,
+ "step": 3293
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9909810001533016,
+ "learning_rate": 6.245974253662988e-06,
+ "loss": 0.9711,
+ "step": 3294
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9772797854240282,
+ "learning_rate": 6.240198703718932e-06,
+ "loss": 0.9121,
+ "step": 3295
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9191430186515294,
+ "learning_rate": 6.234424614038375e-06,
+ "loss": 0.9109,
+ "step": 3296
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9442372435701061,
+ "learning_rate": 6.2286519868639095e-06,
+ "loss": 0.9528,
+ "step": 3297
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0828132026723065,
+ "learning_rate": 6.222880824437549e-06,
+ "loss": 0.9741,
+ "step": 3298
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0425060423186634,
+ "learning_rate": 6.217111129000759e-06,
+ "loss": 0.9251,
+ "step": 3299
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9939427724604535,
+ "learning_rate": 6.211342902794413e-06,
+ "loss": 0.9615,
+ "step": 3300
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0709194747261073,
+ "learning_rate": 6.205576148058828e-06,
+ "loss": 0.8744,
+ "step": 3301
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8692419531508614,
+ "learning_rate": 6.199810867033745e-06,
+ "loss": 0.9191,
+ "step": 3302
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9147638629214966,
+ "learning_rate": 6.19404706195833e-06,
+ "loss": 0.9312,
+ "step": 3303
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.336183878351168,
+ "learning_rate": 6.188284735071177e-06,
+ "loss": 0.9113,
+ "step": 3304
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.767747365788594,
+ "learning_rate": 6.182523888610316e-06,
+ "loss": 0.8828,
+ "step": 3305
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8013724589912433,
+ "learning_rate": 6.176764524813187e-06,
+ "loss": 0.8864,
+ "step": 3306
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8038368097561518,
+ "learning_rate": 6.171006645916662e-06,
+ "loss": 0.8496,
+ "step": 3307
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7372647028306176,
+ "learning_rate": 6.165250254157032e-06,
+ "loss": 0.8084,
+ "step": 3308
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.1111603718791627,
+ "learning_rate": 6.159495351770017e-06,
+ "loss": 0.9726,
+ "step": 3309
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9140283971012301,
+ "learning_rate": 6.153741940990749e-06,
+ "loss": 0.9411,
+ "step": 3310
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0068704482869333,
+ "learning_rate": 6.1479900240537956e-06,
+ "loss": 0.9066,
+ "step": 3311
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.158059516546419,
+ "learning_rate": 6.142239603193128e-06,
+ "loss": 0.9694,
+ "step": 3312
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7684388697508447,
+ "learning_rate": 6.136490680642146e-06,
+ "loss": 0.8641,
+ "step": 3313
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9502013498861337,
+ "learning_rate": 6.130743258633667e-06,
+ "loss": 0.9401,
+ "step": 3314
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9935841058924265,
+ "learning_rate": 6.124997339399916e-06,
+ "loss": 0.9308,
+ "step": 3315
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.887957567770159,
+ "learning_rate": 6.119252925172549e-06,
+ "loss": 0.8984,
+ "step": 3316
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0171068829241392,
+ "learning_rate": 6.113510018182628e-06,
+ "loss": 0.895,
+ "step": 3317
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9844102370676482,
+ "learning_rate": 6.107768620660633e-06,
+ "loss": 0.9476,
+ "step": 3318
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7994543878681762,
+ "learning_rate": 6.102028734836456e-06,
+ "loss": 0.8659,
+ "step": 3319
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9536635985725865,
+ "learning_rate": 6.0962903629394e-06,
+ "loss": 0.8841,
+ "step": 3320
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0260922613161438,
+ "learning_rate": 6.090553507198187e-06,
+ "loss": 0.9875,
+ "step": 3321
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9285081006115551,
+ "learning_rate": 6.0848181698409384e-06,
+ "loss": 0.9077,
+ "step": 3322
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8558549149558954,
+ "learning_rate": 6.079084353095202e-06,
+ "loss": 0.8606,
+ "step": 3323
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9273874590394057,
+ "learning_rate": 6.07335205918792e-06,
+ "loss": 0.9213,
+ "step": 3324
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8119799939792245,
+ "learning_rate": 6.067621290345455e-06,
+ "loss": 0.8365,
+ "step": 3325
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8286164780478104,
+ "learning_rate": 6.061892048793568e-06,
+ "loss": 0.8337,
+ "step": 3326
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9665081994804595,
+ "learning_rate": 6.056164336757426e-06,
+ "loss": 0.9553,
+ "step": 3327
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9398621025319048,
+ "learning_rate": 6.050438156461613e-06,
+ "loss": 0.9324,
+ "step": 3328
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9205647530985112,
+ "learning_rate": 6.044713510130108e-06,
+ "loss": 0.8776,
+ "step": 3329
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9597637278577276,
+ "learning_rate": 6.038990399986302e-06,
+ "loss": 0.9598,
+ "step": 3330
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0622050440423265,
+ "learning_rate": 6.03326882825298e-06,
+ "loss": 0.9359,
+ "step": 3331
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9546111945962177,
+ "learning_rate": 6.027548797152336e-06,
+ "loss": 0.884,
+ "step": 3332
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8147346221091533,
+ "learning_rate": 6.021830308905963e-06,
+ "loss": 0.8514,
+ "step": 3333
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9476591290817805,
+ "learning_rate": 6.016113365734861e-06,
+ "loss": 0.8823,
+ "step": 3334
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7697781681624081,
+ "learning_rate": 6.0103979698594215e-06,
+ "loss": 0.8188,
+ "step": 3335
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8380004631866774,
+ "learning_rate": 6.004684123499436e-06,
+ "loss": 0.8763,
+ "step": 3336
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9847565034185128,
+ "learning_rate": 5.998971828874102e-06,
+ "loss": 0.9596,
+ "step": 3337
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9537746778246229,
+ "learning_rate": 5.993261088202005e-06,
+ "loss": 0.9939,
+ "step": 3338
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9336493198264489,
+ "learning_rate": 5.987551903701128e-06,
+ "loss": 0.8761,
+ "step": 3339
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9830557131025129,
+ "learning_rate": 5.9818442775888595e-06,
+ "loss": 0.9447,
+ "step": 3340
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9501202029008352,
+ "learning_rate": 5.97613821208197e-06,
+ "loss": 0.8117,
+ "step": 3341
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8121481938176214,
+ "learning_rate": 5.970433709396635e-06,
+ "loss": 0.8216,
+ "step": 3342
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8980032297592268,
+ "learning_rate": 5.964730771748415e-06,
+ "loss": 0.88,
+ "step": 3343
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9215562969639635,
+ "learning_rate": 5.959029401352262e-06,
+ "loss": 0.9375,
+ "step": 3344
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9530706788338669,
+ "learning_rate": 5.953329600422524e-06,
+ "loss": 0.9565,
+ "step": 3345
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9341692476239544,
+ "learning_rate": 5.947631371172943e-06,
+ "loss": 0.8829,
+ "step": 3346
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8433878370268778,
+ "learning_rate": 5.941934715816642e-06,
+ "loss": 0.8587,
+ "step": 3347
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0107100707401744,
+ "learning_rate": 5.936239636566137e-06,
+ "loss": 0.9015,
+ "step": 3348
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9462242224813096,
+ "learning_rate": 5.930546135633327e-06,
+ "loss": 0.9422,
+ "step": 3349
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8435469696030137,
+ "learning_rate": 5.924854215229509e-06,
+ "loss": 0.9209,
+ "step": 3350
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9257406462746642,
+ "learning_rate": 5.919163877565351e-06,
+ "loss": 0.9302,
+ "step": 3351
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9063894710732844,
+ "learning_rate": 5.9134751248509236e-06,
+ "loss": 0.9544,
+ "step": 3352
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9105088917858691,
+ "learning_rate": 5.9077879592956675e-06,
+ "loss": 0.9326,
+ "step": 3353
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8595235276291397,
+ "learning_rate": 5.902102383108415e-06,
+ "loss": 0.9248,
+ "step": 3354
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8126304130933923,
+ "learning_rate": 5.896418398497377e-06,
+ "loss": 0.9073,
+ "step": 3355
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8138664165499582,
+ "learning_rate": 5.890736007670144e-06,
+ "loss": 0.7843,
+ "step": 3356
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0070584504088194,
+ "learning_rate": 5.885055212833696e-06,
+ "loss": 0.9664,
+ "step": 3357
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8544585209072542,
+ "learning_rate": 5.879376016194387e-06,
+ "loss": 0.9101,
+ "step": 3358
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9155485542383524,
+ "learning_rate": 5.873698419957952e-06,
+ "loss": 0.883,
+ "step": 3359
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9940606811836806,
+ "learning_rate": 5.8680224263295045e-06,
+ "loss": 0.9228,
+ "step": 3360
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9437517206434091,
+ "learning_rate": 5.862348037513533e-06,
+ "loss": 0.9266,
+ "step": 3361
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8609221173533104,
+ "learning_rate": 5.856675255713905e-06,
+ "loss": 0.838,
+ "step": 3362
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9434007416909821,
+ "learning_rate": 5.851004083133862e-06,
+ "loss": 0.9064,
+ "step": 3363
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0290108872454056,
+ "learning_rate": 5.8453345219760275e-06,
+ "loss": 0.9372,
+ "step": 3364
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8781260878318194,
+ "learning_rate": 5.839666574442389e-06,
+ "loss": 0.845,
+ "step": 3365
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7702037206071264,
+ "learning_rate": 5.834000242734317e-06,
+ "loss": 0.82,
+ "step": 3366
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9472885689463841,
+ "learning_rate": 5.828335529052541e-06,
+ "loss": 0.8872,
+ "step": 3367
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8874860454985124,
+ "learning_rate": 5.822672435597172e-06,
+ "loss": 0.8784,
+ "step": 3368
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8367233331672315,
+ "learning_rate": 5.817010964567702e-06,
+ "loss": 0.8681,
+ "step": 3369
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9883916160905318,
+ "learning_rate": 5.811351118162969e-06,
+ "loss": 0.8989,
+ "step": 3370
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8722316268623705,
+ "learning_rate": 5.805692898581196e-06,
+ "loss": 0.8807,
+ "step": 3371
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8775362998064364,
+ "learning_rate": 5.800036308019974e-06,
+ "loss": 0.953,
+ "step": 3372
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7946634593097527,
+ "learning_rate": 5.79438134867625e-06,
+ "loss": 0.8761,
+ "step": 3373
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8844695809108578,
+ "learning_rate": 5.788728022746348e-06,
+ "loss": 0.8683,
+ "step": 3374
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9326174041987463,
+ "learning_rate": 5.783076332425957e-06,
+ "loss": 0.9111,
+ "step": 3375
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7642844190506682,
+ "learning_rate": 5.777426279910125e-06,
+ "loss": 0.8927,
+ "step": 3376
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8053230945516049,
+ "learning_rate": 5.771777867393275e-06,
+ "loss": 0.8583,
+ "step": 3377
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.946573280704107,
+ "learning_rate": 5.766131097069174e-06,
+ "loss": 0.9214,
+ "step": 3378
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8626890508507832,
+ "learning_rate": 5.760485971130969e-06,
+ "loss": 0.8129,
+ "step": 3379
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8596996597665849,
+ "learning_rate": 5.7548424917711596e-06,
+ "loss": 0.8744,
+ "step": 3380
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8776164716085274,
+ "learning_rate": 5.749200661181611e-06,
+ "loss": 0.8434,
+ "step": 3381
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.005509266985471,
+ "learning_rate": 5.7435604815535475e-06,
+ "loss": 0.9409,
+ "step": 3382
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9743735277936009,
+ "learning_rate": 5.7379219550775415e-06,
+ "loss": 0.9028,
+ "step": 3383
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9151985407085292,
+ "learning_rate": 5.732285083943537e-06,
+ "loss": 0.9299,
+ "step": 3384
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8440116883810425,
+ "learning_rate": 5.726649870340833e-06,
+ "loss": 0.8652,
+ "step": 3385
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9900867577380902,
+ "learning_rate": 5.721016316458068e-06,
+ "loss": 0.9247,
+ "step": 3386
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9626858705845855,
+ "learning_rate": 5.715384424483268e-06,
+ "loss": 0.9017,
+ "step": 3387
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9798722326172483,
+ "learning_rate": 5.709754196603781e-06,
+ "loss": 0.9243,
+ "step": 3388
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.017737212119303,
+ "learning_rate": 5.704125635006329e-06,
+ "loss": 0.9333,
+ "step": 3389
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.969046163856431,
+ "learning_rate": 5.6984987418769825e-06,
+ "loss": 0.9003,
+ "step": 3390
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7089081712102512,
+ "learning_rate": 5.692873519401154e-06,
+ "loss": 0.7972,
+ "step": 3391
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0159028505390155,
+ "learning_rate": 5.6872499697636195e-06,
+ "loss": 0.9637,
+ "step": 3392
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9326793210567529,
+ "learning_rate": 5.681628095148502e-06,
+ "loss": 0.9484,
+ "step": 3393
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8514463972750765,
+ "learning_rate": 5.6760078977392706e-06,
+ "loss": 0.8651,
+ "step": 3394
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.049771217877223,
+ "learning_rate": 5.67038937971875e-06,
+ "loss": 0.9004,
+ "step": 3395
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7209213774076711,
+ "learning_rate": 5.664772543269101e-06,
+ "loss": 0.8291,
+ "step": 3396
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9663480024894006,
+ "learning_rate": 5.659157390571842e-06,
+ "loss": 0.8783,
+ "step": 3397
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0748229237280895,
+ "learning_rate": 5.653543923807833e-06,
+ "loss": 0.9402,
+ "step": 3398
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.005878011868789,
+ "learning_rate": 5.6479321451572785e-06,
+ "loss": 0.9077,
+ "step": 3399
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9011630561254075,
+ "learning_rate": 5.642322056799732e-06,
+ "loss": 0.8952,
+ "step": 3400
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.1193408050557743,
+ "learning_rate": 5.636713660914087e-06,
+ "loss": 1.0096,
+ "step": 3401
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.101405618492286,
+ "learning_rate": 5.631106959678575e-06,
+ "loss": 0.9389,
+ "step": 3402
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9475734112345786,
+ "learning_rate": 5.625501955270777e-06,
+ "loss": 0.8692,
+ "step": 3403
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.2023077285184303,
+ "learning_rate": 5.619898649867612e-06,
+ "loss": 0.9241,
+ "step": 3404
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8490728639623257,
+ "learning_rate": 5.614297045645339e-06,
+ "loss": 0.9202,
+ "step": 3405
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9737075166729794,
+ "learning_rate": 5.6086971447795625e-06,
+ "loss": 0.9002,
+ "step": 3406
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0102837495005053,
+ "learning_rate": 5.603098949445209e-06,
+ "loss": 0.8761,
+ "step": 3407
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.873161086030156,
+ "learning_rate": 5.597502461816557e-06,
+ "loss": 0.8266,
+ "step": 3408
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9661948113920387,
+ "learning_rate": 5.5919076840672215e-06,
+ "loss": 0.9593,
+ "step": 3409
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.952311111394339,
+ "learning_rate": 5.5863146183701454e-06,
+ "loss": 0.9037,
+ "step": 3410
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9993243482108803,
+ "learning_rate": 5.580723266897616e-06,
+ "loss": 0.9224,
+ "step": 3411
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8324360584686835,
+ "learning_rate": 5.575133631821243e-06,
+ "loss": 0.8121,
+ "step": 3412
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0682116747233388,
+ "learning_rate": 5.5695457153119806e-06,
+ "loss": 0.9308,
+ "step": 3413
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7986522384159873,
+ "learning_rate": 5.563959519540114e-06,
+ "loss": 0.8701,
+ "step": 3414
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8644591382554868,
+ "learning_rate": 5.558375046675244e-06,
+ "loss": 0.8844,
+ "step": 3415
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9705469050856875,
+ "learning_rate": 5.552792298886335e-06,
+ "loss": 0.9435,
+ "step": 3416
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8030372860824314,
+ "learning_rate": 5.547211278341646e-06,
+ "loss": 0.8828,
+ "step": 3417
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.014873471767318,
+ "learning_rate": 5.541631987208789e-06,
+ "loss": 0.9233,
+ "step": 3418
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9161763853357491,
+ "learning_rate": 5.536054427654698e-06,
+ "loss": 0.8159,
+ "step": 3419
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.856965638509794,
+ "learning_rate": 5.530478601845624e-06,
+ "loss": 0.8874,
+ "step": 3420
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0425395595947131,
+ "learning_rate": 5.52490451194716e-06,
+ "loss": 0.9189,
+ "step": 3421
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8880815421138911,
+ "learning_rate": 5.519332160124215e-06,
+ "loss": 0.8874,
+ "step": 3422
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8485731127474028,
+ "learning_rate": 5.513761548541032e-06,
+ "loss": 0.8559,
+ "step": 3423
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8615152400129891,
+ "learning_rate": 5.508192679361169e-06,
+ "loss": 0.9138,
+ "step": 3424
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9770857467762206,
+ "learning_rate": 5.502625554747508e-06,
+ "loss": 0.9296,
+ "step": 3425
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8721669442302464,
+ "learning_rate": 5.497060176862259e-06,
+ "loss": 0.8836,
+ "step": 3426
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8437100573198295,
+ "learning_rate": 5.491496547866948e-06,
+ "loss": 0.9058,
+ "step": 3427
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9201560209677323,
+ "learning_rate": 5.485934669922428e-06,
+ "loss": 0.9015,
+ "step": 3428
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7943515859818367,
+ "learning_rate": 5.480374545188866e-06,
+ "loss": 0.8488,
+ "step": 3429
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1848453656826603,
+ "learning_rate": 5.474816175825754e-06,
+ "loss": 0.9261,
+ "step": 3430
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.6916057405289044,
+ "learning_rate": 5.469259563991894e-06,
+ "loss": 0.7851,
+ "step": 3431
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7751840845134919,
+ "learning_rate": 5.46370471184541e-06,
+ "loss": 0.8706,
+ "step": 3432
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1048606617126446,
+ "learning_rate": 5.458151621543744e-06,
+ "loss": 0.8476,
+ "step": 3433
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1955957046056491,
+ "learning_rate": 5.452600295243653e-06,
+ "loss": 0.9248,
+ "step": 3434
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8214911260216353,
+ "learning_rate": 5.4470507351012116e-06,
+ "loss": 0.8425,
+ "step": 3435
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9478794539977947,
+ "learning_rate": 5.441502943271797e-06,
+ "loss": 0.9477,
+ "step": 3436
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9237745726439537,
+ "learning_rate": 5.4359569219101115e-06,
+ "loss": 0.9152,
+ "step": 3437
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9901683414325779,
+ "learning_rate": 5.430412673170167e-06,
+ "loss": 0.9568,
+ "step": 3438
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8629495060985444,
+ "learning_rate": 5.424870199205283e-06,
+ "loss": 0.923,
+ "step": 3439
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.901315763691827,
+ "learning_rate": 5.4193295021681e-06,
+ "loss": 0.8619,
+ "step": 3440
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8598993833740752,
+ "learning_rate": 5.413790584210551e-06,
+ "loss": 0.8478,
+ "step": 3441
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9830815371446981,
+ "learning_rate": 5.408253447483892e-06,
+ "loss": 0.9587,
+ "step": 3442
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9108769108808722,
+ "learning_rate": 5.402718094138688e-06,
+ "loss": 0.897,
+ "step": 3443
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.180401739791126,
+ "learning_rate": 5.397184526324792e-06,
+ "loss": 0.9519,
+ "step": 3444
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8650951591031756,
+ "learning_rate": 5.391652746191398e-06,
+ "loss": 0.9322,
+ "step": 3445
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7978040458964462,
+ "learning_rate": 5.38612275588697e-06,
+ "loss": 0.9363,
+ "step": 3446
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9857910038109453,
+ "learning_rate": 5.380594557559298e-06,
+ "loss": 0.9757,
+ "step": 3447
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0469742863993499,
+ "learning_rate": 5.375068153355474e-06,
+ "loss": 0.8857,
+ "step": 3448
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8684083143914649,
+ "learning_rate": 5.369543545421883e-06,
+ "loss": 0.9735,
+ "step": 3449
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9619029330079326,
+ "learning_rate": 5.364020735904223e-06,
+ "loss": 0.9339,
+ "step": 3450
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8452340682502787,
+ "learning_rate": 5.358499726947488e-06,
+ "loss": 0.8801,
+ "step": 3451
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9216605665459061,
+ "learning_rate": 5.352980520695974e-06,
+ "loss": 0.8933,
+ "step": 3452
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9573668387838171,
+ "learning_rate": 5.347463119293283e-06,
+ "loss": 0.9458,
+ "step": 3453
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1123828008567809,
+ "learning_rate": 5.341947524882301e-06,
+ "loss": 1.0189,
+ "step": 3454
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7799207423793345,
+ "learning_rate": 5.336433739605227e-06,
+ "loss": 0.8433,
+ "step": 3455
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9978239359608753,
+ "learning_rate": 5.330921765603549e-06,
+ "loss": 0.9548,
+ "step": 3456
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8877842351599056,
+ "learning_rate": 5.325411605018056e-06,
+ "loss": 0.8651,
+ "step": 3457
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8498171853637153,
+ "learning_rate": 5.31990325998883e-06,
+ "loss": 0.8913,
+ "step": 3458
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8677557509648534,
+ "learning_rate": 5.314396732655253e-06,
+ "loss": 0.9245,
+ "step": 3459
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.7876558975530807,
+ "learning_rate": 5.308892025155989e-06,
+ "loss": 0.8575,
+ "step": 3460
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8988856371919549,
+ "learning_rate": 5.303389139629007e-06,
+ "loss": 0.9101,
+ "step": 3461
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.7702792353501093,
+ "learning_rate": 5.297888078211564e-06,
+ "loss": 0.7773,
+ "step": 3462
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0425983489613044,
+ "learning_rate": 5.2923888430402085e-06,
+ "loss": 0.9101,
+ "step": 3463
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9702026580483017,
+ "learning_rate": 5.286891436250785e-06,
+ "loss": 0.8841,
+ "step": 3464
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.938621341124513,
+ "learning_rate": 5.281395859978414e-06,
+ "loss": 0.9387,
+ "step": 3465
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.917526090070358,
+ "learning_rate": 5.2759021163575184e-06,
+ "loss": 0.8938,
+ "step": 3466
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0450012948924594,
+ "learning_rate": 5.27041020752181e-06,
+ "loss": 0.9181,
+ "step": 3467
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0748790354613298,
+ "learning_rate": 5.26492013560427e-06,
+ "loss": 0.8597,
+ "step": 3468
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9187536500879364,
+ "learning_rate": 5.259431902737195e-06,
+ "loss": 0.9202,
+ "step": 3469
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0014879474178906,
+ "learning_rate": 5.2539455110521385e-06,
+ "loss": 0.9597,
+ "step": 3470
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.882044465734256,
+ "learning_rate": 5.248460962679958e-06,
+ "loss": 0.911,
+ "step": 3471
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9004670266382829,
+ "learning_rate": 5.24297825975079e-06,
+ "loss": 0.8858,
+ "step": 3472
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8306911476871505,
+ "learning_rate": 5.237497404394044e-06,
+ "loss": 0.8999,
+ "step": 3473
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8913704251594962,
+ "learning_rate": 5.232018398738436e-06,
+ "loss": 0.8846,
+ "step": 3474
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9629493517986126,
+ "learning_rate": 5.226541244911936e-06,
+ "loss": 0.9177,
+ "step": 3475
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9322762849895656,
+ "learning_rate": 5.221065945041811e-06,
+ "loss": 0.8872,
+ "step": 3476
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9157068821660157,
+ "learning_rate": 5.215592501254609e-06,
+ "loss": 0.9044,
+ "step": 3477
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9299916317898786,
+ "learning_rate": 5.210120915676147e-06,
+ "loss": 0.9175,
+ "step": 3478
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9492664707809794,
+ "learning_rate": 5.2046511904315265e-06,
+ "loss": 0.8981,
+ "step": 3479
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.752313027702845,
+ "learning_rate": 5.199183327645128e-06,
+ "loss": 0.8523,
+ "step": 3480
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9462743046685494,
+ "learning_rate": 5.193717329440604e-06,
+ "loss": 0.8856,
+ "step": 3481
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8622878800414172,
+ "learning_rate": 5.188253197940889e-06,
+ "loss": 0.8037,
+ "step": 3482
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9723519003250821,
+ "learning_rate": 5.182790935268185e-06,
+ "loss": 0.9007,
+ "step": 3483
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.1132024580277993,
+ "learning_rate": 5.177330543543971e-06,
+ "loss": 0.886,
+ "step": 3484
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.218071464406349,
+ "learning_rate": 5.171872024889004e-06,
+ "loss": 0.9158,
+ "step": 3485
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.841544800055019,
+ "learning_rate": 5.166415381423306e-06,
+ "loss": 0.9115,
+ "step": 3486
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0313558388714028,
+ "learning_rate": 5.160960615266179e-06,
+ "loss": 0.9216,
+ "step": 3487
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8598936069898658,
+ "learning_rate": 5.155507728536191e-06,
+ "loss": 0.8526,
+ "step": 3488
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8501831376800932,
+ "learning_rate": 5.150056723351173e-06,
+ "loss": 0.8443,
+ "step": 3489
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8261694114785857,
+ "learning_rate": 5.14460760182824e-06,
+ "loss": 0.8292,
+ "step": 3490
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9169323447691452,
+ "learning_rate": 5.139160366083765e-06,
+ "loss": 0.8935,
+ "step": 3491
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8924186076730839,
+ "learning_rate": 5.133715018233393e-06,
+ "loss": 0.8515,
+ "step": 3492
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8536900006938354,
+ "learning_rate": 5.128271560392037e-06,
+ "loss": 0.875,
+ "step": 3493
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.78616365378165,
+ "learning_rate": 5.122829994673866e-06,
+ "loss": 0.8538,
+ "step": 3494
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9796149128469018,
+ "learning_rate": 5.117390323192326e-06,
+ "loss": 0.9023,
+ "step": 3495
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9537109643837329,
+ "learning_rate": 5.111952548060126e-06,
+ "loss": 0.8677,
+ "step": 3496
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0113114525340483,
+ "learning_rate": 5.106516671389224e-06,
+ "loss": 0.9101,
+ "step": 3497
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8818485387427726,
+ "learning_rate": 5.101082695290866e-06,
+ "loss": 0.8817,
+ "step": 3498
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8490731104838284,
+ "learning_rate": 5.0956506218755344e-06,
+ "loss": 0.8968,
+ "step": 3499
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9062471249705372,
+ "learning_rate": 5.09022045325299e-06,
+ "loss": 0.9487,
+ "step": 3500
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8966273313807227,
+ "learning_rate": 5.0847921915322486e-06,
+ "loss": 0.91,
+ "step": 3501
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0164216103994779,
+ "learning_rate": 5.07936583882158e-06,
+ "loss": 0.8968,
+ "step": 3502
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.969816784008669,
+ "learning_rate": 5.073941397228518e-06,
+ "loss": 0.9295,
+ "step": 3503
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9186888582933629,
+ "learning_rate": 5.068518868859854e-06,
+ "loss": 0.961,
+ "step": 3504
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9719789691069631,
+ "learning_rate": 5.063098255821637e-06,
+ "loss": 0.9147,
+ "step": 3505
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8443109595221848,
+ "learning_rate": 5.0576795602191734e-06,
+ "loss": 0.8007,
+ "step": 3506
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0285499892269294,
+ "learning_rate": 5.052262784157014e-06,
+ "loss": 0.9377,
+ "step": 3507
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.848800503785638,
+ "learning_rate": 5.046847929738971e-06,
+ "loss": 0.846,
+ "step": 3508
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9289613009230611,
+ "learning_rate": 5.041434999068127e-06,
+ "loss": 0.8885,
+ "step": 3509
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9742100804060939,
+ "learning_rate": 5.036023994246787e-06,
+ "loss": 0.8607,
+ "step": 3510
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9279770257742669,
+ "learning_rate": 5.030614917376532e-06,
+ "loss": 0.8565,
+ "step": 3511
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.948576769450609,
+ "learning_rate": 5.025207770558176e-06,
+ "loss": 0.9564,
+ "step": 3512
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8738299741755271,
+ "learning_rate": 5.0198025558917985e-06,
+ "loss": 0.8759,
+ "step": 3513
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0576587162071487,
+ "learning_rate": 5.014399275476721e-06,
+ "loss": 0.9377,
+ "step": 3514
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7807765494674233,
+ "learning_rate": 5.008997931411517e-06,
+ "loss": 0.8174,
+ "step": 3515
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0736030632697564,
+ "learning_rate": 5.003598525794002e-06,
+ "loss": 0.9407,
+ "step": 3516
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9528980831443683,
+ "learning_rate": 4.998201060721253e-06,
+ "loss": 0.9393,
+ "step": 3517
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8724255799327322,
+ "learning_rate": 4.992805538289571e-06,
+ "loss": 0.8755,
+ "step": 3518
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8320551639912551,
+ "learning_rate": 4.987411960594521e-06,
+ "loss": 0.8651,
+ "step": 3519
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9031541070879855,
+ "learning_rate": 4.982020329730904e-06,
+ "loss": 0.9217,
+ "step": 3520
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9416704158419092,
+ "learning_rate": 4.976630647792771e-06,
+ "loss": 0.8387,
+ "step": 3521
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9104537436743043,
+ "learning_rate": 4.971242916873412e-06,
+ "loss": 0.8829,
+ "step": 3522
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0406748404431048,
+ "learning_rate": 4.965857139065354e-06,
+ "loss": 0.8229,
+ "step": 3523
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.1918054177782331,
+ "learning_rate": 4.9604733164603755e-06,
+ "loss": 1.05,
+ "step": 3524
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0500021260919787,
+ "learning_rate": 4.955091451149495e-06,
+ "loss": 0.9417,
+ "step": 3525
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9798488709150643,
+ "learning_rate": 4.9497115452229535e-06,
+ "loss": 0.9418,
+ "step": 3526
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8378640528558038,
+ "learning_rate": 4.9443336007702614e-06,
+ "loss": 0.839,
+ "step": 3527
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9169392255615451,
+ "learning_rate": 4.938957619880138e-06,
+ "loss": 0.9173,
+ "step": 3528
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9630003863193658,
+ "learning_rate": 4.9335836046405575e-06,
+ "loss": 0.9257,
+ "step": 3529
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0750064623369548,
+ "learning_rate": 4.928211557138728e-06,
+ "loss": 0.9082,
+ "step": 3530
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.2048749619137136,
+ "learning_rate": 4.922841479461083e-06,
+ "loss": 0.9164,
+ "step": 3531
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0203452135898181,
+ "learning_rate": 4.917473373693305e-06,
+ "loss": 0.848,
+ "step": 3532
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0157634466515504,
+ "learning_rate": 4.9121072419203016e-06,
+ "loss": 0.9171,
+ "step": 3533
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8420438484394206,
+ "learning_rate": 4.906743086226218e-06,
+ "loss": 0.9127,
+ "step": 3534
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8714977975687543,
+ "learning_rate": 4.901380908694434e-06,
+ "loss": 0.8889,
+ "step": 3535
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9561030187654711,
+ "learning_rate": 4.8960207114075495e-06,
+ "loss": 0.9149,
+ "step": 3536
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8655696062325442,
+ "learning_rate": 4.890662496447407e-06,
+ "loss": 0.8512,
+ "step": 3537
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.1995418153027713,
+ "learning_rate": 4.8853062658950765e-06,
+ "loss": 0.9337,
+ "step": 3538
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8345348055716388,
+ "learning_rate": 4.879952021830856e-06,
+ "loss": 0.8593,
+ "step": 3539
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9469728302461272,
+ "learning_rate": 4.874599766334276e-06,
+ "loss": 0.9105,
+ "step": 3540
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7108020023733196,
+ "learning_rate": 4.8692495014840825e-06,
+ "loss": 0.8494,
+ "step": 3541
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9570948535842642,
+ "learning_rate": 4.863901229358261e-06,
+ "loss": 0.888,
+ "step": 3542
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9003542739663155,
+ "learning_rate": 4.858554952034019e-06,
+ "loss": 0.9285,
+ "step": 3543
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9129049815418941,
+ "learning_rate": 4.853210671587789e-06,
+ "loss": 0.8085,
+ "step": 3544
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9280821059460705,
+ "learning_rate": 4.847868390095227e-06,
+ "loss": 0.9347,
+ "step": 3545
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0447361437896092,
+ "learning_rate": 4.842528109631218e-06,
+ "loss": 0.9781,
+ "step": 3546
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.007431358107507,
+ "learning_rate": 4.837189832269858e-06,
+ "loss": 0.9104,
+ "step": 3547
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.761600653498451,
+ "learning_rate": 4.8318535600844775e-06,
+ "loss": 0.848,
+ "step": 3548
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.86557119556584,
+ "learning_rate": 4.8265192951476206e-06,
+ "loss": 0.9265,
+ "step": 3549
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.017409110173373,
+ "learning_rate": 4.8211870395310556e-06,
+ "loss": 0.8872,
+ "step": 3550
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8784477424717826,
+ "learning_rate": 4.815856795305772e-06,
+ "loss": 0.9062,
+ "step": 3551
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.984678411366913,
+ "learning_rate": 4.81052856454197e-06,
+ "loss": 0.8783,
+ "step": 3552
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8168804136984219,
+ "learning_rate": 4.805202349309074e-06,
+ "loss": 0.8347,
+ "step": 3553
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.230707134890969,
+ "learning_rate": 4.7998781516757295e-06,
+ "loss": 0.9391,
+ "step": 3554
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.927832533010434,
+ "learning_rate": 4.794555973709783e-06,
+ "loss": 0.8698,
+ "step": 3555
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8985962191097416,
+ "learning_rate": 4.789235817478322e-06,
+ "loss": 0.8865,
+ "step": 3556
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7628225331553036,
+ "learning_rate": 4.783917685047621e-06,
+ "loss": 0.8535,
+ "step": 3557
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0530196419991154,
+ "learning_rate": 4.778601578483187e-06,
+ "loss": 0.9481,
+ "step": 3558
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8505827536068966,
+ "learning_rate": 4.773287499849737e-06,
+ "loss": 0.7899,
+ "step": 3559
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8592761492230445,
+ "learning_rate": 4.767975451211191e-06,
+ "loss": 0.8766,
+ "step": 3560
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8549943462053834,
+ "learning_rate": 4.762665434630692e-06,
+ "loss": 0.889,
+ "step": 3561
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8582391234373967,
+ "learning_rate": 4.757357452170588e-06,
+ "loss": 0.8579,
+ "step": 3562
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9547029286790802,
+ "learning_rate": 4.752051505892438e-06,
+ "loss": 0.9169,
+ "step": 3563
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.863217768449546,
+ "learning_rate": 4.746747597857014e-06,
+ "loss": 0.8767,
+ "step": 3564
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9146603782370458,
+ "learning_rate": 4.741445730124287e-06,
+ "loss": 0.9497,
+ "step": 3565
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9196974301843103,
+ "learning_rate": 4.736145904753445e-06,
+ "loss": 0.8861,
+ "step": 3566
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9557480114421328,
+ "learning_rate": 4.730848123802877e-06,
+ "loss": 0.8835,
+ "step": 3567
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9104180211533642,
+ "learning_rate": 4.725552389330183e-06,
+ "loss": 0.9101,
+ "step": 3568
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8632313053918585,
+ "learning_rate": 4.720258703392161e-06,
+ "loss": 0.9004,
+ "step": 3569
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9260839455836017,
+ "learning_rate": 4.714967068044826e-06,
+ "loss": 0.9357,
+ "step": 3570
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9090808377784159,
+ "learning_rate": 4.7096774853433765e-06,
+ "loss": 0.8252,
+ "step": 3571
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9913037418742416,
+ "learning_rate": 4.704389957342237e-06,
+ "loss": 0.9041,
+ "step": 3572
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.851799669945169,
+ "learning_rate": 4.699104486095008e-06,
+ "loss": 0.9213,
+ "step": 3573
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.919488826319392,
+ "learning_rate": 4.69382107365452e-06,
+ "loss": 0.9516,
+ "step": 3574
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8447172980269063,
+ "learning_rate": 4.6885397220727855e-06,
+ "loss": 0.8193,
+ "step": 3575
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9834920335283767,
+ "learning_rate": 4.683260433401016e-06,
+ "loss": 0.9533,
+ "step": 3576
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.013062596277445,
+ "learning_rate": 4.677983209689631e-06,
+ "loss": 0.8396,
+ "step": 3577
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.973626006187746,
+ "learning_rate": 4.6727080529882394e-06,
+ "loss": 0.854,
+ "step": 3578
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9454478664953159,
+ "learning_rate": 4.667434965345654e-06,
+ "loss": 0.9091,
+ "step": 3579
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.919546248047086,
+ "learning_rate": 4.6621639488098856e-06,
+ "loss": 0.9519,
+ "step": 3580
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9886482423401748,
+ "learning_rate": 4.656895005428127e-06,
+ "loss": 0.8573,
+ "step": 3581
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8844947281806744,
+ "learning_rate": 4.651628137246781e-06,
+ "loss": 0.8831,
+ "step": 3582
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9195399134259411,
+ "learning_rate": 4.6463633463114395e-06,
+ "loss": 0.9275,
+ "step": 3583
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8832282550046601,
+ "learning_rate": 4.641100634666877e-06,
+ "loss": 0.8965,
+ "step": 3584
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7755744881842765,
+ "learning_rate": 4.635840004357086e-06,
+ "loss": 0.7934,
+ "step": 3585
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9326589562514537,
+ "learning_rate": 4.630581457425222e-06,
+ "loss": 0.858,
+ "step": 3586
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9542507937829572,
+ "learning_rate": 4.625324995913648e-06,
+ "loss": 0.9047,
+ "step": 3587
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8937640400868299,
+ "learning_rate": 4.620070621863917e-06,
+ "loss": 0.8765,
+ "step": 3588
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9699888873370087,
+ "learning_rate": 4.614818337316759e-06,
+ "loss": 0.8805,
+ "step": 3589
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9230127756400321,
+ "learning_rate": 4.609568144312107e-06,
+ "loss": 0.819,
+ "step": 3590
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8737618354540571,
+ "learning_rate": 4.6043200448890724e-06,
+ "loss": 0.88,
+ "step": 3591
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8538828166058943,
+ "learning_rate": 4.599074041085958e-06,
+ "loss": 0.8519,
+ "step": 3592
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.3215944655457885,
+ "learning_rate": 4.593830134940256e-06,
+ "loss": 0.875,
+ "step": 3593
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.0077055159975417,
+ "learning_rate": 4.588588328488629e-06,
+ "loss": 0.9125,
+ "step": 3594
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.873401485821927,
+ "learning_rate": 4.5833486237669414e-06,
+ "loss": 0.9075,
+ "step": 3595
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8818663016823189,
+ "learning_rate": 4.578111022810231e-06,
+ "loss": 0.8976,
+ "step": 3596
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9326040787549489,
+ "learning_rate": 4.5728755276527225e-06,
+ "loss": 0.9326,
+ "step": 3597
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.6718054616921444,
+ "learning_rate": 4.567642140327823e-06,
+ "loss": 0.7996,
+ "step": 3598
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8957026193299594,
+ "learning_rate": 4.562410862868123e-06,
+ "loss": 0.9123,
+ "step": 3599
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7320437879618403,
+ "learning_rate": 4.557181697305383e-06,
+ "loss": 0.8548,
+ "step": 3600
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8683517744441273,
+ "learning_rate": 4.551954645670557e-06,
+ "loss": 0.8725,
+ "step": 3601
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8847525424074256,
+ "learning_rate": 4.546729709993762e-06,
+ "loss": 0.9144,
+ "step": 3602
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.909666832325789,
+ "learning_rate": 4.541506892304314e-06,
+ "loss": 0.8982,
+ "step": 3603
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9802842671920442,
+ "learning_rate": 4.536286194630694e-06,
+ "loss": 0.8473,
+ "step": 3604
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7915695493847694,
+ "learning_rate": 4.531067619000553e-06,
+ "loss": 0.8109,
+ "step": 3605
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8858358371523316,
+ "learning_rate": 4.525851167440731e-06,
+ "loss": 0.9083,
+ "step": 3606
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.0016265029103604,
+ "learning_rate": 4.52063684197724e-06,
+ "loss": 0.9003,
+ "step": 3607
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8981930715099556,
+ "learning_rate": 4.515424644635254e-06,
+ "loss": 0.9083,
+ "step": 3608
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8255039931415044,
+ "learning_rate": 4.510214577439146e-06,
+ "loss": 0.8856,
+ "step": 3609
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.855645467956242,
+ "learning_rate": 4.5050066424124324e-06,
+ "loss": 0.8586,
+ "step": 3610
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9436723158455018,
+ "learning_rate": 4.49980084157782e-06,
+ "loss": 0.8329,
+ "step": 3611
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9755445794722121,
+ "learning_rate": 4.494597176957186e-06,
+ "loss": 0.9376,
+ "step": 3612
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9273158389191944,
+ "learning_rate": 4.489395650571562e-06,
+ "loss": 0.9061,
+ "step": 3613
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9215188409295219,
+ "learning_rate": 4.4841962644411765e-06,
+ "loss": 0.8865,
+ "step": 3614
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.967506317454882,
+ "learning_rate": 4.4789990205854e-06,
+ "loss": 0.8873,
+ "step": 3615
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8604527308695761,
+ "learning_rate": 4.473803921022784e-06,
+ "loss": 0.9015,
+ "step": 3616
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8256845675856916,
+ "learning_rate": 4.468610967771051e-06,
+ "loss": 0.871,
+ "step": 3617
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9000936146825755,
+ "learning_rate": 4.4634201628470766e-06,
+ "loss": 0.9217,
+ "step": 3618
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8592376043744393,
+ "learning_rate": 4.458231508266912e-06,
+ "loss": 0.8417,
+ "step": 3619
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9685441995930987,
+ "learning_rate": 4.453045006045773e-06,
+ "loss": 0.9404,
+ "step": 3620
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8454383433942781,
+ "learning_rate": 4.447860658198035e-06,
+ "loss": 0.8963,
+ "step": 3621
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9249400638460467,
+ "learning_rate": 4.442678466737245e-06,
+ "loss": 0.8715,
+ "step": 3622
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9643101473914554,
+ "learning_rate": 4.4374984336760975e-06,
+ "loss": 0.8994,
+ "step": 3623
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8843503186780189,
+ "learning_rate": 4.432320561026461e-06,
+ "loss": 0.8907,
+ "step": 3624
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9200710208123296,
+ "learning_rate": 4.427144850799363e-06,
+ "loss": 0.9304,
+ "step": 3625
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9552543674744298,
+ "learning_rate": 4.421971305004989e-06,
+ "loss": 0.9535,
+ "step": 3626
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.968030250245091,
+ "learning_rate": 4.416799925652684e-06,
+ "loss": 0.8622,
+ "step": 3627
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8650031437396379,
+ "learning_rate": 4.411630714750956e-06,
+ "loss": 0.8618,
+ "step": 3628
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8778782072026667,
+ "learning_rate": 4.4064636743074605e-06,
+ "loss": 0.8525,
+ "step": 3629
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.1072428127959597,
+ "learning_rate": 4.40129880632902e-06,
+ "loss": 0.9185,
+ "step": 3630
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.909317764651399,
+ "learning_rate": 4.396136112821608e-06,
+ "loss": 0.8654,
+ "step": 3631
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8521648959061848,
+ "learning_rate": 4.390975595790358e-06,
+ "loss": 0.8821,
+ "step": 3632
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9395599613042558,
+ "learning_rate": 4.385817257239556e-06,
+ "loss": 0.8634,
+ "step": 3633
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9519217321516676,
+ "learning_rate": 4.380661099172636e-06,
+ "loss": 0.8705,
+ "step": 3634
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.968085791215669,
+ "learning_rate": 4.375507123592194e-06,
+ "loss": 0.8764,
+ "step": 3635
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9412718300744525,
+ "learning_rate": 4.370355332499977e-06,
+ "loss": 0.9213,
+ "step": 3636
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9137980428032713,
+ "learning_rate": 4.365205727896872e-06,
+ "loss": 0.9273,
+ "step": 3637
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.6519028128529283,
+ "learning_rate": 4.36005831178294e-06,
+ "loss": 0.7965,
+ "step": 3638
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8202311466952675,
+ "learning_rate": 4.354913086157367e-06,
+ "loss": 0.8718,
+ "step": 3639
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.0289697032115919,
+ "learning_rate": 4.349770053018502e-06,
+ "loss": 0.8648,
+ "step": 3640
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9888059882518984,
+ "learning_rate": 4.344629214363845e-06,
+ "loss": 0.9252,
+ "step": 3641
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.062500981065704,
+ "learning_rate": 4.339490572190031e-06,
+ "loss": 0.8811,
+ "step": 3642
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8271425347190752,
+ "learning_rate": 4.334354128492851e-06,
+ "loss": 0.8298,
+ "step": 3643
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.1525897331279629,
+ "learning_rate": 4.329219885267244e-06,
+ "loss": 0.8341,
+ "step": 3644
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8754748020528021,
+ "learning_rate": 4.324087844507289e-06,
+ "loss": 0.811,
+ "step": 3645
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8956038071027671,
+ "learning_rate": 4.318958008206214e-06,
+ "loss": 0.9099,
+ "step": 3646
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9105125073309388,
+ "learning_rate": 4.313830378356384e-06,
+ "loss": 0.9078,
+ "step": 3647
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9846714006143011,
+ "learning_rate": 4.3087049569493136e-06,
+ "loss": 0.9495,
+ "step": 3648
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8898304315799884,
+ "learning_rate": 4.303581745975656e-06,
+ "loss": 0.8255,
+ "step": 3649
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9846922272472171,
+ "learning_rate": 4.2984607474252084e-06,
+ "loss": 0.8476,
+ "step": 3650
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8737467243862848,
+ "learning_rate": 4.293341963286912e-06,
+ "loss": 0.8575,
+ "step": 3651
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8551569355687991,
+ "learning_rate": 4.288225395548835e-06,
+ "loss": 0.9171,
+ "step": 3652
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.0060747251716518,
+ "learning_rate": 4.283111046198198e-06,
+ "loss": 0.8679,
+ "step": 3653
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8621218642512847,
+ "learning_rate": 4.277998917221354e-06,
+ "loss": 0.9173,
+ "step": 3654
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9715160176918434,
+ "learning_rate": 4.272889010603798e-06,
+ "loss": 0.8337,
+ "step": 3655
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8765010419824666,
+ "learning_rate": 4.267781328330155e-06,
+ "loss": 0.8877,
+ "step": 3656
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.7833574855213274,
+ "learning_rate": 4.262675872384197e-06,
+ "loss": 0.8347,
+ "step": 3657
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8611646854513554,
+ "learning_rate": 4.257572644748813e-06,
+ "loss": 0.8863,
+ "step": 3658
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8946638088467714,
+ "learning_rate": 4.252471647406045e-06,
+ "loss": 0.8666,
+ "step": 3659
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9009900500063068,
+ "learning_rate": 4.2473728823370605e-06,
+ "loss": 0.867,
+ "step": 3660
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9518306446370538,
+ "learning_rate": 4.242276351522161e-06,
+ "loss": 0.8915,
+ "step": 3661
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9812559709055975,
+ "learning_rate": 4.237182056940784e-06,
+ "loss": 0.9443,
+ "step": 3662
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.850129243043673,
+ "learning_rate": 4.232090000571488e-06,
+ "loss": 0.9138,
+ "step": 3663
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.7929788463165887,
+ "learning_rate": 4.2270001843919714e-06,
+ "loss": 0.8653,
+ "step": 3664
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8811424669031181,
+ "learning_rate": 4.221912610379065e-06,
+ "loss": 0.8096,
+ "step": 3665
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8896115887659922,
+ "learning_rate": 4.216827280508712e-06,
+ "loss": 0.8645,
+ "step": 3666
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9238072936348655,
+ "learning_rate": 4.211744196756011e-06,
+ "loss": 0.9175,
+ "step": 3667
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8575211788698613,
+ "learning_rate": 4.206663361095164e-06,
+ "loss": 0.9168,
+ "step": 3668
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9961048630487186,
+ "learning_rate": 4.201584775499509e-06,
+ "loss": 0.894,
+ "step": 3669
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9364828786132331,
+ "learning_rate": 4.196508441941516e-06,
+ "loss": 0.9498,
+ "step": 3670
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8557525875880314,
+ "learning_rate": 4.191434362392768e-06,
+ "loss": 0.8834,
+ "step": 3671
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.7465234278774255,
+ "learning_rate": 4.186362538823981e-06,
+ "loss": 0.7581,
+ "step": 3672
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0441284821899917,
+ "learning_rate": 4.181292973204992e-06,
+ "loss": 0.8156,
+ "step": 3673
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8541623284036627,
+ "learning_rate": 4.1762256675047655e-06,
+ "loss": 0.8623,
+ "step": 3674
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9314759176837049,
+ "learning_rate": 4.171160623691384e-06,
+ "loss": 0.8624,
+ "step": 3675
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.7738345748275365,
+ "learning_rate": 4.166097843732048e-06,
+ "loss": 0.8071,
+ "step": 3676
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0502808710711233,
+ "learning_rate": 4.161037329593085e-06,
+ "loss": 0.9185,
+ "step": 3677
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9624173301340887,
+ "learning_rate": 4.155979083239942e-06,
+ "loss": 0.9636,
+ "step": 3678
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0671638564561505,
+ "learning_rate": 4.1509231066371815e-06,
+ "loss": 0.8894,
+ "step": 3679
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8506539202106217,
+ "learning_rate": 4.1458694017484915e-06,
+ "loss": 0.8199,
+ "step": 3680
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8824345879063262,
+ "learning_rate": 4.140817970536664e-06,
+ "loss": 0.8406,
+ "step": 3681
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8670283831534198,
+ "learning_rate": 4.135768814963622e-06,
+ "loss": 0.8598,
+ "step": 3682
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9740382190337725,
+ "learning_rate": 4.130721936990399e-06,
+ "loss": 0.8864,
+ "step": 3683
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.038045285775576,
+ "learning_rate": 4.1256773385771444e-06,
+ "loss": 0.8607,
+ "step": 3684
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8796107308648102,
+ "learning_rate": 4.120635021683122e-06,
+ "loss": 0.8795,
+ "step": 3685
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8135501191770208,
+ "learning_rate": 4.115594988266711e-06,
+ "loss": 0.8811,
+ "step": 3686
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9422800006768904,
+ "learning_rate": 4.1105572402853976e-06,
+ "loss": 0.9114,
+ "step": 3687
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0563904231148622,
+ "learning_rate": 4.1055217796957895e-06,
+ "loss": 0.9047,
+ "step": 3688
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9743667412318251,
+ "learning_rate": 4.100488608453599e-06,
+ "loss": 0.8691,
+ "step": 3689
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.912896943805015,
+ "learning_rate": 4.095457728513652e-06,
+ "loss": 0.8444,
+ "step": 3690
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9309317896956503,
+ "learning_rate": 4.09042914182989e-06,
+ "loss": 0.8514,
+ "step": 3691
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8899955444001727,
+ "learning_rate": 4.08540285035535e-06,
+ "loss": 0.8736,
+ "step": 3692
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.1507668624737333,
+ "learning_rate": 4.0803788560421885e-06,
+ "loss": 0.8964,
+ "step": 3693
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9678083861215927,
+ "learning_rate": 4.075357160841671e-06,
+ "loss": 0.9272,
+ "step": 3694
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9188195767360579,
+ "learning_rate": 4.070337766704155e-06,
+ "loss": 0.8804,
+ "step": 3695
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.869403803685755,
+ "learning_rate": 4.065320675579132e-06,
+ "loss": 0.8146,
+ "step": 3696
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9021898867146443,
+ "learning_rate": 4.0603058894151685e-06,
+ "loss": 0.8502,
+ "step": 3697
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0581369795813984,
+ "learning_rate": 4.055293410159954e-06,
+ "loss": 0.8897,
+ "step": 3698
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.970168825713092,
+ "learning_rate": 4.050283239760282e-06,
+ "loss": 0.8907,
+ "step": 3699
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.868714997732219,
+ "learning_rate": 4.045275380162038e-06,
+ "loss": 0.8378,
+ "step": 3700
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.105015776049482,
+ "learning_rate": 4.04026983331022e-06,
+ "loss": 0.9041,
+ "step": 3701
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9166576639363467,
+ "learning_rate": 4.035266601148924e-06,
+ "loss": 0.8837,
+ "step": 3702
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0362019327067018,
+ "learning_rate": 4.03026568562135e-06,
+ "loss": 0.947,
+ "step": 3703
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9224286147142222,
+ "learning_rate": 4.025267088669797e-06,
+ "loss": 0.8797,
+ "step": 3704
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9790592463351313,
+ "learning_rate": 4.020270812235656e-06,
+ "loss": 0.8821,
+ "step": 3705
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8967074029017275,
+ "learning_rate": 4.015276858259427e-06,
+ "loss": 0.8708,
+ "step": 3706
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8484150230651468,
+ "learning_rate": 4.010285228680705e-06,
+ "loss": 0.8294,
+ "step": 3707
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8906581283251357,
+ "learning_rate": 4.005295925438181e-06,
+ "loss": 0.8891,
+ "step": 3708
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8936506974123093,
+ "learning_rate": 4.000308950469646e-06,
+ "loss": 0.913,
+ "step": 3709
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.976798839104124,
+ "learning_rate": 3.995324305711976e-06,
+ "loss": 0.8496,
+ "step": 3710
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8313037620095529,
+ "learning_rate": 3.990341993101154e-06,
+ "loss": 0.8452,
+ "step": 3711
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8763795340947595,
+ "learning_rate": 3.985362014572256e-06,
+ "loss": 0.9,
+ "step": 3712
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8855051717952711,
+ "learning_rate": 3.9803843720594385e-06,
+ "loss": 0.9288,
+ "step": 3713
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.911714889970517,
+ "learning_rate": 3.97540906749597e-06,
+ "loss": 0.8818,
+ "step": 3714
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9642610130649074,
+ "learning_rate": 3.970436102814203e-06,
+ "loss": 0.927,
+ "step": 3715
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.088904292924162,
+ "learning_rate": 3.965465479945569e-06,
+ "loss": 0.8459,
+ "step": 3716
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9484766597574151,
+ "learning_rate": 3.9604972008206085e-06,
+ "loss": 0.9588,
+ "step": 3717
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.319476922702796,
+ "learning_rate": 3.955531267368942e-06,
+ "loss": 0.8339,
+ "step": 3718
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9094710397946618,
+ "learning_rate": 3.950567681519279e-06,
+ "loss": 0.8279,
+ "step": 3719
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0670769304223973,
+ "learning_rate": 3.945606445199427e-06,
+ "loss": 0.962,
+ "step": 3720
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.902022579528049,
+ "learning_rate": 3.940647560336262e-06,
+ "loss": 0.8559,
+ "step": 3721
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0879261141181404,
+ "learning_rate": 3.935691028855763e-06,
+ "loss": 0.8914,
+ "step": 3722
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9768311946293221,
+ "learning_rate": 3.930736852682993e-06,
+ "loss": 0.8868,
+ "step": 3723
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.995014398280607,
+ "learning_rate": 3.9257850337420856e-06,
+ "loss": 0.8997,
+ "step": 3724
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.086378818739939,
+ "learning_rate": 3.920835573956285e-06,
+ "loss": 1.0138,
+ "step": 3725
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9072120853209293,
+ "learning_rate": 3.915888475247894e-06,
+ "loss": 0.838,
+ "step": 3726
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8944818816763047,
+ "learning_rate": 3.910943739538313e-06,
+ "loss": 0.843,
+ "step": 3727
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.887414896353494,
+ "learning_rate": 3.906001368748023e-06,
+ "loss": 0.8405,
+ "step": 3728
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9071563125303521,
+ "learning_rate": 3.901061364796574e-06,
+ "loss": 0.8688,
+ "step": 3729
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9468193201052529,
+ "learning_rate": 3.8961237296026155e-06,
+ "loss": 0.8674,
+ "step": 3730
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9489145365417119,
+ "learning_rate": 3.891188465083865e-06,
+ "loss": 0.89,
+ "step": 3731
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8841238683831928,
+ "learning_rate": 3.886255573157121e-06,
+ "loss": 0.8566,
+ "step": 3732
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.849586270418284,
+ "learning_rate": 3.88132505573827e-06,
+ "loss": 0.9073,
+ "step": 3733
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8556714443775648,
+ "learning_rate": 3.876396914742258e-06,
+ "loss": 0.9178,
+ "step": 3734
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0071523406950287,
+ "learning_rate": 3.871471152083121e-06,
+ "loss": 0.871,
+ "step": 3735
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8090934902247617,
+ "learning_rate": 3.866547769673968e-06,
+ "loss": 0.8786,
+ "step": 3736
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.7782592851221304,
+ "learning_rate": 3.861626769426988e-06,
+ "loss": 0.8415,
+ "step": 3737
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9180548623713223,
+ "learning_rate": 3.8567081532534374e-06,
+ "loss": 0.8944,
+ "step": 3738
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9356932893834743,
+ "learning_rate": 3.851791923063655e-06,
+ "loss": 0.8429,
+ "step": 3739
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9364320056026952,
+ "learning_rate": 3.846878080767039e-06,
+ "loss": 0.861,
+ "step": 3740
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9460475861296517,
+ "learning_rate": 3.841966628272079e-06,
+ "loss": 0.9219,
+ "step": 3741
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9059453261456984,
+ "learning_rate": 3.837057567486314e-06,
+ "loss": 0.8831,
+ "step": 3742
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0830084578853572,
+ "learning_rate": 3.832150900316377e-06,
+ "loss": 0.9647,
+ "step": 3743
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9417066203729135,
+ "learning_rate": 3.827246628667962e-06,
+ "loss": 0.9004,
+ "step": 3744
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8675209367711789,
+ "learning_rate": 3.822344754445826e-06,
+ "loss": 0.8551,
+ "step": 3745
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8383371044739671,
+ "learning_rate": 3.817445279553801e-06,
+ "loss": 0.8697,
+ "step": 3746
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.896975626808194,
+ "learning_rate": 3.8125482058947905e-06,
+ "loss": 0.8411,
+ "step": 3747
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9631984873338313,
+ "learning_rate": 3.8076535353707523e-06,
+ "loss": 0.8276,
+ "step": 3748
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9013302310877005,
+ "learning_rate": 3.8027612698827344e-06,
+ "loss": 0.9086,
+ "step": 3749
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8654966212586118,
+ "learning_rate": 3.7978714113308246e-06,
+ "loss": 0.8791,
+ "step": 3750
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9333767603847324,
+ "learning_rate": 3.7929839616141917e-06,
+ "loss": 0.8529,
+ "step": 3751
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9179758734883217,
+ "learning_rate": 3.788098922631067e-06,
+ "loss": 0.9202,
+ "step": 3752
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9229409872103111,
+ "learning_rate": 3.7832162962787355e-06,
+ "loss": 0.8882,
+ "step": 3753
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0139239200914962,
+ "learning_rate": 3.7783360844535653e-06,
+ "loss": 0.8768,
+ "step": 3754
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8305625168363362,
+ "learning_rate": 3.773458289050963e-06,
+ "loss": 0.785,
+ "step": 3755
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9940432238969626,
+ "learning_rate": 3.768582911965414e-06,
+ "loss": 0.898,
+ "step": 3756
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8831650295924641,
+ "learning_rate": 3.763709955090461e-06,
+ "loss": 0.8713,
+ "step": 3757
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0273323162697248,
+ "learning_rate": 3.7588394203186963e-06,
+ "loss": 0.93,
+ "step": 3758
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8402393066588723,
+ "learning_rate": 3.753971309541784e-06,
+ "loss": 0.9176,
+ "step": 3759
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9020695278691016,
+ "learning_rate": 3.7491056246504433e-06,
+ "loss": 0.9211,
+ "step": 3760
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9009607918800239,
+ "learning_rate": 3.7442423675344474e-06,
+ "loss": 0.9042,
+ "step": 3761
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8580907122523047,
+ "learning_rate": 3.739381540082635e-06,
+ "loss": 0.8688,
+ "step": 3762
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0207892189622805,
+ "learning_rate": 3.7345231441828876e-06,
+ "loss": 0.9277,
+ "step": 3763
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9646886351451739,
+ "learning_rate": 3.729667181722154e-06,
+ "loss": 0.895,
+ "step": 3764
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.844103034268996,
+ "learning_rate": 3.7248136545864345e-06,
+ "loss": 0.8389,
+ "step": 3765
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8795968771573153,
+ "learning_rate": 3.719962564660783e-06,
+ "loss": 0.9113,
+ "step": 3766
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0311629852878772,
+ "learning_rate": 3.7151139138293056e-06,
+ "loss": 0.9685,
+ "step": 3767
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9008176806044091,
+ "learning_rate": 3.7102677039751667e-06,
+ "loss": 0.8292,
+ "step": 3768
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9876712228116549,
+ "learning_rate": 3.705423936980572e-06,
+ "loss": 0.8982,
+ "step": 3769
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8662282545257485,
+ "learning_rate": 3.700582614726791e-06,
+ "loss": 0.9215,
+ "step": 3770
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.877429447426921,
+ "learning_rate": 3.6957437390941274e-06,
+ "loss": 0.871,
+ "step": 3771
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9509863016409777,
+ "learning_rate": 3.6909073119619555e-06,
+ "loss": 0.8889,
+ "step": 3772
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9126104542246645,
+ "learning_rate": 3.6860733352086866e-06,
+ "loss": 0.8137,
+ "step": 3773
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9830157855203383,
+ "learning_rate": 3.6812418107117765e-06,
+ "loss": 0.8587,
+ "step": 3774
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8835491755842453,
+ "learning_rate": 3.6764127403477347e-06,
+ "loss": 0.8573,
+ "step": 3775
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8986686975313434,
+ "learning_rate": 3.6715861259921226e-06,
+ "loss": 0.8854,
+ "step": 3776
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.925232639208042,
+ "learning_rate": 3.6667619695195287e-06,
+ "loss": 0.887,
+ "step": 3777
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8889916711790783,
+ "learning_rate": 3.6619402728036157e-06,
+ "loss": 0.8327,
+ "step": 3778
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8844815945929162,
+ "learning_rate": 3.657121037717064e-06,
+ "loss": 0.9186,
+ "step": 3779
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9206190350339126,
+ "learning_rate": 3.652304266131612e-06,
+ "loss": 0.8743,
+ "step": 3780
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9228023702640018,
+ "learning_rate": 3.6474899599180426e-06,
+ "loss": 0.8922,
+ "step": 3781
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.0192228075406065,
+ "learning_rate": 3.642678120946168e-06,
+ "loss": 0.9031,
+ "step": 3782
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9149993805947546,
+ "learning_rate": 3.6378687510848576e-06,
+ "loss": 0.8891,
+ "step": 3783
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8161914582243244,
+ "learning_rate": 3.6330618522020124e-06,
+ "loss": 0.9129,
+ "step": 3784
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8442882576276589,
+ "learning_rate": 3.6282574261645776e-06,
+ "loss": 0.8699,
+ "step": 3785
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.937560353472258,
+ "learning_rate": 3.62345547483854e-06,
+ "loss": 0.8278,
+ "step": 3786
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9279691052881753,
+ "learning_rate": 3.618656000088916e-06,
+ "loss": 0.9297,
+ "step": 3787
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8434983729293868,
+ "learning_rate": 3.6138590037797695e-06,
+ "loss": 0.828,
+ "step": 3788
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8572199091834554,
+ "learning_rate": 3.6090644877741986e-06,
+ "loss": 0.8668,
+ "step": 3789
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8270755574423517,
+ "learning_rate": 3.6042724539343378e-06,
+ "loss": 0.8988,
+ "step": 3790
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.924326821040318,
+ "learning_rate": 3.599482904121361e-06,
+ "loss": 0.8947,
+ "step": 3791
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9984656300391526,
+ "learning_rate": 3.594695840195468e-06,
+ "loss": 0.8627,
+ "step": 3792
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9244615897474393,
+ "learning_rate": 3.5899112640159017e-06,
+ "loss": 0.9139,
+ "step": 3793
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8220097609234314,
+ "learning_rate": 3.585129177440938e-06,
+ "loss": 0.9084,
+ "step": 3794
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9956606576720209,
+ "learning_rate": 3.580349582327882e-06,
+ "loss": 0.9089,
+ "step": 3795
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9076291400880901,
+ "learning_rate": 3.575572480533076e-06,
+ "loss": 0.8554,
+ "step": 3796
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8625138392020574,
+ "learning_rate": 3.570797873911892e-06,
+ "loss": 0.8523,
+ "step": 3797
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.053483439962704,
+ "learning_rate": 3.566025764318728e-06,
+ "loss": 0.8772,
+ "step": 3798
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9282207856467962,
+ "learning_rate": 3.5612561536070213e-06,
+ "loss": 0.881,
+ "step": 3799
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9230812349575398,
+ "learning_rate": 3.5564890436292243e-06,
+ "loss": 0.9451,
+ "step": 3800
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9694542363185262,
+ "learning_rate": 3.5517244362368363e-06,
+ "loss": 0.9329,
+ "step": 3801
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9840594327042361,
+ "learning_rate": 3.5469623332803795e-06,
+ "loss": 0.9131,
+ "step": 3802
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8896833948392223,
+ "learning_rate": 3.5422027366093893e-06,
+ "loss": 0.9036,
+ "step": 3803
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9454777156720028,
+ "learning_rate": 3.5374456480724427e-06,
+ "loss": 0.9143,
+ "step": 3804
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8921333290905952,
+ "learning_rate": 3.532691069517142e-06,
+ "loss": 0.857,
+ "step": 3805
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.7135657626973516,
+ "learning_rate": 3.5279390027901004e-06,
+ "loss": 0.83,
+ "step": 3806
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.1027756347210746,
+ "learning_rate": 3.5231894497369802e-06,
+ "loss": 0.871,
+ "step": 3807
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8528195294796473,
+ "learning_rate": 3.5184424122024406e-06,
+ "loss": 0.8525,
+ "step": 3808
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9336178795936354,
+ "learning_rate": 3.5136978920301822e-06,
+ "loss": 0.8834,
+ "step": 3809
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8806797277091982,
+ "learning_rate": 3.508955891062924e-06,
+ "loss": 0.8245,
+ "step": 3810
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8683615528003018,
+ "learning_rate": 3.5042164111423983e-06,
+ "loss": 0.8264,
+ "step": 3811
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9440372185903397,
+ "learning_rate": 3.4994794541093667e-06,
+ "loss": 0.9524,
+ "step": 3812
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.1584916221881574,
+ "learning_rate": 3.4947450218036106e-06,
+ "loss": 0.8967,
+ "step": 3813
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8858977602295007,
+ "learning_rate": 3.4900131160639283e-06,
+ "loss": 0.8261,
+ "step": 3814
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9012923918685176,
+ "learning_rate": 3.485283738728139e-06,
+ "loss": 0.8689,
+ "step": 3815
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9050659875092949,
+ "learning_rate": 3.4805568916330747e-06,
+ "loss": 0.8779,
+ "step": 3816
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9245864252851095,
+ "learning_rate": 3.4758325766145896e-06,
+ "loss": 0.9252,
+ "step": 3817
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8432005259251041,
+ "learning_rate": 3.471110795507554e-06,
+ "loss": 0.877,
+ "step": 3818
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8981035108024167,
+ "learning_rate": 3.4663915501458523e-06,
+ "loss": 0.8993,
+ "step": 3819
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.0600438461788986,
+ "learning_rate": 3.4616748423623893e-06,
+ "loss": 0.8758,
+ "step": 3820
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0540241762688183,
+ "learning_rate": 3.4569606739890737e-06,
+ "loss": 0.9104,
+ "step": 3821
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9115841602144225,
+ "learning_rate": 3.452249046856836e-06,
+ "loss": 0.9174,
+ "step": 3822
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0016106992660168,
+ "learning_rate": 3.4475399627956197e-06,
+ "loss": 0.882,
+ "step": 3823
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.1025168971628794,
+ "learning_rate": 3.4428334236343774e-06,
+ "loss": 0.8971,
+ "step": 3824
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0077975820945424,
+ "learning_rate": 3.438129431201075e-06,
+ "loss": 0.9041,
+ "step": 3825
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9622346034155563,
+ "learning_rate": 3.433427987322693e-06,
+ "loss": 0.8852,
+ "step": 3826
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9901312244363298,
+ "learning_rate": 3.4287290938252103e-06,
+ "loss": 0.9628,
+ "step": 3827
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8865845180571811,
+ "learning_rate": 3.424032752533627e-06,
+ "loss": 0.8573,
+ "step": 3828
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9971330704212689,
+ "learning_rate": 3.4193389652719478e-06,
+ "loss": 0.8595,
+ "step": 3829
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0212239287622955,
+ "learning_rate": 3.4146477338631856e-06,
+ "loss": 0.8652,
+ "step": 3830
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0018839210365473,
+ "learning_rate": 3.4099590601293632e-06,
+ "loss": 0.8818,
+ "step": 3831
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9634015411273907,
+ "learning_rate": 3.4052729458915024e-06,
+ "loss": 0.9257,
+ "step": 3832
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9051038008041504,
+ "learning_rate": 3.4005893929696377e-06,
+ "loss": 0.864,
+ "step": 3833
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9773506088648896,
+ "learning_rate": 3.3959084031828114e-06,
+ "loss": 0.8858,
+ "step": 3834
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0271759026403005,
+ "learning_rate": 3.3912299783490567e-06,
+ "loss": 0.9403,
+ "step": 3835
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9675445160786589,
+ "learning_rate": 3.3865541202854314e-06,
+ "loss": 0.8929,
+ "step": 3836
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9584455722139147,
+ "learning_rate": 3.3818808308079753e-06,
+ "loss": 0.8911,
+ "step": 3837
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9138810258332464,
+ "learning_rate": 3.3772101117317437e-06,
+ "loss": 0.9271,
+ "step": 3838
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.021130646460032,
+ "learning_rate": 3.372541964870795e-06,
+ "loss": 0.9575,
+ "step": 3839
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8581462216709524,
+ "learning_rate": 3.367876392038174e-06,
+ "loss": 0.9059,
+ "step": 3840
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.2142157453153244,
+ "learning_rate": 3.363213395045941e-06,
+ "loss": 0.9043,
+ "step": 3841
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.904892437735682,
+ "learning_rate": 3.3585529757051504e-06,
+ "loss": 0.8587,
+ "step": 3842
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.6982794348245435,
+ "learning_rate": 3.353895135825854e-06,
+ "loss": 0.8143,
+ "step": 3843
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.901569538506316,
+ "learning_rate": 3.3492398772171074e-06,
+ "loss": 0.9083,
+ "step": 3844
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0673496105059448,
+ "learning_rate": 3.344587201686952e-06,
+ "loss": 0.9181,
+ "step": 3845
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8687097006222544,
+ "learning_rate": 3.3399371110424372e-06,
+ "loss": 0.9455,
+ "step": 3846
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9077193266238223,
+ "learning_rate": 3.3352896070896057e-06,
+ "loss": 0.9256,
+ "step": 3847
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8722578281757957,
+ "learning_rate": 3.330644691633492e-06,
+ "loss": 0.9152,
+ "step": 3848
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8528602763484596,
+ "learning_rate": 3.3260023664781326e-06,
+ "loss": 0.9078,
+ "step": 3849
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8345205411570323,
+ "learning_rate": 3.321362633426547e-06,
+ "loss": 0.8108,
+ "step": 3850
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9559890862890268,
+ "learning_rate": 3.316725494280757e-06,
+ "loss": 0.9015,
+ "step": 3851
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9670124868351486,
+ "learning_rate": 3.3120909508417754e-06,
+ "loss": 0.8538,
+ "step": 3852
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0259181840795324,
+ "learning_rate": 3.307459004909599e-06,
+ "loss": 0.9078,
+ "step": 3853
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8985185254919762,
+ "learning_rate": 3.3028296582832285e-06,
+ "loss": 0.912,
+ "step": 3854
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8807026809368774,
+ "learning_rate": 3.2982029127606517e-06,
+ "loss": 0.8238,
+ "step": 3855
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9674412742423509,
+ "learning_rate": 3.2935787701388346e-06,
+ "loss": 0.8398,
+ "step": 3856
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8801883586144479,
+ "learning_rate": 3.2889572322137454e-06,
+ "loss": 0.8291,
+ "step": 3857
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8129415192200613,
+ "learning_rate": 3.2843383007803364e-06,
+ "loss": 0.8318,
+ "step": 3858
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0665443183875298,
+ "learning_rate": 3.279721977632546e-06,
+ "loss": 0.8963,
+ "step": 3859
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0169379053982934,
+ "learning_rate": 3.275108264563306e-06,
+ "loss": 0.8996,
+ "step": 3860
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9653918063759743,
+ "learning_rate": 3.270497163364521e-06,
+ "loss": 0.9263,
+ "step": 3861
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0804833222073364,
+ "learning_rate": 3.2658886758270947e-06,
+ "loss": 1.01,
+ "step": 3862
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.7825848052851337,
+ "learning_rate": 3.2612828037409116e-06,
+ "loss": 0.8095,
+ "step": 3863
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8740520318111609,
+ "learning_rate": 3.256679548894831e-06,
+ "loss": 0.8568,
+ "step": 3864
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9248697002242029,
+ "learning_rate": 3.252078913076718e-06,
+ "loss": 0.8778,
+ "step": 3865
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9166993605718344,
+ "learning_rate": 3.247480898073395e-06,
+ "loss": 0.9255,
+ "step": 3866
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.942994888452231,
+ "learning_rate": 3.242885505670681e-06,
+ "loss": 0.8851,
+ "step": 3867
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8579958740991174,
+ "learning_rate": 3.238292737653379e-06,
+ "loss": 0.7956,
+ "step": 3868
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0671331629788738,
+ "learning_rate": 3.233702595805258e-06,
+ "loss": 0.8741,
+ "step": 3869
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0403988775450346,
+ "learning_rate": 3.229115081909082e-06,
+ "loss": 0.9403,
+ "step": 3870
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8396367235125283,
+ "learning_rate": 3.224530197746587e-06,
+ "loss": 0.8832,
+ "step": 3871
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.941308923849184,
+ "learning_rate": 3.2199479450984892e-06,
+ "loss": 0.9302,
+ "step": 3872
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7109457997345888,
+ "learning_rate": 3.2153683257444856e-06,
+ "loss": 0.8001,
+ "step": 3873
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9168204463099244,
+ "learning_rate": 3.210791341463243e-06,
+ "loss": 0.8222,
+ "step": 3874
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.1756587432014478,
+ "learning_rate": 3.206216994032411e-06,
+ "loss": 0.8802,
+ "step": 3875
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9711068750938427,
+ "learning_rate": 3.2016452852286127e-06,
+ "loss": 0.9294,
+ "step": 3876
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8328199420707374,
+ "learning_rate": 3.1970762168274495e-06,
+ "loss": 0.8489,
+ "step": 3877
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.970375815132571,
+ "learning_rate": 3.1925097906034962e-06,
+ "loss": 0.8944,
+ "step": 3878
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8361310216799411,
+ "learning_rate": 3.187946008330295e-06,
+ "loss": 0.8675,
+ "step": 3879
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8372116888064216,
+ "learning_rate": 3.1833848717803674e-06,
+ "loss": 0.8857,
+ "step": 3880
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9518388836739485,
+ "learning_rate": 3.178826382725212e-06,
+ "loss": 0.8927,
+ "step": 3881
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7763825392554807,
+ "learning_rate": 3.1742705429352827e-06,
+ "loss": 0.8024,
+ "step": 3882
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9627172510352615,
+ "learning_rate": 3.1697173541800254e-06,
+ "loss": 0.9271,
+ "step": 3883
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0784742072275648,
+ "learning_rate": 3.165166818227845e-06,
+ "loss": 0.9468,
+ "step": 3884
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9122477128885562,
+ "learning_rate": 3.1606189368461117e-06,
+ "loss": 0.8504,
+ "step": 3885
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0019713180779037,
+ "learning_rate": 3.156073711801172e-06,
+ "loss": 0.9236,
+ "step": 3886
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0756036994552267,
+ "learning_rate": 3.151531144858344e-06,
+ "loss": 0.9483,
+ "step": 3887
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9158802438241802,
+ "learning_rate": 3.146991237781899e-06,
+ "loss": 0.843,
+ "step": 3888
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.144773394801962,
+ "learning_rate": 3.142453992335096e-06,
+ "loss": 0.8776,
+ "step": 3889
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9731819597349555,
+ "learning_rate": 3.137919410280139e-06,
+ "loss": 0.8764,
+ "step": 3890
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8748329635929616,
+ "learning_rate": 3.1333874933782114e-06,
+ "loss": 0.902,
+ "step": 3891
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9314269659571606,
+ "learning_rate": 3.128858243389461e-06,
+ "loss": 0.909,
+ "step": 3892
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8906883757821964,
+ "learning_rate": 3.124331662072987e-06,
+ "loss": 0.9506,
+ "step": 3893
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8106075732357755,
+ "learning_rate": 3.119807751186872e-06,
+ "loss": 0.8466,
+ "step": 3894
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7576273502711641,
+ "learning_rate": 3.1152865124881436e-06,
+ "loss": 0.8018,
+ "step": 3895
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8414156539000981,
+ "learning_rate": 3.110767947732801e-06,
+ "loss": 0.8405,
+ "step": 3896
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9285031095149952,
+ "learning_rate": 3.106252058675806e-06,
+ "loss": 0.9048,
+ "step": 3897
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.052669313469202,
+ "learning_rate": 3.101738847071072e-06,
+ "loss": 0.8893,
+ "step": 3898
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.854607871535346,
+ "learning_rate": 3.097228314671481e-06,
+ "loss": 0.9223,
+ "step": 3899
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9140545124332589,
+ "learning_rate": 3.092720463228872e-06,
+ "loss": 0.9431,
+ "step": 3900
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9498296331245554,
+ "learning_rate": 3.0882152944940423e-06,
+ "loss": 0.8876,
+ "step": 3901
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8881688900569868,
+ "learning_rate": 3.0837128102167514e-06,
+ "loss": 0.9169,
+ "step": 3902
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9882043928296107,
+ "learning_rate": 3.079213012145705e-06,
+ "loss": 0.8815,
+ "step": 3903
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8758226010035889,
+ "learning_rate": 3.0747159020285766e-06,
+ "loss": 0.8993,
+ "step": 3904
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8950531987539941,
+ "learning_rate": 3.0702214816119925e-06,
+ "loss": 0.8729,
+ "step": 3905
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9594547393814911,
+ "learning_rate": 3.065729752641532e-06,
+ "loss": 0.8942,
+ "step": 3906
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0051751598929568,
+ "learning_rate": 3.0612407168617352e-06,
+ "loss": 0.9542,
+ "step": 3907
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8311213445322736,
+ "learning_rate": 3.0567543760160866e-06,
+ "loss": 0.8194,
+ "step": 3908
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0236288594295317,
+ "learning_rate": 3.05227073184703e-06,
+ "loss": 0.8839,
+ "step": 3909
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8903513302834326,
+ "learning_rate": 3.047789786095967e-06,
+ "loss": 0.8664,
+ "step": 3910
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0508218390928494,
+ "learning_rate": 3.0433115405032334e-06,
+ "loss": 0.9473,
+ "step": 3911
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7852377591241896,
+ "learning_rate": 3.0388359968081395e-06,
+ "loss": 0.8157,
+ "step": 3912
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.3317701777457944,
+ "learning_rate": 3.034363156748933e-06,
+ "loss": 0.9152,
+ "step": 3913
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.97264052334293,
+ "learning_rate": 3.0298930220628086e-06,
+ "loss": 0.8268,
+ "step": 3914
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8431080760763496,
+ "learning_rate": 3.025425594485919e-06,
+ "loss": 0.8334,
+ "step": 3915
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9659543078257832,
+ "learning_rate": 3.0209608757533626e-06,
+ "loss": 0.9043,
+ "step": 3916
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8277431354918725,
+ "learning_rate": 3.0164988675991768e-06,
+ "loss": 0.9059,
+ "step": 3917
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9681457535692266,
+ "learning_rate": 3.0120395717563655e-06,
+ "loss": 0.8347,
+ "step": 3918
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0696919917523706,
+ "learning_rate": 3.00758298995686e-06,
+ "loss": 0.9063,
+ "step": 3919
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8832360160073505,
+ "learning_rate": 3.0031291239315473e-06,
+ "loss": 0.8955,
+ "step": 3920
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9513983119172031,
+ "learning_rate": 2.9986779754102613e-06,
+ "loss": 0.8222,
+ "step": 3921
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0298065915415877,
+ "learning_rate": 2.9942295461217698e-06,
+ "loss": 0.9406,
+ "step": 3922
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9344331046185815,
+ "learning_rate": 2.9897838377937947e-06,
+ "loss": 0.8591,
+ "step": 3923
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9697692593743283,
+ "learning_rate": 2.985340852152999e-06,
+ "loss": 0.9087,
+ "step": 3924
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8768939678506315,
+ "learning_rate": 2.9809005909249866e-06,
+ "loss": 0.9502,
+ "step": 3925
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9118163255293353,
+ "learning_rate": 2.9764630558343064e-06,
+ "loss": 0.9045,
+ "step": 3926
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7673649652691692,
+ "learning_rate": 2.9720282486044407e-06,
+ "loss": 0.8274,
+ "step": 3927
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9151544803470711,
+ "learning_rate": 2.9675961709578194e-06,
+ "loss": 0.9434,
+ "step": 3928
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.921591784931005,
+ "learning_rate": 2.9631668246158105e-06,
+ "loss": 0.8549,
+ "step": 3929
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0492749845033982,
+ "learning_rate": 2.958740211298722e-06,
+ "loss": 0.9197,
+ "step": 3930
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9882213471656914,
+ "learning_rate": 2.9543163327258016e-06,
+ "loss": 0.912,
+ "step": 3931
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8140070065617956,
+ "learning_rate": 2.949895190615227e-06,
+ "loss": 0.8183,
+ "step": 3932
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9866899907692384,
+ "learning_rate": 2.9454767866841225e-06,
+ "loss": 0.8712,
+ "step": 3933
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9893039822513295,
+ "learning_rate": 2.941061122648545e-06,
+ "loss": 0.9179,
+ "step": 3934
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9258487979575096,
+ "learning_rate": 2.9366482002234874e-06,
+ "loss": 0.9134,
+ "step": 3935
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9759343029344542,
+ "learning_rate": 2.932238021122877e-06,
+ "loss": 0.8791,
+ "step": 3936
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9033871906886145,
+ "learning_rate": 2.9278305870595814e-06,
+ "loss": 0.8056,
+ "step": 3937
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0131907727090592,
+ "learning_rate": 2.92342589974539e-06,
+ "loss": 0.9067,
+ "step": 3938
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8632993518217443,
+ "learning_rate": 2.919023960891039e-06,
+ "loss": 0.8436,
+ "step": 3939
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9046033199438586,
+ "learning_rate": 2.9146247722061806e-06,
+ "loss": 0.9821,
+ "step": 3940
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.887536273847992,
+ "learning_rate": 2.910228335399419e-06,
+ "loss": 0.8661,
+ "step": 3941
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9516740042440787,
+ "learning_rate": 2.90583465217828e-06,
+ "loss": 0.843,
+ "step": 3942
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7621048480641931,
+ "learning_rate": 2.9014437242492133e-06,
+ "loss": 0.8682,
+ "step": 3943
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9378964700898026,
+ "learning_rate": 2.897055553317607e-06,
+ "loss": 0.9173,
+ "step": 3944
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9375154849104759,
+ "learning_rate": 2.89267014108778e-06,
+ "loss": 0.9205,
+ "step": 3945
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9078507796143372,
+ "learning_rate": 2.8882874892629654e-06,
+ "loss": 0.8917,
+ "step": 3946
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7924118884903499,
+ "learning_rate": 2.883907599545348e-06,
+ "loss": 0.8755,
+ "step": 3947
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9845009472008538,
+ "learning_rate": 2.8795304736360184e-06,
+ "loss": 0.9097,
+ "step": 3948
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9330035909593539,
+ "learning_rate": 2.8751561132350025e-06,
+ "loss": 0.8708,
+ "step": 3949
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7318427202512602,
+ "learning_rate": 2.8707845200412567e-06,
+ "loss": 0.8349,
+ "step": 3950
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9082116900829754,
+ "learning_rate": 2.866415695752649e-06,
+ "loss": 0.9196,
+ "step": 3951
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9088991120971996,
+ "learning_rate": 2.862049642065986e-06,
+ "loss": 0.9053,
+ "step": 3952
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8610842604456892,
+ "learning_rate": 2.857686360676991e-06,
+ "loss": 0.8836,
+ "step": 3953
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9041538069995724,
+ "learning_rate": 2.853325853280312e-06,
+ "loss": 0.8356,
+ "step": 3954
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8539247020789565,
+ "learning_rate": 2.8489681215695242e-06,
+ "loss": 0.9016,
+ "step": 3955
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.909722474364938,
+ "learning_rate": 2.8446131672371136e-06,
+ "loss": 0.9056,
+ "step": 3956
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9037994274043482,
+ "learning_rate": 2.840260991974497e-06,
+ "loss": 0.8243,
+ "step": 3957
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8707120164808277,
+ "learning_rate": 2.83591159747201e-06,
+ "loss": 0.8917,
+ "step": 3958
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9462037466305331,
+ "learning_rate": 2.8315649854189066e-06,
+ "loss": 0.9,
+ "step": 3959
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9059820462435377,
+ "learning_rate": 2.8272211575033635e-06,
+ "loss": 0.8936,
+ "step": 3960
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9792666135086842,
+ "learning_rate": 2.8228801154124687e-06,
+ "loss": 0.9378,
+ "step": 3961
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8921530343106873,
+ "learning_rate": 2.8185418608322344e-06,
+ "loss": 0.8279,
+ "step": 3962
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9150192931369385,
+ "learning_rate": 2.814206395447593e-06,
+ "loss": 0.9013,
+ "step": 3963
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8238345876329904,
+ "learning_rate": 2.80987372094238e-06,
+ "loss": 0.8487,
+ "step": 3964
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9002758978851323,
+ "learning_rate": 2.805543838999364e-06,
+ "loss": 0.8564,
+ "step": 3965
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7934565646830434,
+ "learning_rate": 2.801216751300223e-06,
+ "loss": 0.8001,
+ "step": 3966
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9701515328523692,
+ "learning_rate": 2.796892459525541e-06,
+ "loss": 0.9406,
+ "step": 3967
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.999615143967618,
+ "learning_rate": 2.7925709653548295e-06,
+ "loss": 0.8838,
+ "step": 3968
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9995070391192742,
+ "learning_rate": 2.788252270466497e-06,
+ "loss": 0.9814,
+ "step": 3969
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9313000390350881,
+ "learning_rate": 2.783936376537886e-06,
+ "loss": 0.8649,
+ "step": 3970
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9073792564934218,
+ "learning_rate": 2.7796232852452378e-06,
+ "loss": 0.9364,
+ "step": 3971
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9311758925066174,
+ "learning_rate": 2.775312998263703e-06,
+ "loss": 0.8874,
+ "step": 3972
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8825963778289101,
+ "learning_rate": 2.771005517267349e-06,
+ "loss": 0.8729,
+ "step": 3973
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0156201367078397,
+ "learning_rate": 2.7667008439291552e-06,
+ "loss": 0.9193,
+ "step": 3974
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8974079974149981,
+ "learning_rate": 2.762398979920998e-06,
+ "loss": 0.9169,
+ "step": 3975
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.1233402822593637,
+ "learning_rate": 2.7580999269136854e-06,
+ "loss": 0.8839,
+ "step": 3976
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9969838223507892,
+ "learning_rate": 2.7538036865769093e-06,
+ "loss": 0.873,
+ "step": 3977
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.936130371584019,
+ "learning_rate": 2.749510260579282e-06,
+ "loss": 0.9124,
+ "step": 3978
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9999080935634322,
+ "learning_rate": 2.7452196505883265e-06,
+ "loss": 0.8821,
+ "step": 3979
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.7232369934331282,
+ "learning_rate": 2.7409318582704594e-06,
+ "loss": 0.7474,
+ "step": 3980
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.867562927995819,
+ "learning_rate": 2.736646885291011e-06,
+ "loss": 0.9341,
+ "step": 3981
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8499419441123263,
+ "learning_rate": 2.7323647333142176e-06,
+ "loss": 0.9247,
+ "step": 3982
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8675976700902036,
+ "learning_rate": 2.728085404003217e-06,
+ "loss": 0.8664,
+ "step": 3983
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9082003014018147,
+ "learning_rate": 2.7238088990200538e-06,
+ "loss": 0.8688,
+ "step": 3984
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0136988017010031,
+ "learning_rate": 2.7195352200256675e-06,
+ "loss": 0.9222,
+ "step": 3985
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9517713920169355,
+ "learning_rate": 2.7152643686799095e-06,
+ "loss": 0.9413,
+ "step": 3986
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9765890427796976,
+ "learning_rate": 2.710996346641528e-06,
+ "loss": 0.982,
+ "step": 3987
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9450695049660316,
+ "learning_rate": 2.706731155568175e-06,
+ "loss": 0.8556,
+ "step": 3988
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8783435829796219,
+ "learning_rate": 2.7024687971164032e-06,
+ "loss": 0.8494,
+ "step": 3989
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9080928358669355,
+ "learning_rate": 2.698209272941659e-06,
+ "loss": 0.8968,
+ "step": 3990
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8795663074369697,
+ "learning_rate": 2.693952584698294e-06,
+ "loss": 0.9132,
+ "step": 3991
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8858328071594844,
+ "learning_rate": 2.689698734039561e-06,
+ "loss": 0.8763,
+ "step": 3992
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8634775510871611,
+ "learning_rate": 2.685447722617597e-06,
+ "loss": 0.8472,
+ "step": 3993
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8552191671796697,
+ "learning_rate": 2.6811995520834543e-06,
+ "loss": 0.9068,
+ "step": 3994
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9119674262779448,
+ "learning_rate": 2.676954224087075e-06,
+ "loss": 0.8004,
+ "step": 3995
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9129993004929062,
+ "learning_rate": 2.6727117402772886e-06,
+ "loss": 0.9193,
+ "step": 3996
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0262267579702624,
+ "learning_rate": 2.668472102301829e-06,
+ "loss": 0.9168,
+ "step": 3997
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8402002882161677,
+ "learning_rate": 2.664235311807327e-06,
+ "loss": 0.8912,
+ "step": 3998
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9883382087246059,
+ "learning_rate": 2.6600013704392946e-06,
+ "loss": 0.8919,
+ "step": 3999
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.1079905687977718,
+ "learning_rate": 2.655770279842157e-06,
+ "loss": 0.8696,
+ "step": 4000
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.860415843538877,
+ "learning_rate": 2.651542041659211e-06,
+ "loss": 0.9333,
+ "step": 4001
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8435081142389877,
+ "learning_rate": 2.6473166575326603e-06,
+ "loss": 0.9256,
+ "step": 4002
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0396659674376807,
+ "learning_rate": 2.6430941291035984e-06,
+ "loss": 0.9139,
+ "step": 4003
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8764834914707512,
+ "learning_rate": 2.6388744580119975e-06,
+ "loss": 0.872,
+ "step": 4004
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8885146098838413,
+ "learning_rate": 2.6346576458967397e-06,
+ "loss": 0.8757,
+ "step": 4005
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0940675197953051,
+ "learning_rate": 2.630443694395579e-06,
+ "loss": 0.9263,
+ "step": 4006
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8552372241634835,
+ "learning_rate": 2.6262326051451683e-06,
+ "loss": 0.8334,
+ "step": 4007
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9322542428007772,
+ "learning_rate": 2.6220243797810483e-06,
+ "loss": 0.9023,
+ "step": 4008
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9451728835430094,
+ "learning_rate": 2.6178190199376394e-06,
+ "loss": 0.9039,
+ "step": 4009
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.942235268274314,
+ "learning_rate": 2.6136165272482596e-06,
+ "loss": 0.9178,
+ "step": 4010
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0203195968712637,
+ "learning_rate": 2.6094169033451066e-06,
+ "loss": 0.8793,
+ "step": 4011
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8950489385865193,
+ "learning_rate": 2.605220149859267e-06,
+ "loss": 0.8705,
+ "step": 4012
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9667015002148893,
+ "learning_rate": 2.6010262684207134e-06,
+ "loss": 0.8429,
+ "step": 4013
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8242413686636327,
+ "learning_rate": 2.596835260658297e-06,
+ "loss": 0.8166,
+ "step": 4014
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8657516382124385,
+ "learning_rate": 2.5926471281997577e-06,
+ "loss": 0.8587,
+ "step": 4015
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9275974136193426,
+ "learning_rate": 2.588461872671719e-06,
+ "loss": 0.9189,
+ "step": 4016
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8849700860961025,
+ "learning_rate": 2.5842794956996863e-06,
+ "loss": 0.9164,
+ "step": 4017
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8904611707155811,
+ "learning_rate": 2.580099998908049e-06,
+ "loss": 0.9107,
+ "step": 4018
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9383535438474248,
+ "learning_rate": 2.575923383920069e-06,
+ "loss": 0.8524,
+ "step": 4019
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.824236618205373,
+ "learning_rate": 2.5717496523578998e-06,
+ "loss": 0.8418,
+ "step": 4020
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8960245871106074,
+ "learning_rate": 2.5675788058425723e-06,
+ "loss": 0.7927,
+ "step": 4021
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9997637668059359,
+ "learning_rate": 2.563410845993988e-06,
+ "loss": 0.9317,
+ "step": 4022
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0945901385876757,
+ "learning_rate": 2.5592457744309405e-06,
+ "loss": 0.8989,
+ "step": 4023
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.7829907181592052,
+ "learning_rate": 2.5550835927710982e-06,
+ "loss": 0.8767,
+ "step": 4024
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8033207897195992,
+ "learning_rate": 2.5509243026309983e-06,
+ "loss": 0.8061,
+ "step": 4025
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0069900773031213,
+ "learning_rate": 2.546767905626063e-06,
+ "loss": 0.9182,
+ "step": 4026
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.1001463255802362,
+ "learning_rate": 2.5426144033705937e-06,
+ "loss": 0.9219,
+ "step": 4027
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9630224280845321,
+ "learning_rate": 2.5384637974777513e-06,
+ "loss": 0.9327,
+ "step": 4028
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0035980552605197,
+ "learning_rate": 2.5343160895595977e-06,
+ "loss": 0.7907,
+ "step": 4029
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0742246335041878,
+ "learning_rate": 2.530171281227044e-06,
+ "loss": 0.9693,
+ "step": 4030
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.2155405982873126,
+ "learning_rate": 2.52602937408989e-06,
+ "loss": 0.8633,
+ "step": 4031
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0103436831615604,
+ "learning_rate": 2.5218903697568075e-06,
+ "loss": 0.8354,
+ "step": 4032
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9607433216427947,
+ "learning_rate": 2.517754269835332e-06,
+ "loss": 0.8591,
+ "step": 4033
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.967769594887381,
+ "learning_rate": 2.5136210759318814e-06,
+ "loss": 0.8976,
+ "step": 4034
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9875104240098554,
+ "learning_rate": 2.5094907896517383e-06,
+ "loss": 0.8792,
+ "step": 4035
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.001837531331814,
+ "learning_rate": 2.505363412599059e-06,
+ "loss": 0.9074,
+ "step": 4036
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9563728281489,
+ "learning_rate": 2.5012389463768737e-06,
+ "loss": 0.882,
+ "step": 4037
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.052483921655931,
+ "learning_rate": 2.4971173925870694e-06,
+ "loss": 0.899,
+ "step": 4038
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8939458997742014,
+ "learning_rate": 2.4929987528304144e-06,
+ "loss": 0.9054,
+ "step": 4039
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9388636875761888,
+ "learning_rate": 2.4888830287065414e-06,
+ "loss": 0.873,
+ "step": 4040
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9735791321651764,
+ "learning_rate": 2.4847702218139493e-06,
+ "loss": 0.8823,
+ "step": 4041
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0298245970949436,
+ "learning_rate": 2.480660333750007e-06,
+ "loss": 0.861,
+ "step": 4042
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9908190333010166,
+ "learning_rate": 2.476553366110944e-06,
+ "loss": 0.8903,
+ "step": 4043
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9123243136347812,
+ "learning_rate": 2.4724493204918598e-06,
+ "loss": 0.9315,
+ "step": 4044
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9745453873730154,
+ "learning_rate": 2.4683481984867207e-06,
+ "loss": 0.8608,
+ "step": 4045
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9006398511842804,
+ "learning_rate": 2.4642500016883532e-06,
+ "loss": 0.8974,
+ "step": 4046
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9162971825383597,
+ "learning_rate": 2.4601547316884544e-06,
+ "loss": 0.7963,
+ "step": 4047
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7954773502759751,
+ "learning_rate": 2.4560623900775728e-06,
+ "loss": 0.8196,
+ "step": 4048
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9200893131269684,
+ "learning_rate": 2.45197297844513e-06,
+ "loss": 0.8808,
+ "step": 4049
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.963848590067518,
+ "learning_rate": 2.447886498379409e-06,
+ "loss": 0.9146,
+ "step": 4050
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8370481026817104,
+ "learning_rate": 2.4438029514675444e-06,
+ "loss": 0.8167,
+ "step": 4051
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9124154083158099,
+ "learning_rate": 2.439722339295545e-06,
+ "loss": 0.8688,
+ "step": 4052
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9596788206716473,
+ "learning_rate": 2.4356446634482756e-06,
+ "loss": 0.9,
+ "step": 4053
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9026104302825495,
+ "learning_rate": 2.4315699255094516e-06,
+ "loss": 0.8507,
+ "step": 4054
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8799880059621884,
+ "learning_rate": 2.427498127061658e-06,
+ "loss": 0.8473,
+ "step": 4055
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.055007090316962,
+ "learning_rate": 2.423429269686336e-06,
+ "loss": 0.8429,
+ "step": 4056
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0000623859562858,
+ "learning_rate": 2.4193633549637765e-06,
+ "loss": 0.8758,
+ "step": 4057
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.1037124690313722,
+ "learning_rate": 2.4153003844731425e-06,
+ "loss": 0.8649,
+ "step": 4058
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0369699282478542,
+ "learning_rate": 2.411240359792438e-06,
+ "loss": 0.8646,
+ "step": 4059
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9569318585945045,
+ "learning_rate": 2.407183282498534e-06,
+ "loss": 0.8687,
+ "step": 4060
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8754398527489688,
+ "learning_rate": 2.403129154167153e-06,
+ "loss": 0.8689,
+ "step": 4061
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.887308770315621,
+ "learning_rate": 2.3990779763728666e-06,
+ "loss": 0.8295,
+ "step": 4062
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9346038990203916,
+ "learning_rate": 2.3950297506891084e-06,
+ "loss": 0.8883,
+ "step": 4063
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9206691830265393,
+ "learning_rate": 2.390984478688164e-06,
+ "loss": 0.9462,
+ "step": 4064
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9715102836408156,
+ "learning_rate": 2.386942161941169e-06,
+ "loss": 0.9543,
+ "step": 4065
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7966573981430686,
+ "learning_rate": 2.3829028020181154e-06,
+ "loss": 0.8603,
+ "step": 4066
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8329182349694442,
+ "learning_rate": 2.3788664004878405e-06,
+ "loss": 0.8719,
+ "step": 4067
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.81818784771716,
+ "learning_rate": 2.374832958918035e-06,
+ "loss": 0.8929,
+ "step": 4068
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9050910748280558,
+ "learning_rate": 2.3708024788752448e-06,
+ "loss": 0.8851,
+ "step": 4069
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0899187351907824,
+ "learning_rate": 2.3667749619248614e-06,
+ "loss": 0.8801,
+ "step": 4070
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9991684980316599,
+ "learning_rate": 2.3627504096311273e-06,
+ "loss": 0.8863,
+ "step": 4071
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7524456863232081,
+ "learning_rate": 2.3587288235571258e-06,
+ "loss": 0.8853,
+ "step": 4072
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8491884838780307,
+ "learning_rate": 2.354710205264801e-06,
+ "loss": 0.7814,
+ "step": 4073
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9268409171191525,
+ "learning_rate": 2.350694556314934e-06,
+ "loss": 0.8927,
+ "step": 4074
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9290743263792193,
+ "learning_rate": 2.3466818782671597e-06,
+ "loss": 0.9389,
+ "step": 4075
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9259989615515022,
+ "learning_rate": 2.3426721726799573e-06,
+ "loss": 0.8902,
+ "step": 4076
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.6910196071179008,
+ "learning_rate": 2.3386654411106446e-06,
+ "loss": 0.8074,
+ "step": 4077
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9340996247053994,
+ "learning_rate": 2.3346616851153935e-06,
+ "loss": 0.8979,
+ "step": 4078
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0653573731934745,
+ "learning_rate": 2.330660906249218e-06,
+ "loss": 0.9188,
+ "step": 4079
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8937439750598773,
+ "learning_rate": 2.3266631060659685e-06,
+ "loss": 0.9171,
+ "step": 4080
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8707541188020759,
+ "learning_rate": 2.32266828611835e-06,
+ "loss": 0.8642,
+ "step": 4081
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9385840024984932,
+ "learning_rate": 2.318676447957907e-06,
+ "loss": 0.8472,
+ "step": 4082
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7598230892124757,
+ "learning_rate": 2.3146875931350165e-06,
+ "loss": 0.8482,
+ "step": 4083
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8959896109499784,
+ "learning_rate": 2.310701723198908e-06,
+ "loss": 0.901,
+ "step": 4084
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8885092414865883,
+ "learning_rate": 2.3067188396976482e-06,
+ "loss": 0.8991,
+ "step": 4085
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9028384744600961,
+ "learning_rate": 2.3027389441781368e-06,
+ "loss": 0.8974,
+ "step": 4086
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8879483662772303,
+ "learning_rate": 2.2987620381861288e-06,
+ "loss": 0.9344,
+ "step": 4087
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9041839529362387,
+ "learning_rate": 2.2947881232662007e-06,
+ "loss": 0.8936,
+ "step": 4088
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9028787232059573,
+ "learning_rate": 2.290817200961779e-06,
+ "loss": 0.8831,
+ "step": 4089
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.926786829646905,
+ "learning_rate": 2.286849272815126e-06,
+ "loss": 0.9009,
+ "step": 4090
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1179100068671728,
+ "learning_rate": 2.2828843403673338e-06,
+ "loss": 0.9245,
+ "step": 4091
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8656564298300758,
+ "learning_rate": 2.2789224051583403e-06,
+ "loss": 0.8905,
+ "step": 4092
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9607165923938596,
+ "learning_rate": 2.274963468726914e-06,
+ "loss": 0.8875,
+ "step": 4093
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9634473240315448,
+ "learning_rate": 2.2710075326106618e-06,
+ "loss": 0.9263,
+ "step": 4094
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.014904182740305,
+ "learning_rate": 2.2670545983460245e-06,
+ "loss": 0.8968,
+ "step": 4095
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9080426502478219,
+ "learning_rate": 2.263104667468272e-06,
+ "loss": 0.8786,
+ "step": 4096
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9237970035424105,
+ "learning_rate": 2.259157741511515e-06,
+ "loss": 0.835,
+ "step": 4097
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8695469303036835,
+ "learning_rate": 2.2552138220086927e-06,
+ "loss": 0.8819,
+ "step": 4098
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9232875172692364,
+ "learning_rate": 2.2512729104915787e-06,
+ "loss": 0.9221,
+ "step": 4099
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9126458734436097,
+ "learning_rate": 2.2473350084907806e-06,
+ "loss": 0.9396,
+ "step": 4100
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8850388026242367,
+ "learning_rate": 2.243400117535729e-06,
+ "loss": 0.9147,
+ "step": 4101
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0384132801287065,
+ "learning_rate": 2.2394682391546928e-06,
+ "loss": 0.9521,
+ "step": 4102
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9729842423856129,
+ "learning_rate": 2.2355393748747702e-06,
+ "loss": 0.8762,
+ "step": 4103
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9946317186957229,
+ "learning_rate": 2.2316135262218787e-06,
+ "loss": 0.9835,
+ "step": 4104
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8642999456933055,
+ "learning_rate": 2.227690694720784e-06,
+ "loss": 0.8872,
+ "step": 4105
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8566492152229089,
+ "learning_rate": 2.223770881895061e-06,
+ "loss": 0.8275,
+ "step": 4106
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9836308489734794,
+ "learning_rate": 2.2198540892671215e-06,
+ "loss": 0.8499,
+ "step": 4107
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8928198690489183,
+ "learning_rate": 2.215940318358206e-06,
+ "loss": 0.9258,
+ "step": 4108
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.097043195844042,
+ "learning_rate": 2.2120295706883698e-06,
+ "loss": 0.9376,
+ "step": 4109
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8653501898374938,
+ "learning_rate": 2.2081218477765097e-06,
+ "loss": 0.8886,
+ "step": 4110
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8394851362498404,
+ "learning_rate": 2.204217151140342e-06,
+ "loss": 0.883,
+ "step": 4111
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9337083108295533,
+ "learning_rate": 2.200315482296398e-06,
+ "loss": 0.9003,
+ "step": 4112
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1482500165327554,
+ "learning_rate": 2.1964168427600462e-06,
+ "loss": 0.8394,
+ "step": 4113
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9123848050228393,
+ "learning_rate": 2.1925212340454737e-06,
+ "loss": 0.9529,
+ "step": 4114
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9879145476720529,
+ "learning_rate": 2.1886286576656834e-06,
+ "loss": 0.959,
+ "step": 4115
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8606231062834674,
+ "learning_rate": 2.184739115132517e-06,
+ "loss": 0.911,
+ "step": 4116
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8886655729982125,
+ "learning_rate": 2.1808526079566215e-06,
+ "loss": 0.8061,
+ "step": 4117
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.926451670918023,
+ "learning_rate": 2.1769691376474722e-06,
+ "loss": 0.9191,
+ "step": 4118
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9565823558080365,
+ "learning_rate": 2.1730887057133678e-06,
+ "loss": 0.8662,
+ "step": 4119
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7389054326253872,
+ "learning_rate": 2.1692113136614177e-06,
+ "loss": 0.8202,
+ "step": 4120
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1228020185487793,
+ "learning_rate": 2.1653369629975595e-06,
+ "loss": 0.8583,
+ "step": 4121
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8963055951822942,
+ "learning_rate": 2.1614656552265457e-06,
+ "loss": 0.9219,
+ "step": 4122
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9207401832410509,
+ "learning_rate": 2.157597391851949e-06,
+ "loss": 0.9327,
+ "step": 4123
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8111609539925131,
+ "learning_rate": 2.1537321743761587e-06,
+ "loss": 0.8323,
+ "step": 4124
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.040173267158971,
+ "learning_rate": 2.1498700043003773e-06,
+ "loss": 0.8708,
+ "step": 4125
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0454868556219965,
+ "learning_rate": 2.1460108831246295e-06,
+ "loss": 0.9322,
+ "step": 4126
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1145646862077467,
+ "learning_rate": 2.142154812347753e-06,
+ "loss": 0.9242,
+ "step": 4127
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7542894098810323,
+ "learning_rate": 2.1383017934674012e-06,
+ "loss": 0.8264,
+ "step": 4128
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0853165928601176,
+ "learning_rate": 2.1344518279800454e-06,
+ "loss": 0.8932,
+ "step": 4129
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8619712325796545,
+ "learning_rate": 2.130604917380962e-06,
+ "loss": 0.897,
+ "step": 4130
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8526274123517904,
+ "learning_rate": 2.12676106316425e-06,
+ "loss": 0.854,
+ "step": 4131
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9174692413105993,
+ "learning_rate": 2.1229202668228197e-06,
+ "loss": 0.8768,
+ "step": 4132
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.97909810000739,
+ "learning_rate": 2.1190825298483855e-06,
+ "loss": 0.8886,
+ "step": 4133
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9333357359673061,
+ "learning_rate": 2.115247853731488e-06,
+ "loss": 0.8792,
+ "step": 4134
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8494576226832132,
+ "learning_rate": 2.11141623996147e-06,
+ "loss": 0.8449,
+ "step": 4135
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8537837931344772,
+ "learning_rate": 2.107587690026481e-06,
+ "loss": 0.8122,
+ "step": 4136
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9388101837707732,
+ "learning_rate": 2.103762205413493e-06,
+ "loss": 0.8795,
+ "step": 4137
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8949476894539724,
+ "learning_rate": 2.0999397876082726e-06,
+ "loss": 0.8947,
+ "step": 4138
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8823012117828377,
+ "learning_rate": 2.096120438095404e-06,
+ "loss": 0.8823,
+ "step": 4139
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8593543554123816,
+ "learning_rate": 2.092304158358286e-06,
+ "loss": 0.8173,
+ "step": 4140
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0204157630345805,
+ "learning_rate": 2.0884909498791106e-06,
+ "loss": 0.8671,
+ "step": 4141
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8854304487164367,
+ "learning_rate": 2.0846808141388852e-06,
+ "loss": 0.8411,
+ "step": 4142
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9389146274622053,
+ "learning_rate": 2.080873752617426e-06,
+ "loss": 0.9183,
+ "step": 4143
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.908298147521934,
+ "learning_rate": 2.0770697667933436e-06,
+ "loss": 0.8465,
+ "step": 4144
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9507813187926712,
+ "learning_rate": 2.073268858144074e-06,
+ "loss": 0.8763,
+ "step": 4145
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9261199670051468,
+ "learning_rate": 2.0694710281458372e-06,
+ "loss": 0.8529,
+ "step": 4146
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8087655514954948,
+ "learning_rate": 2.0656762782736693e-06,
+ "loss": 0.8418,
+ "step": 4147
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9368211053881321,
+ "learning_rate": 2.061884610001411e-06,
+ "loss": 0.8984,
+ "step": 4148
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8912197616398623,
+ "learning_rate": 2.0580960248016966e-06,
+ "loss": 0.8444,
+ "step": 4149
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8122310280623217,
+ "learning_rate": 2.0543105241459713e-06,
+ "loss": 0.8401,
+ "step": 4150
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8079381273920042,
+ "learning_rate": 2.0505281095044804e-06,
+ "loss": 0.8429,
+ "step": 4151
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9062019494180443,
+ "learning_rate": 2.0467487823462696e-06,
+ "loss": 0.9086,
+ "step": 4152
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9883686201862377,
+ "learning_rate": 2.042972544139189e-06,
+ "loss": 0.9257,
+ "step": 4153
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8550115895013028,
+ "learning_rate": 2.039199396349881e-06,
+ "loss": 0.94,
+ "step": 4154
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9516941578752195,
+ "learning_rate": 2.0354293404437963e-06,
+ "loss": 0.8815,
+ "step": 4155
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9962738337135307,
+ "learning_rate": 2.0316623778851784e-06,
+ "loss": 0.9483,
+ "step": 4156
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.7772316116470102,
+ "learning_rate": 2.0278985101370753e-06,
+ "loss": 0.7887,
+ "step": 4157
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0407315491959217,
+ "learning_rate": 2.024137738661329e-06,
+ "loss": 0.8767,
+ "step": 4158
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8603796905613208,
+ "learning_rate": 2.020380064918579e-06,
+ "loss": 0.7653,
+ "step": 4159
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0884817702885652,
+ "learning_rate": 2.01662549036826e-06,
+ "loss": 0.885,
+ "step": 4160
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0035394010366627,
+ "learning_rate": 2.0128740164686134e-06,
+ "loss": 0.8887,
+ "step": 4161
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9935432481978964,
+ "learning_rate": 2.009125644676656e-06,
+ "loss": 0.9244,
+ "step": 4162
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.070597955130549,
+ "learning_rate": 2.0053803764482226e-06,
+ "loss": 1.0022,
+ "step": 4163
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9887648205586059,
+ "learning_rate": 2.001638213237932e-06,
+ "loss": 0.9367,
+ "step": 4164
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8919775718097533,
+ "learning_rate": 1.997899156499191e-06,
+ "loss": 0.9015,
+ "step": 4165
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9436937895966855,
+ "learning_rate": 1.994163207684212e-06,
+ "loss": 0.9032,
+ "step": 4166
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0769371787866853,
+ "learning_rate": 1.9904303682439896e-06,
+ "loss": 0.8859,
+ "step": 4167
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9240989582938395,
+ "learning_rate": 1.986700639628316e-06,
+ "loss": 0.8362,
+ "step": 4168
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.989569753210682,
+ "learning_rate": 1.9829740232857807e-06,
+ "loss": 0.9232,
+ "step": 4169
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8151679258449964,
+ "learning_rate": 1.9792505206637523e-06,
+ "loss": 0.8911,
+ "step": 4170
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9416777841936684,
+ "learning_rate": 1.9755301332083997e-06,
+ "loss": 0.9009,
+ "step": 4171
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8610798178870233,
+ "learning_rate": 1.9718128623646792e-06,
+ "loss": 0.8805,
+ "step": 4172
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9108907842189863,
+ "learning_rate": 1.9680987095763315e-06,
+ "loss": 0.8947,
+ "step": 4173
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8849803096210145,
+ "learning_rate": 1.964387676285894e-06,
+ "loss": 0.8781,
+ "step": 4174
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8302805443398776,
+ "learning_rate": 1.9606797639346874e-06,
+ "loss": 0.8453,
+ "step": 4175
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.7957440027524559,
+ "learning_rate": 1.9569749739628243e-06,
+ "loss": 0.826,
+ "step": 4176
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.943797668429873,
+ "learning_rate": 1.9532733078092034e-06,
+ "loss": 0.8818,
+ "step": 4177
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8500557631151054,
+ "learning_rate": 1.9495747669115062e-06,
+ "loss": 0.8259,
+ "step": 4178
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8967577054252616,
+ "learning_rate": 1.9458793527062035e-06,
+ "loss": 0.8825,
+ "step": 4179
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.2295227916186988,
+ "learning_rate": 1.9421870666285523e-06,
+ "loss": 0.8864,
+ "step": 4180
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8969752125296094,
+ "learning_rate": 1.9384979101125944e-06,
+ "loss": 0.892,
+ "step": 4181
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.866511603452024,
+ "learning_rate": 1.934811884591159e-06,
+ "loss": 0.8546,
+ "step": 4182
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0143956233364166,
+ "learning_rate": 1.93112899149585e-06,
+ "loss": 0.9279,
+ "step": 4183
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8971611525998221,
+ "learning_rate": 1.9274492322570616e-06,
+ "loss": 0.8872,
+ "step": 4184
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8421146961160477,
+ "learning_rate": 1.923772608303972e-06,
+ "loss": 0.85,
+ "step": 4185
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9453142979896407,
+ "learning_rate": 1.9200991210645394e-06,
+ "loss": 0.9192,
+ "step": 4186
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0462688231882833,
+ "learning_rate": 1.916428771965506e-06,
+ "loss": 0.9497,
+ "step": 4187
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8628255841189849,
+ "learning_rate": 1.912761562432388e-06,
+ "loss": 0.8165,
+ "step": 4188
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7352639278433662,
+ "learning_rate": 1.9090974938894902e-06,
+ "loss": 0.799,
+ "step": 4189
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.07980904107261,
+ "learning_rate": 1.9054365677598963e-06,
+ "loss": 0.9411,
+ "step": 4190
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9064350449233921,
+ "learning_rate": 1.9017787854654613e-06,
+ "loss": 0.8661,
+ "step": 4191
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9686829188986636,
+ "learning_rate": 1.898124148426832e-06,
+ "loss": 0.9502,
+ "step": 4192
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0384564388980664,
+ "learning_rate": 1.8944726580634287e-06,
+ "loss": 0.9211,
+ "step": 4193
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7584584646982807,
+ "learning_rate": 1.8908243157934424e-06,
+ "loss": 0.7989,
+ "step": 4194
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.834550012029811,
+ "learning_rate": 1.8871791230338499e-06,
+ "loss": 0.8809,
+ "step": 4195
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0410863356256124,
+ "learning_rate": 1.883537081200404e-06,
+ "loss": 0.8692,
+ "step": 4196
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9521083798907651,
+ "learning_rate": 1.8798981917076254e-06,
+ "loss": 0.916,
+ "step": 4197
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0754796019560438,
+ "learning_rate": 1.876262455968826e-06,
+ "loss": 0.9022,
+ "step": 4198
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0363029182823422,
+ "learning_rate": 1.872629875396076e-06,
+ "loss": 0.8648,
+ "step": 4199
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.864903522918182,
+ "learning_rate": 1.8690004514002314e-06,
+ "loss": 0.847,
+ "step": 4200
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9460555407020449,
+ "learning_rate": 1.8653741853909201e-06,
+ "loss": 0.9304,
+ "step": 4201
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8459959079469654,
+ "learning_rate": 1.861751078776538e-06,
+ "loss": 0.8592,
+ "step": 4202
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7811839567522102,
+ "learning_rate": 1.8581311329642592e-06,
+ "loss": 0.836,
+ "step": 4203
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.011020768515595,
+ "learning_rate": 1.8545143493600293e-06,
+ "loss": 0.9431,
+ "step": 4204
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9643408748874488,
+ "learning_rate": 1.8509007293685666e-06,
+ "loss": 0.8591,
+ "step": 4205
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9576397938951362,
+ "learning_rate": 1.8472902743933608e-06,
+ "loss": 0.8651,
+ "step": 4206
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9167226135139332,
+ "learning_rate": 1.8436829858366655e-06,
+ "loss": 0.8573,
+ "step": 4207
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8704974275409894,
+ "learning_rate": 1.8400788650995137e-06,
+ "loss": 0.8649,
+ "step": 4208
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8820246946473046,
+ "learning_rate": 1.8364779135817045e-06,
+ "loss": 0.8062,
+ "step": 4209
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8346480363282573,
+ "learning_rate": 1.8328801326818045e-06,
+ "loss": 0.8575,
+ "step": 4210
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8120428420288254,
+ "learning_rate": 1.829285523797155e-06,
+ "loss": 0.8611,
+ "step": 4211
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0025796855274374,
+ "learning_rate": 1.8256940883238538e-06,
+ "loss": 0.9148,
+ "step": 4212
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8364500447554787,
+ "learning_rate": 1.822105827656776e-06,
+ "loss": 0.8318,
+ "step": 4213
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9864320303697556,
+ "learning_rate": 1.8185207431895613e-06,
+ "loss": 0.8939,
+ "step": 4214
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8851820165130008,
+ "learning_rate": 1.8149388363146148e-06,
+ "loss": 0.9133,
+ "step": 4215
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9235879806978636,
+ "learning_rate": 1.8113601084231091e-06,
+ "loss": 0.8847,
+ "step": 4216
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9317026569999612,
+ "learning_rate": 1.8077845609049782e-06,
+ "loss": 0.8405,
+ "step": 4217
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8877757388003821,
+ "learning_rate": 1.8042121951489254e-06,
+ "loss": 0.8611,
+ "step": 4218
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0125013366148803,
+ "learning_rate": 1.800643012542418e-06,
+ "loss": 0.8934,
+ "step": 4219
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.77328029528668,
+ "learning_rate": 1.7970770144716777e-06,
+ "loss": 0.8451,
+ "step": 4220
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9026477480194623,
+ "learning_rate": 1.7935142023217056e-06,
+ "loss": 0.9102,
+ "step": 4221
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9540904370482145,
+ "learning_rate": 1.7899545774762573e-06,
+ "loss": 0.8549,
+ "step": 4222
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9659909913805189,
+ "learning_rate": 1.7863981413178433e-06,
+ "loss": 0.8758,
+ "step": 4223
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8974931301760378,
+ "learning_rate": 1.7828448952277456e-06,
+ "loss": 0.8486,
+ "step": 4224
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9038804982423293,
+ "learning_rate": 1.7792948405860079e-06,
+ "loss": 0.9082,
+ "step": 4225
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8518238489561752,
+ "learning_rate": 1.7757479787714217e-06,
+ "loss": 0.8289,
+ "step": 4226
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9732642755656459,
+ "learning_rate": 1.7722043111615572e-06,
+ "loss": 0.8674,
+ "step": 4227
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9981586354999044,
+ "learning_rate": 1.768663839132727e-06,
+ "loss": 0.8676,
+ "step": 4228
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9767566547508983,
+ "learning_rate": 1.7651265640600113e-06,
+ "loss": 0.8986,
+ "step": 4229
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7944108016619675,
+ "learning_rate": 1.7615924873172506e-06,
+ "loss": 0.8403,
+ "step": 4230
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8543094994212218,
+ "learning_rate": 1.7580616102770353e-06,
+ "loss": 0.8804,
+ "step": 4231
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8680298111959078,
+ "learning_rate": 1.754533934310717e-06,
+ "loss": 0.8702,
+ "step": 4232
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.798555398853347,
+ "learning_rate": 1.7510094607884075e-06,
+ "loss": 0.8088,
+ "step": 4233
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0168098896767641,
+ "learning_rate": 1.7474881910789698e-06,
+ "loss": 0.8939,
+ "step": 4234
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.010765749661305,
+ "learning_rate": 1.7439701265500274e-06,
+ "loss": 0.8634,
+ "step": 4235
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8669125584507791,
+ "learning_rate": 1.740455268567951e-06,
+ "loss": 0.8659,
+ "step": 4236
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0328717545418735,
+ "learning_rate": 1.7369436184978738e-06,
+ "loss": 0.8859,
+ "step": 4237
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8446788596592253,
+ "learning_rate": 1.7334351777036807e-06,
+ "loss": 0.8365,
+ "step": 4238
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0790026405786928,
+ "learning_rate": 1.729929947548008e-06,
+ "loss": 0.9184,
+ "step": 4239
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8880365172860483,
+ "learning_rate": 1.7264279293922503e-06,
+ "loss": 0.8756,
+ "step": 4240
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.852480900993756,
+ "learning_rate": 1.7229291245965463e-06,
+ "loss": 0.8552,
+ "step": 4241
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8995156844257409,
+ "learning_rate": 1.7194335345197933e-06,
+ "loss": 0.8795,
+ "step": 4242
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.033019126091575,
+ "learning_rate": 1.7159411605196407e-06,
+ "loss": 0.9104,
+ "step": 4243
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9042868541549544,
+ "learning_rate": 1.7124520039524805e-06,
+ "loss": 0.8678,
+ "step": 4244
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9728929156802403,
+ "learning_rate": 1.7089660661734685e-06,
+ "loss": 0.9187,
+ "step": 4245
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9176002094908589,
+ "learning_rate": 1.7054833485364962e-06,
+ "loss": 0.9312,
+ "step": 4246
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9092868064217556,
+ "learning_rate": 1.702003852394214e-06,
+ "loss": 0.8577,
+ "step": 4247
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8676912630242394,
+ "learning_rate": 1.6985275790980205e-06,
+ "loss": 0.8912,
+ "step": 4248
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8621206859482691,
+ "learning_rate": 1.6950545299980526e-06,
+ "loss": 0.9031,
+ "step": 4249
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9537558641892446,
+ "learning_rate": 1.691584706443209e-06,
+ "loss": 0.9074,
+ "step": 4250
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8069863327611584,
+ "learning_rate": 1.6881181097811305e-06,
+ "loss": 0.8193,
+ "step": 4251
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8971134429881099,
+ "learning_rate": 1.6846547413581981e-06,
+ "loss": 0.8558,
+ "step": 4252
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.010128964422933,
+ "learning_rate": 1.6811946025195459e-06,
+ "loss": 0.8793,
+ "step": 4253
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.826250798207253,
+ "learning_rate": 1.6777376946090552e-06,
+ "loss": 0.8046,
+ "step": 4254
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9388189750495171,
+ "learning_rate": 1.674284018969342e-06,
+ "loss": 0.912,
+ "step": 4255
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8952634639495284,
+ "learning_rate": 1.6708335769417827e-06,
+ "loss": 0.9091,
+ "step": 4256
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9927273287526445,
+ "learning_rate": 1.667386369866484e-06,
+ "loss": 0.8048,
+ "step": 4257
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.959179914383579,
+ "learning_rate": 1.6639423990823011e-06,
+ "loss": 0.8558,
+ "step": 4258
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9412212006934919,
+ "learning_rate": 1.660501665926838e-06,
+ "loss": 0.8604,
+ "step": 4259
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0107033510164385,
+ "learning_rate": 1.6570641717364277e-06,
+ "loss": 0.9321,
+ "step": 4260
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9162877665794047,
+ "learning_rate": 1.653629917846159e-06,
+ "loss": 0.845,
+ "step": 4261
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9356101234694048,
+ "learning_rate": 1.6501989055898537e-06,
+ "loss": 0.9274,
+ "step": 4262
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.7825221035897323,
+ "learning_rate": 1.6467711363000794e-06,
+ "loss": 0.8273,
+ "step": 4263
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.915839237746525,
+ "learning_rate": 1.6433466113081442e-06,
+ "loss": 0.9426,
+ "step": 4264
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9492463773319484,
+ "learning_rate": 1.6399253319440888e-06,
+ "loss": 0.9638,
+ "step": 4265
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.01435861381373,
+ "learning_rate": 1.6365072995367004e-06,
+ "loss": 0.8703,
+ "step": 4266
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.032310735939065,
+ "learning_rate": 1.6330925154135057e-06,
+ "loss": 0.9089,
+ "step": 4267
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9474316903677986,
+ "learning_rate": 1.6296809809007652e-06,
+ "loss": 0.899,
+ "step": 4268
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9122522786407117,
+ "learning_rate": 1.6262726973234844e-06,
+ "loss": 0.8979,
+ "step": 4269
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9293180651291256,
+ "learning_rate": 1.6228676660053932e-06,
+ "loss": 0.8791,
+ "step": 4270
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.7856105609802073,
+ "learning_rate": 1.6194658882689718e-06,
+ "loss": 0.8457,
+ "step": 4271
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8979354808600734,
+ "learning_rate": 1.6160673654354331e-06,
+ "loss": 0.9186,
+ "step": 4272
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0921143405482687,
+ "learning_rate": 1.6126720988247168e-06,
+ "loss": 0.8975,
+ "step": 4273
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9670671740207992,
+ "learning_rate": 1.609280089755515e-06,
+ "loss": 0.8516,
+ "step": 4274
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9351974673019499,
+ "learning_rate": 1.605891339545237e-06,
+ "loss": 0.9331,
+ "step": 4275
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9718101490560755,
+ "learning_rate": 1.6025058495100388e-06,
+ "loss": 0.8632,
+ "step": 4276
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0254335153782985,
+ "learning_rate": 1.5991236209648052e-06,
+ "loss": 0.8578,
+ "step": 4277
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.023925867885646,
+ "learning_rate": 1.5957446552231526e-06,
+ "loss": 0.8793,
+ "step": 4278
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9311023098856142,
+ "learning_rate": 1.5923689535974307e-06,
+ "loss": 0.8639,
+ "step": 4279
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9273209097209918,
+ "learning_rate": 1.588996517398731e-06,
+ "loss": 0.9106,
+ "step": 4280
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8100356359717925,
+ "learning_rate": 1.5856273479368611e-06,
+ "loss": 0.8314,
+ "step": 4281
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.08493525966539,
+ "learning_rate": 1.582261446520371e-06,
+ "loss": 0.8687,
+ "step": 4282
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0588228353494755,
+ "learning_rate": 1.5788988144565397e-06,
+ "loss": 0.9386,
+ "step": 4283
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8846910688163809,
+ "learning_rate": 1.575539453051369e-06,
+ "loss": 0.8948,
+ "step": 4284
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8823865932843107,
+ "learning_rate": 1.572183363609603e-06,
+ "loss": 0.834,
+ "step": 4285
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.916367312075249,
+ "learning_rate": 1.568830547434703e-06,
+ "loss": 0.7975,
+ "step": 4286
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8689353941661974,
+ "learning_rate": 1.5654810058288661e-06,
+ "loss": 0.902,
+ "step": 4287
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8394528428749988,
+ "learning_rate": 1.5621347400930176e-06,
+ "loss": 0.8281,
+ "step": 4288
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8775838554267056,
+ "learning_rate": 1.5587917515268048e-06,
+ "loss": 0.867,
+ "step": 4289
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.964160629339188,
+ "learning_rate": 1.5554520414286067e-06,
+ "loss": 0.9083,
+ "step": 4290
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9985951864949679,
+ "learning_rate": 1.5521156110955293e-06,
+ "loss": 0.8989,
+ "step": 4291
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.0504657147965988,
+ "learning_rate": 1.5487824618234049e-06,
+ "loss": 0.8712,
+ "step": 4292
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8078373213446511,
+ "learning_rate": 1.54545259490679e-06,
+ "loss": 0.8519,
+ "step": 4293
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.937211660161243,
+ "learning_rate": 1.5421260116389636e-06,
+ "loss": 0.9001,
+ "step": 4294
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9732871656878034,
+ "learning_rate": 1.5388027133119343e-06,
+ "loss": 0.8315,
+ "step": 4295
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8633160539608518,
+ "learning_rate": 1.535482701216433e-06,
+ "loss": 0.9105,
+ "step": 4296
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8641619017556765,
+ "learning_rate": 1.5321659766419129e-06,
+ "loss": 0.8381,
+ "step": 4297
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8865491462244038,
+ "learning_rate": 1.5288525408765564e-06,
+ "loss": 0.8793,
+ "step": 4298
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9223542637078772,
+ "learning_rate": 1.5255423952072567e-06,
+ "loss": 0.8887,
+ "step": 4299
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.849204339706377,
+ "learning_rate": 1.52223554091964e-06,
+ "loss": 0.8348,
+ "step": 4300
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8827438209095655,
+ "learning_rate": 1.5189319792980517e-06,
+ "loss": 0.8546,
+ "step": 4301
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8658099000101591,
+ "learning_rate": 1.5156317116255515e-06,
+ "loss": 0.8301,
+ "step": 4302
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9465128463748287,
+ "learning_rate": 1.5123347391839305e-06,
+ "loss": 0.9351,
+ "step": 4303
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.162625936895587,
+ "learning_rate": 1.5090410632536968e-06,
+ "loss": 0.904,
+ "step": 4304
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9492950425963446,
+ "learning_rate": 1.5057506851140701e-06,
+ "loss": 0.8723,
+ "step": 4305
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8847624309484248,
+ "learning_rate": 1.5024636060429998e-06,
+ "loss": 0.8647,
+ "step": 4306
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.0154618576957262,
+ "learning_rate": 1.4991798273171465e-06,
+ "loss": 0.9599,
+ "step": 4307
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8993919045724885,
+ "learning_rate": 1.49589935021189e-06,
+ "loss": 0.8754,
+ "step": 4308
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8120404721237,
+ "learning_rate": 1.4926221760013393e-06,
+ "loss": 0.8671,
+ "step": 4309
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9686759027044225,
+ "learning_rate": 1.4893483059583014e-06,
+ "loss": 0.9182,
+ "step": 4310
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8825178910172966,
+ "learning_rate": 1.4860777413543138e-06,
+ "loss": 0.9025,
+ "step": 4311
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8492752577428371,
+ "learning_rate": 1.4828104834596268e-06,
+ "loss": 0.8736,
+ "step": 4312
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9995808467990425,
+ "learning_rate": 1.4795465335432036e-06,
+ "loss": 0.824,
+ "step": 4313
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9215206901437705,
+ "learning_rate": 1.4762858928727241e-06,
+ "loss": 0.902,
+ "step": 4314
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9387090032416386,
+ "learning_rate": 1.4730285627145858e-06,
+ "loss": 0.8241,
+ "step": 4315
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8184468693723096,
+ "learning_rate": 1.4697745443338984e-06,
+ "loss": 0.8341,
+ "step": 4316
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8969274254049279,
+ "learning_rate": 1.4665238389944859e-06,
+ "loss": 0.8625,
+ "step": 4317
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8976121630810855,
+ "learning_rate": 1.46327644795888e-06,
+ "loss": 0.883,
+ "step": 4318
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.026625129207781,
+ "learning_rate": 1.4600323724883337e-06,
+ "loss": 0.922,
+ "step": 4319
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9489282750554974,
+ "learning_rate": 1.4567916138428072e-06,
+ "loss": 0.8446,
+ "step": 4320
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8444995307678088,
+ "learning_rate": 1.4535541732809755e-06,
+ "loss": 0.8586,
+ "step": 4321
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8727797663941129,
+ "learning_rate": 1.4503200520602245e-06,
+ "loss": 0.8977,
+ "step": 4322
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9610062570706849,
+ "learning_rate": 1.4470892514366442e-06,
+ "loss": 0.8862,
+ "step": 4323
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9010078578242248,
+ "learning_rate": 1.443861772665044e-06,
+ "loss": 0.9004,
+ "step": 4324
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9458466443889767,
+ "learning_rate": 1.4406376169989389e-06,
+ "loss": 0.8412,
+ "step": 4325
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9975906724948811,
+ "learning_rate": 1.4374167856905542e-06,
+ "loss": 0.936,
+ "step": 4326
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8817184421188767,
+ "learning_rate": 1.4341992799908255e-06,
+ "loss": 0.9086,
+ "step": 4327
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9092642430322561,
+ "learning_rate": 1.4309851011493903e-06,
+ "loss": 0.8793,
+ "step": 4328
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8750464387687253,
+ "learning_rate": 1.427774250414601e-06,
+ "loss": 0.8816,
+ "step": 4329
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9331332280697446,
+ "learning_rate": 1.4245667290335175e-06,
+ "loss": 0.9228,
+ "step": 4330
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8619414036259035,
+ "learning_rate": 1.421362538251897e-06,
+ "loss": 0.8856,
+ "step": 4331
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8126223098413,
+ "learning_rate": 1.4181616793142173e-06,
+ "loss": 0.8912,
+ "step": 4332
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8076391294951282,
+ "learning_rate": 1.414964153463655e-06,
+ "loss": 0.8616,
+ "step": 4333
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9215220035914787,
+ "learning_rate": 1.4117699619420878e-06,
+ "loss": 0.7838,
+ "step": 4334
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9121929534750399,
+ "learning_rate": 1.4085791059901077e-06,
+ "loss": 0.9036,
+ "step": 4335
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8708612949484579,
+ "learning_rate": 1.4053915868470013e-06,
+ "loss": 0.8908,
+ "step": 4336
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8819912375555637,
+ "learning_rate": 1.402207405750765e-06,
+ "loss": 0.8541,
+ "step": 4337
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.7941368072678817,
+ "learning_rate": 1.399026563938105e-06,
+ "loss": 0.855,
+ "step": 4338
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9231545152873868,
+ "learning_rate": 1.3958490626444154e-06,
+ "loss": 0.8964,
+ "step": 4339
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8888049189829651,
+ "learning_rate": 1.3926749031038055e-06,
+ "loss": 0.9064,
+ "step": 4340
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9477947025863747,
+ "learning_rate": 1.3895040865490817e-06,
+ "loss": 0.8516,
+ "step": 4341
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9144954215299261,
+ "learning_rate": 1.3863366142117506e-06,
+ "loss": 0.8939,
+ "step": 4342
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9933436776611138,
+ "learning_rate": 1.383172487322023e-06,
+ "loss": 0.8657,
+ "step": 4343
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9069322246148085,
+ "learning_rate": 1.3800117071088104e-06,
+ "loss": 0.8992,
+ "step": 4344
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9546694266275502,
+ "learning_rate": 1.3768542747997215e-06,
+ "loss": 0.8876,
+ "step": 4345
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.1519189255583402,
+ "learning_rate": 1.3737001916210713e-06,
+ "loss": 0.987,
+ "step": 4346
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8450222424007697,
+ "learning_rate": 1.370549458797863e-06,
+ "loss": 0.8736,
+ "step": 4347
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.027797851084786,
+ "learning_rate": 1.3674020775538078e-06,
+ "loss": 0.9744,
+ "step": 4348
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9596396303364736,
+ "learning_rate": 1.3642580491113122e-06,
+ "loss": 0.8457,
+ "step": 4349
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8489943977959429,
+ "learning_rate": 1.3611173746914797e-06,
+ "loss": 0.839,
+ "step": 4350
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9177898684165542,
+ "learning_rate": 1.3579800555141165e-06,
+ "loss": 0.8443,
+ "step": 4351
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0084342244773004,
+ "learning_rate": 1.3548460927977158e-06,
+ "loss": 0.8285,
+ "step": 4352
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9712532252970955,
+ "learning_rate": 1.351715487759474e-06,
+ "loss": 0.9103,
+ "step": 4353
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0264245984946392,
+ "learning_rate": 1.3485882416152819e-06,
+ "loss": 0.8653,
+ "step": 4354
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9309697174629245,
+ "learning_rate": 1.3454643555797276e-06,
+ "loss": 0.8802,
+ "step": 4355
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0621696950991983,
+ "learning_rate": 1.3423438308660929e-06,
+ "loss": 0.9771,
+ "step": 4356
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9076268387887377,
+ "learning_rate": 1.3392266686863508e-06,
+ "loss": 0.8588,
+ "step": 4357
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9943609267593755,
+ "learning_rate": 1.3361128702511716e-06,
+ "loss": 0.9281,
+ "step": 4358
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0745052503952597,
+ "learning_rate": 1.3330024367699224e-06,
+ "loss": 0.9177,
+ "step": 4359
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9178533986339459,
+ "learning_rate": 1.3298953694506522e-06,
+ "loss": 0.8428,
+ "step": 4360
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0221921399852922,
+ "learning_rate": 1.3267916695001172e-06,
+ "loss": 0.8804,
+ "step": 4361
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8451997649832802,
+ "learning_rate": 1.3236913381237592e-06,
+ "loss": 0.8618,
+ "step": 4362
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9532388337988882,
+ "learning_rate": 1.3205943765257057e-06,
+ "loss": 0.8892,
+ "step": 4363
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8623572184502248,
+ "learning_rate": 1.317500785908783e-06,
+ "loss": 0.8846,
+ "step": 4364
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9096356998930603,
+ "learning_rate": 1.31441056747451e-06,
+ "loss": 0.8485,
+ "step": 4365
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9319788853039657,
+ "learning_rate": 1.3113237224230836e-06,
+ "loss": 0.9086,
+ "step": 4366
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8546538595116254,
+ "learning_rate": 1.3082402519534076e-06,
+ "loss": 0.8384,
+ "step": 4367
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9420415566779518,
+ "learning_rate": 1.3051601572630611e-06,
+ "loss": 0.892,
+ "step": 4368
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9229043665417269,
+ "learning_rate": 1.3020834395483195e-06,
+ "loss": 0.9592,
+ "step": 4369
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0142996996904374,
+ "learning_rate": 1.2990101000041445e-06,
+ "loss": 0.9196,
+ "step": 4370
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9893710753174146,
+ "learning_rate": 1.2959401398241844e-06,
+ "loss": 0.9642,
+ "step": 4371
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8935486040273064,
+ "learning_rate": 1.2928735602007768e-06,
+ "loss": 0.9024,
+ "step": 4372
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8980300632527639,
+ "learning_rate": 1.2898103623249458e-06,
+ "loss": 0.931,
+ "step": 4373
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9400340634044065,
+ "learning_rate": 1.2867505473864029e-06,
+ "loss": 0.8775,
+ "step": 4374
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8852922367628429,
+ "learning_rate": 1.283694116573546e-06,
+ "loss": 0.8526,
+ "step": 4375
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9308637212715061,
+ "learning_rate": 1.2806410710734552e-06,
+ "loss": 0.893,
+ "step": 4376
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.054242291129353,
+ "learning_rate": 1.2775914120718992e-06,
+ "loss": 0.9159,
+ "step": 4377
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9734805693778715,
+ "learning_rate": 1.2745451407533294e-06,
+ "loss": 0.923,
+ "step": 4378
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9250030033040264,
+ "learning_rate": 1.2715022583008851e-06,
+ "loss": 0.8875,
+ "step": 4379
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8912635587673091,
+ "learning_rate": 1.2684627658963865e-06,
+ "loss": 0.8646,
+ "step": 4380
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.789099016737345,
+ "learning_rate": 1.265426664720334e-06,
+ "loss": 0.8652,
+ "step": 4381
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9141014334851884,
+ "learning_rate": 1.2623939559519161e-06,
+ "loss": 0.883,
+ "step": 4382
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.1470673379107672,
+ "learning_rate": 1.2593646407690051e-06,
+ "loss": 0.8575,
+ "step": 4383
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.7437164663470661,
+ "learning_rate": 1.2563387203481447e-06,
+ "loss": 0.8078,
+ "step": 4384
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8812285120839243,
+ "learning_rate": 1.2533161958645755e-06,
+ "loss": 0.874,
+ "step": 4385
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9886474623505253,
+ "learning_rate": 1.2502970684922067e-06,
+ "loss": 0.91,
+ "step": 4386
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8510526707996923,
+ "learning_rate": 1.2472813394036344e-06,
+ "loss": 0.8029,
+ "step": 4387
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9220893707095607,
+ "learning_rate": 1.2442690097701327e-06,
+ "loss": 0.8489,
+ "step": 4388
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8019016015834931,
+ "learning_rate": 1.2412600807616526e-06,
+ "loss": 0.7543,
+ "step": 4389
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0926757629078143,
+ "learning_rate": 1.2382545535468316e-06,
+ "loss": 0.9594,
+ "step": 4390
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.953036855555765,
+ "learning_rate": 1.2352524292929823e-06,
+ "loss": 0.9097,
+ "step": 4391
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9419931200211884,
+ "learning_rate": 1.2322537091660912e-06,
+ "loss": 0.896,
+ "step": 4392
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0783066417312557,
+ "learning_rate": 1.22925839433083e-06,
+ "loss": 0.8952,
+ "step": 4393
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9010516525713899,
+ "learning_rate": 1.2262664859505434e-06,
+ "loss": 0.8173,
+ "step": 4394
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0404129292108966,
+ "learning_rate": 1.2232779851872511e-06,
+ "loss": 0.9002,
+ "step": 4395
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9959939362032887,
+ "learning_rate": 1.2202928932016588e-06,
+ "loss": 0.8607,
+ "step": 4396
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0519236391406372,
+ "learning_rate": 1.217311211153137e-06,
+ "loss": 0.9187,
+ "step": 4397
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8847643265914652,
+ "learning_rate": 1.2143329401997372e-06,
+ "loss": 0.8927,
+ "step": 4398
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.011836449899806,
+ "learning_rate": 1.2113580814981884e-06,
+ "loss": 0.9013,
+ "step": 4399
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9457684850126732,
+ "learning_rate": 1.2083866362038865e-06,
+ "loss": 0.8474,
+ "step": 4400
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8794146370800098,
+ "learning_rate": 1.2054186054709105e-06,
+ "loss": 0.9051,
+ "step": 4401
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9994042976622503,
+ "learning_rate": 1.2024539904520072e-06,
+ "loss": 0.8828,
+ "step": 4402
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9326353211162844,
+ "learning_rate": 1.1994927922985999e-06,
+ "loss": 0.8799,
+ "step": 4403
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8746336354630223,
+ "learning_rate": 1.1965350121607866e-06,
+ "loss": 0.8828,
+ "step": 4404
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9927828679864845,
+ "learning_rate": 1.1935806511873306e-06,
+ "loss": 0.8525,
+ "step": 4405
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0753569955639017,
+ "learning_rate": 1.1906297105256725e-06,
+ "loss": 0.9051,
+ "step": 4406
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.089473780777804,
+ "learning_rate": 1.187682191321925e-06,
+ "loss": 0.8594,
+ "step": 4407
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9250170782041444,
+ "learning_rate": 1.1847380947208697e-06,
+ "loss": 0.9507,
+ "step": 4408
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8945205817460992,
+ "learning_rate": 1.1817974218659621e-06,
+ "loss": 0.9071,
+ "step": 4409
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9040203881922112,
+ "learning_rate": 1.178860173899321e-06,
+ "loss": 0.8559,
+ "step": 4410
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8544334862845641,
+ "learning_rate": 1.1759263519617437e-06,
+ "loss": 0.8399,
+ "step": 4411
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.105335993006846,
+ "learning_rate": 1.172995957192693e-06,
+ "loss": 0.9251,
+ "step": 4412
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9253771127170801,
+ "learning_rate": 1.1700689907302953e-06,
+ "loss": 0.7862,
+ "step": 4413
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.1841069055677755,
+ "learning_rate": 1.167145453711358e-06,
+ "loss": 0.8982,
+ "step": 4414
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.074350471656947,
+ "learning_rate": 1.1642253472713427e-06,
+ "loss": 0.8916,
+ "step": 4415
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.039963082327553,
+ "learning_rate": 1.161308672544389e-06,
+ "loss": 0.9328,
+ "step": 4416
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9599735733589884,
+ "learning_rate": 1.1583954306633004e-06,
+ "loss": 0.8265,
+ "step": 4417
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9678466463162355,
+ "learning_rate": 1.1554856227595435e-06,
+ "loss": 0.839,
+ "step": 4418
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0088444774876635,
+ "learning_rate": 1.1525792499632526e-06,
+ "loss": 0.8801,
+ "step": 4419
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8932524790191629,
+ "learning_rate": 1.1496763134032363e-06,
+ "loss": 0.8493,
+ "step": 4420
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9222967778651467,
+ "learning_rate": 1.1467768142069546e-06,
+ "loss": 0.8544,
+ "step": 4421
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9780327951047955,
+ "learning_rate": 1.1438807535005437e-06,
+ "loss": 0.9147,
+ "step": 4422
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9667663733169486,
+ "learning_rate": 1.1409881324088013e-06,
+ "loss": 0.8188,
+ "step": 4423
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9365509433655029,
+ "learning_rate": 1.138098952055181e-06,
+ "loss": 0.8807,
+ "step": 4424
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8565813918026189,
+ "learning_rate": 1.1352132135618165e-06,
+ "loss": 0.9126,
+ "step": 4425
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.075644450832635,
+ "learning_rate": 1.13233091804949e-06,
+ "loss": 0.8761,
+ "step": 4426
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8165419744649355,
+ "learning_rate": 1.1294520666376518e-06,
+ "loss": 0.7936,
+ "step": 4427
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0323805322011725,
+ "learning_rate": 1.1265766604444172e-06,
+ "loss": 0.8706,
+ "step": 4428
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.7495495075304767,
+ "learning_rate": 1.1237047005865576e-06,
+ "loss": 0.8419,
+ "step": 4429
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.025298418486006,
+ "learning_rate": 1.1208361881795116e-06,
+ "loss": 0.8654,
+ "step": 4430
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8658167366495689,
+ "learning_rate": 1.1179711243373736e-06,
+ "loss": 0.8158,
+ "step": 4431
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8703812611612662,
+ "learning_rate": 1.1151095101729047e-06,
+ "loss": 0.8659,
+ "step": 4432
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9237099858287684,
+ "learning_rate": 1.1122513467975237e-06,
+ "loss": 0.8601,
+ "step": 4433
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8753879343874093,
+ "learning_rate": 1.1093966353213036e-06,
+ "loss": 0.8508,
+ "step": 4434
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0279338600477002,
+ "learning_rate": 1.1065453768529844e-06,
+ "loss": 0.982,
+ "step": 4435
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8896273082857911,
+ "learning_rate": 1.103697572499961e-06,
+ "loss": 0.9002,
+ "step": 4436
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0127050002489055,
+ "learning_rate": 1.1008532233682878e-06,
+ "loss": 0.9153,
+ "step": 4437
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9888636471260973,
+ "learning_rate": 1.0980123305626812e-06,
+ "loss": 0.9356,
+ "step": 4438
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.1176917582011996,
+ "learning_rate": 1.0951748951865048e-06,
+ "loss": 0.9203,
+ "step": 4439
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9076607048301628,
+ "learning_rate": 1.0923409183417887e-06,
+ "loss": 0.9021,
+ "step": 4440
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8021459869631091,
+ "learning_rate": 1.0895104011292202e-06,
+ "loss": 0.8594,
+ "step": 4441
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8521248017433695,
+ "learning_rate": 1.0866833446481317e-06,
+ "loss": 0.8437,
+ "step": 4442
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8199654243704576,
+ "learning_rate": 1.0838597499965276e-06,
+ "loss": 0.8561,
+ "step": 4443
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9017441943740964,
+ "learning_rate": 1.0810396182710535e-06,
+ "loss": 0.8628,
+ "step": 4444
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0635085735985401,
+ "learning_rate": 1.0782229505670195e-06,
+ "loss": 0.857,
+ "step": 4445
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0110512531596023,
+ "learning_rate": 1.0754097479783876e-06,
+ "loss": 0.9095,
+ "step": 4446
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0330156889110493,
+ "learning_rate": 1.0726000115977696e-06,
+ "loss": 0.9477,
+ "step": 4447
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8635230495765193,
+ "learning_rate": 1.069793742516435e-06,
+ "loss": 0.8596,
+ "step": 4448
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9609302996834761,
+ "learning_rate": 1.066990941824312e-06,
+ "loss": 0.8814,
+ "step": 4449
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7887306989653831,
+ "learning_rate": 1.0641916106099691e-06,
+ "loss": 0.8378,
+ "step": 4450
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8799238336963068,
+ "learning_rate": 1.0613957499606388e-06,
+ "loss": 0.8461,
+ "step": 4451
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9332652089990722,
+ "learning_rate": 1.0586033609622004e-06,
+ "loss": 0.9113,
+ "step": 4452
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0120415338344462,
+ "learning_rate": 1.0558144446991836e-06,
+ "loss": 0.8404,
+ "step": 4453
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.153849426673812,
+ "learning_rate": 1.053029002254773e-06,
+ "loss": 0.9283,
+ "step": 4454
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8558094985018742,
+ "learning_rate": 1.0502470347108017e-06,
+ "loss": 0.8964,
+ "step": 4455
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9485891526993969,
+ "learning_rate": 1.0474685431477537e-06,
+ "loss": 0.8982,
+ "step": 4456
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.953405765044882,
+ "learning_rate": 1.0446935286447657e-06,
+ "loss": 0.9423,
+ "step": 4457
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9537729372835468,
+ "learning_rate": 1.0419219922796175e-06,
+ "loss": 0.8686,
+ "step": 4458
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.124640597984598,
+ "learning_rate": 1.039153935128744e-06,
+ "loss": 1.0009,
+ "step": 4459
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9752016852506975,
+ "learning_rate": 1.0363893582672246e-06,
+ "loss": 0.8911,
+ "step": 4460
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9389763281898983,
+ "learning_rate": 1.033628262768792e-06,
+ "loss": 0.8828,
+ "step": 4461
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8817628018783435,
+ "learning_rate": 1.0308706497058252e-06,
+ "loss": 0.8775,
+ "step": 4462
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.149099371920294,
+ "learning_rate": 1.0281165201493437e-06,
+ "loss": 0.9358,
+ "step": 4463
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9563098643398701,
+ "learning_rate": 1.0253658751690232e-06,
+ "loss": 0.905,
+ "step": 4464
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7786100041703143,
+ "learning_rate": 1.0226187158331825e-06,
+ "loss": 0.8446,
+ "step": 4465
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8717003504265479,
+ "learning_rate": 1.0198750432087855e-06,
+ "loss": 0.8463,
+ "step": 4466
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9698813047001333,
+ "learning_rate": 1.017134858361446e-06,
+ "loss": 0.9045,
+ "step": 4467
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9273075261187677,
+ "learning_rate": 1.0143981623554155e-06,
+ "loss": 0.9144,
+ "step": 4468
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0034713315580532,
+ "learning_rate": 1.0116649562535984e-06,
+ "loss": 0.8566,
+ "step": 4469
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9388287245230369,
+ "learning_rate": 1.0089352411175424e-06,
+ "loss": 0.9042,
+ "step": 4470
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9197462394117867,
+ "learning_rate": 1.006209018007429e-06,
+ "loss": 0.8565,
+ "step": 4471
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8200700243995165,
+ "learning_rate": 1.0034862879821029e-06,
+ "loss": 0.8142,
+ "step": 4472
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.839765862246526,
+ "learning_rate": 1.0007670520990331e-06,
+ "loss": 0.84,
+ "step": 4473
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.018420139672052,
+ "learning_rate": 9.98051311414342e-07,
+ "loss": 0.9043,
+ "step": 4474
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8344410831177014,
+ "learning_rate": 9.953390669827944e-07,
+ "loss": 0.899,
+ "step": 4475
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.978469099315929,
+ "learning_rate": 9.926303198577913e-07,
+ "loss": 0.8867,
+ "step": 4476
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8968384685972451,
+ "learning_rate": 9.899250710913767e-07,
+ "loss": 0.8743,
+ "step": 4477
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0064599184167913,
+ "learning_rate": 9.872233217342463e-07,
+ "loss": 0.8609,
+ "step": 4478
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9467028482314083,
+ "learning_rate": 9.845250728357214e-07,
+ "loss": 0.8863,
+ "step": 4479
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9683227171178042,
+ "learning_rate": 9.818303254437723e-07,
+ "loss": 0.8644,
+ "step": 4480
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8955860844728178,
+ "learning_rate": 9.791390806050117e-07,
+ "loss": 0.8493,
+ "step": 4481
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8326166348027098,
+ "learning_rate": 9.764513393646812e-07,
+ "loss": 0.8601,
+ "step": 4482
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8680581770069223,
+ "learning_rate": 9.737671027666728e-07,
+ "loss": 0.8561,
+ "step": 4483
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9915909435698445,
+ "learning_rate": 9.710863718535135e-07,
+ "loss": 0.9376,
+ "step": 4484
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9446431900793263,
+ "learning_rate": 9.684091476663659e-07,
+ "loss": 0.8778,
+ "step": 4485
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.109495272715226,
+ "learning_rate": 9.657354312450363e-07,
+ "loss": 0.8336,
+ "step": 4486
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8918388169589561,
+ "learning_rate": 9.630652236279626e-07,
+ "loss": 0.8686,
+ "step": 4487
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0448814368667654,
+ "learning_rate": 9.603985258522219e-07,
+ "loss": 0.9076,
+ "step": 4488
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.882671986823637,
+ "learning_rate": 9.577353389535315e-07,
+ "loss": 0.8426,
+ "step": 4489
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7884297891161138,
+ "learning_rate": 9.550756639662417e-07,
+ "loss": 0.8667,
+ "step": 4490
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0291184667949875,
+ "learning_rate": 9.524195019233407e-07,
+ "loss": 0.8691,
+ "step": 4491
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9151564314671728,
+ "learning_rate": 9.497668538564475e-07,
+ "loss": 0.8411,
+ "step": 4492
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.094450327487545,
+ "learning_rate": 9.471177207958238e-07,
+ "loss": 0.8771,
+ "step": 4493
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8300597469314912,
+ "learning_rate": 9.444721037703597e-07,
+ "loss": 0.8728,
+ "step": 4494
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9139620150552796,
+ "learning_rate": 9.418300038075845e-07,
+ "loss": 0.945,
+ "step": 4495
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9659572796858202,
+ "learning_rate": 9.391914219336606e-07,
+ "loss": 0.8612,
+ "step": 4496
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.018001634380619,
+ "learning_rate": 9.365563591733784e-07,
+ "loss": 0.9111,
+ "step": 4497
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9504457128628776,
+ "learning_rate": 9.33924816550168e-07,
+ "loss": 0.8944,
+ "step": 4498
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9986578534857201,
+ "learning_rate": 9.31296795086093e-07,
+ "loss": 0.817,
+ "step": 4499
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9395606158893343,
+ "learning_rate": 9.286722958018391e-07,
+ "loss": 0.868,
+ "step": 4500
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9602523091812222,
+ "learning_rate": 9.260513197167398e-07,
+ "loss": 0.8538,
+ "step": 4501
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.87687563248471,
+ "learning_rate": 9.234338678487509e-07,
+ "loss": 0.8714,
+ "step": 4502
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.949608181317551,
+ "learning_rate": 9.208199412144559e-07,
+ "loss": 0.8639,
+ "step": 4503
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9271674225765744,
+ "learning_rate": 9.182095408290781e-07,
+ "loss": 0.9022,
+ "step": 4504
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.964669129027502,
+ "learning_rate": 9.156026677064633e-07,
+ "loss": 0.9241,
+ "step": 4505
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9310203042552556,
+ "learning_rate": 9.129993228590917e-07,
+ "loss": 0.8388,
+ "step": 4506
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9643926178742298,
+ "learning_rate": 9.103995072980765e-07,
+ "loss": 0.9154,
+ "step": 4507
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8671705146578107,
+ "learning_rate": 9.078032220331523e-07,
+ "loss": 0.8392,
+ "step": 4508
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.2225591193258285,
+ "learning_rate": 9.052104680726859e-07,
+ "loss": 0.8898,
+ "step": 4509
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9660630856365235,
+ "learning_rate": 9.026212464236772e-07,
+ "loss": 0.8241,
+ "step": 4510
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9340500282731702,
+ "learning_rate": 9.000355580917464e-07,
+ "loss": 0.8555,
+ "step": 4511
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9537654138619575,
+ "learning_rate": 8.974534040811444e-07,
+ "loss": 0.8894,
+ "step": 4512
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.895198937752595,
+ "learning_rate": 8.948747853947526e-07,
+ "loss": 0.9024,
+ "step": 4513
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8766192717238223,
+ "learning_rate": 8.922997030340752e-07,
+ "loss": 0.8115,
+ "step": 4514
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8815951005276368,
+ "learning_rate": 8.897281579992467e-07,
+ "loss": 0.9074,
+ "step": 4515
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.6959857370267856,
+ "learning_rate": 8.871601512890238e-07,
+ "loss": 0.7915,
+ "step": 4516
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9412151747214554,
+ "learning_rate": 8.845956839007897e-07,
+ "loss": 0.8855,
+ "step": 4517
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.0426813298387085,
+ "learning_rate": 8.820347568305543e-07,
+ "loss": 0.8681,
+ "step": 4518
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8092244554699878,
+ "learning_rate": 8.794773710729543e-07,
+ "loss": 0.8603,
+ "step": 4519
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8680896047248979,
+ "learning_rate": 8.769235276212496e-07,
+ "loss": 0.8215,
+ "step": 4520
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.896381811745064,
+ "learning_rate": 8.743732274673189e-07,
+ "loss": 0.7978,
+ "step": 4521
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8977254594725402,
+ "learning_rate": 8.718264716016722e-07,
+ "loss": 0.9257,
+ "step": 4522
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.798855769126776,
+ "learning_rate": 8.692832610134428e-07,
+ "loss": 0.8884,
+ "step": 4523
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9157501102272877,
+ "learning_rate": 8.66743596690377e-07,
+ "loss": 0.8669,
+ "step": 4524
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9695653131871415,
+ "learning_rate": 8.642074796188594e-07,
+ "loss": 0.8686,
+ "step": 4525
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9793148217726962,
+ "learning_rate": 8.61674910783884e-07,
+ "loss": 0.9255,
+ "step": 4526
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9576973985763716,
+ "learning_rate": 8.59145891169072e-07,
+ "loss": 0.8332,
+ "step": 4527
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9081255730978633,
+ "learning_rate": 8.566204217566664e-07,
+ "loss": 0.8195,
+ "step": 4528
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8708156229354137,
+ "learning_rate": 8.540985035275273e-07,
+ "loss": 0.9023,
+ "step": 4529
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8691762245849086,
+ "learning_rate": 8.515801374611432e-07,
+ "loss": 0.8578,
+ "step": 4530
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7866221852061148,
+ "learning_rate": 8.490653245356184e-07,
+ "loss": 0.7866,
+ "step": 4531
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.1989636305157028,
+ "learning_rate": 8.465540657276728e-07,
+ "loss": 0.9471,
+ "step": 4532
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9767141601100402,
+ "learning_rate": 8.44046362012656e-07,
+ "loss": 0.9228,
+ "step": 4533
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8303567486057429,
+ "learning_rate": 8.415422143645247e-07,
+ "loss": 0.8237,
+ "step": 4534
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8703551504870906,
+ "learning_rate": 8.390416237558641e-07,
+ "loss": 0.8909,
+ "step": 4535
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.0998537880731005,
+ "learning_rate": 8.365445911578785e-07,
+ "loss": 0.9127,
+ "step": 4536
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9489973630795188,
+ "learning_rate": 8.340511175403809e-07,
+ "loss": 0.8361,
+ "step": 4537
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.1220324062106524,
+ "learning_rate": 8.315612038718101e-07,
+ "loss": 0.916,
+ "step": 4538
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7752459181440666,
+ "learning_rate": 8.290748511192214e-07,
+ "loss": 0.8526,
+ "step": 4539
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9759138766592379,
+ "learning_rate": 8.265920602482825e-07,
+ "loss": 0.9052,
+ "step": 4540
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.88200196522921,
+ "learning_rate": 8.241128322232816e-07,
+ "loss": 0.9174,
+ "step": 4541
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8480227788746386,
+ "learning_rate": 8.216371680071244e-07,
+ "loss": 0.8847,
+ "step": 4542
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7761758733544654,
+ "learning_rate": 8.191650685613273e-07,
+ "loss": 0.8348,
+ "step": 4543
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9288089411677429,
+ "learning_rate": 8.166965348460298e-07,
+ "loss": 0.9418,
+ "step": 4544
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8512522320599516,
+ "learning_rate": 8.142315678199764e-07,
+ "loss": 0.8531,
+ "step": 4545
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8622197409594021,
+ "learning_rate": 8.117701684405343e-07,
+ "loss": 0.8356,
+ "step": 4546
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9047252942224157,
+ "learning_rate": 8.093123376636836e-07,
+ "loss": 0.8358,
+ "step": 4547
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7767940998413876,
+ "learning_rate": 8.06858076444017e-07,
+ "loss": 0.8704,
+ "step": 4548
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8395712335488582,
+ "learning_rate": 8.044073857347423e-07,
+ "loss": 0.8137,
+ "step": 4549
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0484595587985202,
+ "learning_rate": 8.019602664876758e-07,
+ "loss": 0.9307,
+ "step": 4550
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.880460137991095,
+ "learning_rate": 7.995167196532527e-07,
+ "loss": 0.8784,
+ "step": 4551
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9149158139313484,
+ "learning_rate": 7.970767461805218e-07,
+ "loss": 0.8461,
+ "step": 4552
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9131255299353188,
+ "learning_rate": 7.946403470171326e-07,
+ "loss": 0.827,
+ "step": 4553
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8824362005579756,
+ "learning_rate": 7.922075231093628e-07,
+ "loss": 0.8546,
+ "step": 4554
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8832373651669211,
+ "learning_rate": 7.897782754020889e-07,
+ "loss": 0.8728,
+ "step": 4555
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8069595830282974,
+ "learning_rate": 7.873526048388025e-07,
+ "loss": 0.8346,
+ "step": 4556
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1228418926088712,
+ "learning_rate": 7.849305123616091e-07,
+ "loss": 0.9323,
+ "step": 4557
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9533667074702398,
+ "learning_rate": 7.825119989112173e-07,
+ "loss": 0.8716,
+ "step": 4558
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.923293228778207,
+ "learning_rate": 7.800970654269513e-07,
+ "loss": 0.8432,
+ "step": 4559
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.11626140350239,
+ "learning_rate": 7.776857128467464e-07,
+ "loss": 0.8332,
+ "step": 4560
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8938496801044397,
+ "learning_rate": 7.75277942107141e-07,
+ "loss": 0.8468,
+ "step": 4561
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0354649146549935,
+ "learning_rate": 7.728737541432862e-07,
+ "loss": 0.8427,
+ "step": 4562
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8860950842084511,
+ "learning_rate": 7.704731498889428e-07,
+ "loss": 0.8772,
+ "step": 4563
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.867606498901801,
+ "learning_rate": 7.680761302764727e-07,
+ "loss": 0.7783,
+ "step": 4564
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.7494571050820372,
+ "learning_rate": 7.65682696236858e-07,
+ "loss": 0.7823,
+ "step": 4565
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9718045190437227,
+ "learning_rate": 7.632928486996749e-07,
+ "loss": 0.829,
+ "step": 4566
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8814772987383863,
+ "learning_rate": 7.609065885931155e-07,
+ "loss": 0.905,
+ "step": 4567
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9680328247678344,
+ "learning_rate": 7.585239168439762e-07,
+ "loss": 0.962,
+ "step": 4568
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9590121467205059,
+ "learning_rate": 7.561448343776567e-07,
+ "loss": 0.8774,
+ "step": 4569
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8510595020547232,
+ "learning_rate": 7.537693421181658e-07,
+ "loss": 0.8738,
+ "step": 4570
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9654411670041684,
+ "learning_rate": 7.513974409881186e-07,
+ "loss": 0.8577,
+ "step": 4571
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.6154410191182488,
+ "learning_rate": 7.49029131908734e-07,
+ "loss": 0.7772,
+ "step": 4572
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8124019210849639,
+ "learning_rate": 7.466644157998371e-07,
+ "loss": 0.868,
+ "step": 4573
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9371761053086015,
+ "learning_rate": 7.443032935798533e-07,
+ "loss": 0.895,
+ "step": 4574
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9690588905208973,
+ "learning_rate": 7.419457661658169e-07,
+ "loss": 0.9118,
+ "step": 4575
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.96849815157878,
+ "learning_rate": 7.395918344733644e-07,
+ "loss": 0.9247,
+ "step": 4576
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9868973998873788,
+ "learning_rate": 7.372414994167354e-07,
+ "loss": 0.8891,
+ "step": 4577
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0239718137607154,
+ "learning_rate": 7.348947619087754e-07,
+ "loss": 0.9041,
+ "step": 4578
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9296765587267058,
+ "learning_rate": 7.325516228609264e-07,
+ "loss": 0.8811,
+ "step": 4579
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9506405539975408,
+ "learning_rate": 7.302120831832382e-07,
+ "loss": 0.8922,
+ "step": 4580
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0671397784944519,
+ "learning_rate": 7.278761437843629e-07,
+ "loss": 0.9144,
+ "step": 4581
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9326971853607836,
+ "learning_rate": 7.255438055715469e-07,
+ "loss": 0.9119,
+ "step": 4582
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.856892892626281,
+ "learning_rate": 7.232150694506512e-07,
+ "loss": 0.8945,
+ "step": 4583
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9502589806243291,
+ "learning_rate": 7.208899363261234e-07,
+ "loss": 0.9065,
+ "step": 4584
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9183385431355955,
+ "learning_rate": 7.185684071010224e-07,
+ "loss": 0.8913,
+ "step": 4585
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9281050915140219,
+ "learning_rate": 7.162504826770033e-07,
+ "loss": 0.8903,
+ "step": 4586
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9488448838720547,
+ "learning_rate": 7.139361639543185e-07,
+ "loss": 0.902,
+ "step": 4587
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1643793453730171,
+ "learning_rate": 7.116254518318222e-07,
+ "loss": 0.8976,
+ "step": 4588
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8929835684417432,
+ "learning_rate": 7.093183472069753e-07,
+ "loss": 0.8782,
+ "step": 4589
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8728833870870203,
+ "learning_rate": 7.070148509758223e-07,
+ "loss": 0.848,
+ "step": 4590
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1138405382131311,
+ "learning_rate": 7.047149640330197e-07,
+ "loss": 0.9057,
+ "step": 4591
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9119819160478895,
+ "learning_rate": 7.024186872718164e-07,
+ "loss": 0.8971,
+ "step": 4592
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0586261924649834,
+ "learning_rate": 7.001260215840567e-07,
+ "loss": 0.8966,
+ "step": 4593
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8558439206138747,
+ "learning_rate": 6.978369678601892e-07,
+ "loss": 0.8803,
+ "step": 4594
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1313114584415687,
+ "learning_rate": 6.955515269892533e-07,
+ "loss": 0.9254,
+ "step": 4595
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8687686930697606,
+ "learning_rate": 6.932696998588895e-07,
+ "loss": 0.8345,
+ "step": 4596
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.972762762695167,
+ "learning_rate": 6.909914873553347e-07,
+ "loss": 0.9169,
+ "step": 4597
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0722597366094893,
+ "learning_rate": 6.887168903634178e-07,
+ "loss": 0.8694,
+ "step": 4598
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8700407063622763,
+ "learning_rate": 6.864459097665654e-07,
+ "loss": 0.9118,
+ "step": 4599
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.7231028526782665,
+ "learning_rate": 6.84178546446802e-07,
+ "loss": 0.8277,
+ "step": 4600
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9397621900449872,
+ "learning_rate": 6.819148012847454e-07,
+ "loss": 0.9099,
+ "step": 4601
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8027870927796138,
+ "learning_rate": 6.796546751596089e-07,
+ "loss": 0.8059,
+ "step": 4602
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.825610473131502,
+ "learning_rate": 6.77398168949196e-07,
+ "loss": 0.8641,
+ "step": 4603
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0333130499480339,
+ "learning_rate": 6.751452835299111e-07,
+ "loss": 0.8968,
+ "step": 4604
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.7461096269793116,
+ "learning_rate": 6.728960197767475e-07,
+ "loss": 0.764,
+ "step": 4605
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.1053663946086056,
+ "learning_rate": 6.706503785632934e-07,
+ "loss": 0.8871,
+ "step": 4606
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8087669755123348,
+ "learning_rate": 6.68408360761732e-07,
+ "loss": 0.8347,
+ "step": 4607
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8228167450449801,
+ "learning_rate": 6.661699672428334e-07,
+ "loss": 0.8943,
+ "step": 4608
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.044853902103288,
+ "learning_rate": 6.639351988759657e-07,
+ "loss": 0.9091,
+ "step": 4609
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9646694942021371,
+ "learning_rate": 6.6170405652909e-07,
+ "loss": 0.8832,
+ "step": 4610
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9862719073656867,
+ "learning_rate": 6.594765410687487e-07,
+ "loss": 0.8745,
+ "step": 4611
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9469706978275599,
+ "learning_rate": 6.57252653360092e-07,
+ "loss": 0.8867,
+ "step": 4612
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9621907313147184,
+ "learning_rate": 6.550323942668469e-07,
+ "loss": 0.9048,
+ "step": 4613
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9551120725938772,
+ "learning_rate": 6.528157646513378e-07,
+ "loss": 0.916,
+ "step": 4614
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.165242059134334,
+ "learning_rate": 6.506027653744796e-07,
+ "loss": 0.901,
+ "step": 4615
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8276585384744689,
+ "learning_rate": 6.483933972957734e-07,
+ "loss": 0.8597,
+ "step": 4616
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8674009756824886,
+ "learning_rate": 6.461876612733109e-07,
+ "loss": 0.8642,
+ "step": 4617
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8732681246085701,
+ "learning_rate": 6.4398555816378e-07,
+ "loss": 0.8679,
+ "step": 4618
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0698624969243604,
+ "learning_rate": 6.417870888224476e-07,
+ "loss": 0.9166,
+ "step": 4619
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9269176635216481,
+ "learning_rate": 6.395922541031741e-07,
+ "loss": 0.7976,
+ "step": 4620
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8911483169879542,
+ "learning_rate": 6.374010548584119e-07,
+ "loss": 0.7714,
+ "step": 4621
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9166620422433125,
+ "learning_rate": 6.352134919391928e-07,
+ "loss": 0.8986,
+ "step": 4622
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8503582591660069,
+ "learning_rate": 6.330295661951436e-07,
+ "loss": 0.8285,
+ "step": 4623
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9065331328143107,
+ "learning_rate": 6.308492784744746e-07,
+ "loss": 0.8659,
+ "step": 4624
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9446631454458843,
+ "learning_rate": 6.286726296239854e-07,
+ "loss": 0.8569,
+ "step": 4625
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8410711191297997,
+ "learning_rate": 6.264996204890628e-07,
+ "loss": 0.8798,
+ "step": 4626
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.967536775476823,
+ "learning_rate": 6.24330251913674e-07,
+ "loss": 0.912,
+ "step": 4627
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8490844246194825,
+ "learning_rate": 6.221645247403807e-07,
+ "loss": 0.8055,
+ "step": 4628
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8079323917488994,
+ "learning_rate": 6.200024398103255e-07,
+ "loss": 0.8039,
+ "step": 4629
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9217043952152276,
+ "learning_rate": 6.178439979632367e-07,
+ "loss": 0.9196,
+ "step": 4630
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8245142052913529,
+ "learning_rate": 6.156892000374293e-07,
+ "loss": 0.7905,
+ "step": 4631
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8264377267146533,
+ "learning_rate": 6.135380468698004e-07,
+ "loss": 0.7877,
+ "step": 4632
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9572302543274727,
+ "learning_rate": 6.113905392958342e-07,
+ "loss": 0.9471,
+ "step": 4633
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9894955129199708,
+ "learning_rate": 6.092466781495976e-07,
+ "loss": 0.8881,
+ "step": 4634
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.898577258005014,
+ "learning_rate": 6.071064642637404e-07,
+ "loss": 0.8959,
+ "step": 4635
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9397410181632516,
+ "learning_rate": 6.049698984695007e-07,
+ "loss": 0.8166,
+ "step": 4636
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.7923090009464244,
+ "learning_rate": 6.028369815966917e-07,
+ "loss": 0.8396,
+ "step": 4637
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.2202817464242952,
+ "learning_rate": 6.007077144737161e-07,
+ "loss": 0.9118,
+ "step": 4638
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.969129140892316,
+ "learning_rate": 5.985820979275569e-07,
+ "loss": 0.8887,
+ "step": 4639
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0783977420547077,
+ "learning_rate": 5.96460132783776e-07,
+ "loss": 0.8832,
+ "step": 4640
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9137748341764742,
+ "learning_rate": 5.943418198665251e-07,
+ "loss": 0.8517,
+ "step": 4641
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9473977104987698,
+ "learning_rate": 5.922271599985286e-07,
+ "loss": 0.9196,
+ "step": 4642
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8204196536378642,
+ "learning_rate": 5.901161540010969e-07,
+ "loss": 0.8097,
+ "step": 4643
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9277511181073421,
+ "learning_rate": 5.880088026941233e-07,
+ "loss": 0.8416,
+ "step": 4644
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9452367143360048,
+ "learning_rate": 5.859051068960741e-07,
+ "loss": 0.9401,
+ "step": 4645
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.987232865712406,
+ "learning_rate": 5.838050674240025e-07,
+ "loss": 0.9142,
+ "step": 4646
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9357750824353854,
+ "learning_rate": 5.817086850935416e-07,
+ "loss": 0.8483,
+ "step": 4647
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9534772237743926,
+ "learning_rate": 5.796159607189001e-07,
+ "loss": 0.838,
+ "step": 4648
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0211652109876297,
+ "learning_rate": 5.775268951128676e-07,
+ "loss": 0.8509,
+ "step": 4649
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9323112138160999,
+ "learning_rate": 5.754414890868154e-07,
+ "loss": 0.8589,
+ "step": 4650
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8750232423096678,
+ "learning_rate": 5.733597434506877e-07,
+ "loss": 0.8532,
+ "step": 4651
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.026382237023704,
+ "learning_rate": 5.712816590130133e-07,
+ "loss": 0.9044,
+ "step": 4652
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9624758633770257,
+ "learning_rate": 5.692072365808954e-07,
+ "loss": 0.8804,
+ "step": 4653
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9989242644321323,
+ "learning_rate": 5.671364769600162e-07,
+ "loss": 0.9247,
+ "step": 4654
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9616525628604601,
+ "learning_rate": 5.650693809546348e-07,
+ "loss": 0.9033,
+ "step": 4655
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9096015769269892,
+ "learning_rate": 5.630059493675866e-07,
+ "loss": 0.8009,
+ "step": 4656
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0637758788778562,
+ "learning_rate": 5.60946183000285e-07,
+ "loss": 0.9068,
+ "step": 4657
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8901945313508792,
+ "learning_rate": 5.5889008265272e-07,
+ "loss": 0.8788,
+ "step": 4658
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9099875967178034,
+ "learning_rate": 5.568376491234562e-07,
+ "loss": 0.8796,
+ "step": 4659
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0202809122623,
+ "learning_rate": 5.547888832096382e-07,
+ "loss": 0.9962,
+ "step": 4660
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.954658548268079,
+ "learning_rate": 5.527437857069784e-07,
+ "loss": 0.8394,
+ "step": 4661
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9320796453469051,
+ "learning_rate": 5.507023574097725e-07,
+ "loss": 0.9111,
+ "step": 4662
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0099055589396055,
+ "learning_rate": 5.486645991108875e-07,
+ "loss": 0.8903,
+ "step": 4663
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9958238294406384,
+ "learning_rate": 5.466305116017623e-07,
+ "loss": 0.9471,
+ "step": 4664
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8651903899596718,
+ "learning_rate": 5.446000956724174e-07,
+ "loss": 0.8909,
+ "step": 4665
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0929962562558704,
+ "learning_rate": 5.425733521114396e-07,
+ "loss": 0.9027,
+ "step": 4666
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.002891888040804,
+ "learning_rate": 5.405502817059937e-07,
+ "loss": 0.9048,
+ "step": 4667
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0631978643606623,
+ "learning_rate": 5.385308852418191e-07,
+ "loss": 0.9402,
+ "step": 4668
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0592678495354233,
+ "learning_rate": 5.365151635032218e-07,
+ "loss": 0.8854,
+ "step": 4669
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9465884927280965,
+ "learning_rate": 5.345031172730875e-07,
+ "loss": 0.9383,
+ "step": 4670
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.055539568239519,
+ "learning_rate": 5.324947473328735e-07,
+ "loss": 0.8672,
+ "step": 4671
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0178495256136613,
+ "learning_rate": 5.304900544626046e-07,
+ "loss": 0.9695,
+ "step": 4672
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9207883190778684,
+ "learning_rate": 5.284890394408826e-07,
+ "loss": 0.9053,
+ "step": 4673
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0413991164040401,
+ "learning_rate": 5.264917030448757e-07,
+ "loss": 0.917,
+ "step": 4674
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9852974225095827,
+ "learning_rate": 5.244980460503268e-07,
+ "loss": 0.9199,
+ "step": 4675
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9101478254967975,
+ "learning_rate": 5.225080692315532e-07,
+ "loss": 0.8581,
+ "step": 4676
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9224577901270811,
+ "learning_rate": 5.205217733614353e-07,
+ "loss": 0.8575,
+ "step": 4677
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8052472896237471,
+ "learning_rate": 5.185391592114286e-07,
+ "loss": 0.8244,
+ "step": 4678
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9836455868456991,
+ "learning_rate": 5.165602275515592e-07,
+ "loss": 0.9301,
+ "step": 4679
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.1892255587314617,
+ "learning_rate": 5.145849791504187e-07,
+ "loss": 0.9277,
+ "step": 4680
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9869571273541833,
+ "learning_rate": 5.126134147751716e-07,
+ "loss": 0.831,
+ "step": 4681
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8052676873067713,
+ "learning_rate": 5.106455351915507e-07,
+ "loss": 0.79,
+ "step": 4682
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9338371554851755,
+ "learning_rate": 5.086813411638581e-07,
+ "loss": 0.9352,
+ "step": 4683
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9015422339295802,
+ "learning_rate": 5.067208334549656e-07,
+ "loss": 0.9437,
+ "step": 4684
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.054167179399989,
+ "learning_rate": 5.047640128263087e-07,
+ "loss": 0.9158,
+ "step": 4685
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9082560787862947,
+ "learning_rate": 5.028108800378961e-07,
+ "loss": 0.8905,
+ "step": 4686
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8219471768430575,
+ "learning_rate": 5.008614358483021e-07,
+ "loss": 0.8757,
+ "step": 4687
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.780745425301807,
+ "learning_rate": 4.989156810146667e-07,
+ "loss": 0.8158,
+ "step": 4688
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9234343419367949,
+ "learning_rate": 4.969736162927019e-07,
+ "loss": 0.8761,
+ "step": 4689
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0014977203303232,
+ "learning_rate": 4.9503524243668e-07,
+ "loss": 0.9522,
+ "step": 4690
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9339230282937171,
+ "learning_rate": 4.931005601994432e-07,
+ "loss": 0.9174,
+ "step": 4691
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9498858891420158,
+ "learning_rate": 4.911695703324038e-07,
+ "loss": 0.8962,
+ "step": 4692
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8832518040504395,
+ "learning_rate": 4.892422735855284e-07,
+ "loss": 0.8549,
+ "step": 4693
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8408928804118271,
+ "learning_rate": 4.873186707073663e-07,
+ "loss": 0.8011,
+ "step": 4694
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9207618156688188,
+ "learning_rate": 4.853987624450151e-07,
+ "loss": 0.8566,
+ "step": 4695
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9262469810019246,
+ "learning_rate": 4.834825495441475e-07,
+ "loss": 0.8915,
+ "step": 4696
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8846754187057477,
+ "learning_rate": 4.815700327490014e-07,
+ "loss": 0.9321,
+ "step": 4697
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9216289128668232,
+ "learning_rate": 4.796612128023726e-07,
+ "loss": 0.8932,
+ "step": 4698
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8957453245571164,
+ "learning_rate": 4.777560904456236e-07,
+ "loss": 0.867,
+ "step": 4699
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9659405217417328,
+ "learning_rate": 4.7585466641868696e-07,
+ "loss": 0.8371,
+ "step": 4700
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9246429973713162,
+ "learning_rate": 4.7395694146004976e-07,
+ "loss": 0.8532,
+ "step": 4701
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9730178048995508,
+ "learning_rate": 4.7206291630677024e-07,
+ "loss": 0.8628,
+ "step": 4702
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8908886670821141,
+ "learning_rate": 4.7017259169446104e-07,
+ "loss": 0.8401,
+ "step": 4703
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.2569337175476047,
+ "learning_rate": 4.6828596835730487e-07,
+ "loss": 0.9729,
+ "step": 4704
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8871567475172962,
+ "learning_rate": 4.664030470280467e-07,
+ "loss": 0.8877,
+ "step": 4705
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8771797489505685,
+ "learning_rate": 4.645238284379883e-07,
+ "loss": 0.876,
+ "step": 4706
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8974155182599607,
+ "learning_rate": 4.626483133169968e-07,
+ "loss": 0.8647,
+ "step": 4707
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.03217165439121,
+ "learning_rate": 4.60776502393504e-07,
+ "loss": 0.9047,
+ "step": 4708
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8647869508096034,
+ "learning_rate": 4.5890839639449514e-07,
+ "loss": 0.9272,
+ "step": 4709
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9483061117258482,
+ "learning_rate": 4.5704399604552417e-07,
+ "loss": 0.8963,
+ "step": 4710
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0459603859839062,
+ "learning_rate": 4.551833020707008e-07,
+ "loss": 0.752,
+ "step": 4711
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8095336947195265,
+ "learning_rate": 4.533263151926981e-07,
+ "loss": 0.8358,
+ "step": 4712
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8731203447007811,
+ "learning_rate": 4.514730361327502e-07,
+ "loss": 0.8537,
+ "step": 4713
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7576831899020193,
+ "learning_rate": 4.4962346561064574e-07,
+ "loss": 0.7908,
+ "step": 4714
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9335440679572502,
+ "learning_rate": 4.4777760434473796e-07,
+ "loss": 0.8944,
+ "step": 4715
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.2370637457358102,
+ "learning_rate": 4.4593545305193774e-07,
+ "loss": 0.8908,
+ "step": 4716
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.920934958052355,
+ "learning_rate": 4.440970124477173e-07,
+ "loss": 0.8929,
+ "step": 4717
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.848866861588948,
+ "learning_rate": 4.4226228324610544e-07,
+ "loss": 0.8371,
+ "step": 4718
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7779812376352646,
+ "learning_rate": 4.404312661596877e-07,
+ "loss": 0.7985,
+ "step": 4719
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7910637849699313,
+ "learning_rate": 4.386039618996119e-07,
+ "loss": 0.8588,
+ "step": 4720
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9347730768411243,
+ "learning_rate": 4.3678037117558135e-07,
+ "loss": 0.869,
+ "step": 4721
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8317945691068949,
+ "learning_rate": 4.349604946958563e-07,
+ "loss": 0.9009,
+ "step": 4722
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7744877050845054,
+ "learning_rate": 4.331443331672591e-07,
+ "loss": 0.7898,
+ "step": 4723
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0950992364196257,
+ "learning_rate": 4.313318872951633e-07,
+ "loss": 0.8883,
+ "step": 4724
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.021349287582722,
+ "learning_rate": 4.295231577835024e-07,
+ "loss": 0.8475,
+ "step": 4725
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.91315891502072,
+ "learning_rate": 4.2771814533476787e-07,
+ "loss": 0.9307,
+ "step": 4726
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.009209027305893,
+ "learning_rate": 4.2591685065000223e-07,
+ "loss": 0.848,
+ "step": 4727
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9653479615075518,
+ "learning_rate": 4.241192744288092e-07,
+ "loss": 0.8995,
+ "step": 4728
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.039540610615299,
+ "learning_rate": 4.223254173693492e-07,
+ "loss": 0.9164,
+ "step": 4729
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.929264173702999,
+ "learning_rate": 4.2053528016833267e-07,
+ "loss": 0.9028,
+ "step": 4730
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8152833470357382,
+ "learning_rate": 4.1874886352103015e-07,
+ "loss": 0.8695,
+ "step": 4731
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8560674313813604,
+ "learning_rate": 4.1696616812126333e-07,
+ "loss": 0.8124,
+ "step": 4732
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9548884844167742,
+ "learning_rate": 4.1518719466141165e-07,
+ "loss": 0.8691,
+ "step": 4733
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8539519754960126,
+ "learning_rate": 4.13411943832408e-07,
+ "loss": 0.7609,
+ "step": 4734
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9552903690370225,
+ "learning_rate": 4.116404163237386e-07,
+ "loss": 0.8559,
+ "step": 4735
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9345544038413095,
+ "learning_rate": 4.0987261282344425e-07,
+ "loss": 0.8994,
+ "step": 4736
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8670718582046053,
+ "learning_rate": 4.081085340181223e-07,
+ "loss": 0.8565,
+ "step": 4737
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9917587272279675,
+ "learning_rate": 4.06348180592917e-07,
+ "loss": 0.9179,
+ "step": 4738
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9062623994797032,
+ "learning_rate": 4.0459155323153034e-07,
+ "loss": 0.8471,
+ "step": 4739
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8640138948660695,
+ "learning_rate": 4.028386526162176e-07,
+ "loss": 0.8527,
+ "step": 4740
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9711039171948731,
+ "learning_rate": 4.010894794277831e-07,
+ "loss": 0.9265,
+ "step": 4741
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8932933479919942,
+ "learning_rate": 3.993440343455879e-07,
+ "loss": 0.851,
+ "step": 4742
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.072058425315494,
+ "learning_rate": 3.976023180475397e-07,
+ "loss": 0.8821,
+ "step": 4743
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8879501099847686,
+ "learning_rate": 3.95864331210104e-07,
+ "loss": 0.8876,
+ "step": 4744
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0169064021683811,
+ "learning_rate": 3.941300745082932e-07,
+ "loss": 0.8965,
+ "step": 4745
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.971834892069378,
+ "learning_rate": 3.9239954861567177e-07,
+ "loss": 0.9413,
+ "step": 4746
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9077071568664802,
+ "learning_rate": 3.906727542043598e-07,
+ "loss": 0.9478,
+ "step": 4747
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9783496792117957,
+ "learning_rate": 3.8894969194502083e-07,
+ "loss": 0.8484,
+ "step": 4748
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7577751457087538,
+ "learning_rate": 3.87230362506873e-07,
+ "loss": 0.7764,
+ "step": 4749
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8327591036758225,
+ "learning_rate": 3.8551476655768527e-07,
+ "loss": 0.8193,
+ "step": 4750
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8376407389609154,
+ "learning_rate": 3.8380290476377255e-07,
+ "loss": 0.8738,
+ "step": 4751
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.041387180792664,
+ "learning_rate": 3.8209477779000637e-07,
+ "loss": 0.7963,
+ "step": 4752
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0508160439123053,
+ "learning_rate": 3.803903862998004e-07,
+ "loss": 0.9625,
+ "step": 4753
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8819817754321476,
+ "learning_rate": 3.7868973095512185e-07,
+ "loss": 0.8513,
+ "step": 4754
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8633950002628878,
+ "learning_rate": 3.7699281241648565e-07,
+ "loss": 0.8388,
+ "step": 4755
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9430616004578857,
+ "learning_rate": 3.7529963134295466e-07,
+ "loss": 0.8794,
+ "step": 4756
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8202230404710976,
+ "learning_rate": 3.736101883921406e-07,
+ "loss": 0.8608,
+ "step": 4757
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9111800394922659,
+ "learning_rate": 3.719244842202074e-07,
+ "loss": 0.8701,
+ "step": 4758
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9648783251420716,
+ "learning_rate": 3.702425194818582e-07,
+ "loss": 0.9106,
+ "step": 4759
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8601473565674794,
+ "learning_rate": 3.685642948303503e-07,
+ "loss": 0.9002,
+ "step": 4760
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8695453215948808,
+ "learning_rate": 3.66889810917489e-07,
+ "loss": 0.8081,
+ "step": 4761
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8832004349362633,
+ "learning_rate": 3.6521906839362187e-07,
+ "loss": 0.9008,
+ "step": 4762
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9373870441318044,
+ "learning_rate": 3.635520679076465e-07,
+ "loss": 0.8846,
+ "step": 4763
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8537989605488135,
+ "learning_rate": 3.6188881010700725e-07,
+ "loss": 0.874,
+ "step": 4764
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9555087172024136,
+ "learning_rate": 3.6022929563769513e-07,
+ "loss": 0.8977,
+ "step": 4765
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.89565777069052,
+ "learning_rate": 3.5857352514424573e-07,
+ "loss": 0.8879,
+ "step": 4766
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9683873603155793,
+ "learning_rate": 3.5692149926974006e-07,
+ "loss": 0.8812,
+ "step": 4767
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8446197704108228,
+ "learning_rate": 3.552732186558072e-07,
+ "loss": 0.8336,
+ "step": 4768
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7131766899855473,
+ "learning_rate": 3.536286839426195e-07,
+ "loss": 0.8001,
+ "step": 4769
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8623764633556321,
+ "learning_rate": 3.51987895768896e-07,
+ "loss": 0.835,
+ "step": 4770
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8598354513537586,
+ "learning_rate": 3.5035085477190143e-07,
+ "loss": 0.8347,
+ "step": 4771
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8913383301607862,
+ "learning_rate": 3.4871756158744054e-07,
+ "loss": 0.8973,
+ "step": 4772
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9345288783189069,
+ "learning_rate": 3.4708801684986693e-07,
+ "loss": 0.9262,
+ "step": 4773
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9447309844765478,
+ "learning_rate": 3.454622211920766e-07,
+ "loss": 0.8473,
+ "step": 4774
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9736499373557043,
+ "learning_rate": 3.4384017524551116e-07,
+ "loss": 0.883,
+ "step": 4775
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8309086476311517,
+ "learning_rate": 3.422218796401544e-07,
+ "loss": 0.8497,
+ "step": 4776
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0225774239584255,
+ "learning_rate": 3.4060733500453247e-07,
+ "loss": 0.895,
+ "step": 4777
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8911865934497273,
+ "learning_rate": 3.38996541965716e-07,
+ "loss": 0.9195,
+ "step": 4778
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8443701653753309,
+ "learning_rate": 3.3738950114932e-07,
+ "loss": 0.8844,
+ "step": 4779
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7107396921526531,
+ "learning_rate": 3.3578621317949755e-07,
+ "loss": 0.779,
+ "step": 4780
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8399893121659415,
+ "learning_rate": 3.341866786789505e-07,
+ "loss": 0.8436,
+ "step": 4781
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9685815613770961,
+ "learning_rate": 3.325908982689185e-07,
+ "loss": 0.9355,
+ "step": 4782
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9010482476442921,
+ "learning_rate": 3.309988725691837e-07,
+ "loss": 0.8462,
+ "step": 4783
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.004028997913816,
+ "learning_rate": 3.294106021980714e-07,
+ "loss": 0.8934,
+ "step": 4784
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9467496885123551,
+ "learning_rate": 3.278260877724471e-07,
+ "loss": 0.8518,
+ "step": 4785
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9611481887716877,
+ "learning_rate": 3.2624532990771507e-07,
+ "loss": 0.9238,
+ "step": 4786
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9211305354120007,
+ "learning_rate": 3.2466832921782986e-07,
+ "loss": 0.8427,
+ "step": 4787
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9780646020436263,
+ "learning_rate": 3.2309508631527486e-07,
+ "loss": 0.9016,
+ "step": 4788
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8852928545464044,
+ "learning_rate": 3.215256018110824e-07,
+ "loss": 0.8977,
+ "step": 4789
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8279288966181191,
+ "learning_rate": 3.199598763148215e-07,
+ "loss": 0.7681,
+ "step": 4790
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.909226871258729,
+ "learning_rate": 3.183979104346002e-07,
+ "loss": 0.8608,
+ "step": 4791
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0171373695462986,
+ "learning_rate": 3.1683970477706994e-07,
+ "loss": 0.899,
+ "step": 4792
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0009925275785496,
+ "learning_rate": 3.1528525994741876e-07,
+ "loss": 0.9874,
+ "step": 4793
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8224974832611328,
+ "learning_rate": 3.13734576549376e-07,
+ "loss": 0.8328,
+ "step": 4794
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8683064937148369,
+ "learning_rate": 3.121876551852099e-07,
+ "loss": 0.8416,
+ "step": 4795
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8806177741538391,
+ "learning_rate": 3.1064449645572536e-07,
+ "loss": 0.8782,
+ "step": 4796
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9123231858787465,
+ "learning_rate": 3.091051009602675e-07,
+ "loss": 0.8457,
+ "step": 4797
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7618300549174845,
+ "learning_rate": 3.0756946929672017e-07,
+ "loss": 0.8462,
+ "step": 4798
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8791781943865703,
+ "learning_rate": 3.060376020615052e-07,
+ "loss": 0.8286,
+ "step": 4799
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.951463421136622,
+ "learning_rate": 3.0450949984958347e-07,
+ "loss": 0.8521,
+ "step": 4800
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8866527917252199,
+ "learning_rate": 3.0298516325444893e-07,
+ "loss": 0.8836,
+ "step": 4801
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.893886355824411,
+ "learning_rate": 3.0146459286813924e-07,
+ "loss": 0.7967,
+ "step": 4802
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.961422977617786,
+ "learning_rate": 2.999477892812264e-07,
+ "loss": 0.8197,
+ "step": 4803
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0167513068531195,
+ "learning_rate": 2.984347530828158e-07,
+ "loss": 0.9593,
+ "step": 4804
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0707092859236371,
+ "learning_rate": 2.969254848605585e-07,
+ "loss": 0.8534,
+ "step": 4805
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0635957905024185,
+ "learning_rate": 2.9541998520063344e-07,
+ "loss": 0.8137,
+ "step": 4806
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9105437286875733,
+ "learning_rate": 2.9391825468775946e-07,
+ "loss": 0.8645,
+ "step": 4807
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8364016857339801,
+ "learning_rate": 2.9242029390519454e-07,
+ "loss": 0.8066,
+ "step": 4808
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.009222610726187,
+ "learning_rate": 2.909261034347255e-07,
+ "loss": 0.923,
+ "step": 4809
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8628487728144087,
+ "learning_rate": 2.894356838566792e-07,
+ "loss": 0.9011,
+ "step": 4810
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7467133884872036,
+ "learning_rate": 2.879490357499204e-07,
+ "loss": 0.8536,
+ "step": 4811
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0318317822299221,
+ "learning_rate": 2.864661596918428e-07,
+ "loss": 0.8737,
+ "step": 4812
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1852552119588342,
+ "learning_rate": 2.849870562583812e-07,
+ "loss": 0.9416,
+ "step": 4813
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7617444243444661,
+ "learning_rate": 2.8351172602399945e-07,
+ "loss": 0.7771,
+ "step": 4814
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9490221699601976,
+ "learning_rate": 2.8204016956169924e-07,
+ "loss": 0.8231,
+ "step": 4815
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7905084831280471,
+ "learning_rate": 2.8057238744301994e-07,
+ "loss": 0.8261,
+ "step": 4816
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9927543287963589,
+ "learning_rate": 2.7910838023802676e-07,
+ "loss": 0.9189,
+ "step": 4817
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8495678485938293,
+ "learning_rate": 2.7764814851532485e-07,
+ "loss": 0.9281,
+ "step": 4818
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8981715562853979,
+ "learning_rate": 2.761916928420527e-07,
+ "loss": 0.8538,
+ "step": 4819
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.281907221968286,
+ "learning_rate": 2.74739013783879e-07,
+ "loss": 0.9449,
+ "step": 4820
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8362923179945122,
+ "learning_rate": 2.7329011190500797e-07,
+ "loss": 0.8955,
+ "step": 4821
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1619832823970573,
+ "learning_rate": 2.7184498776817615e-07,
+ "loss": 0.8837,
+ "step": 4822
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9110960707661918,
+ "learning_rate": 2.704036419346534e-07,
+ "loss": 0.8811,
+ "step": 4823
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1212523560860714,
+ "learning_rate": 2.689660749642442e-07,
+ "loss": 0.9146,
+ "step": 4824
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0730838433206684,
+ "learning_rate": 2.675322874152786e-07,
+ "loss": 0.951,
+ "step": 4825
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8724336401258055,
+ "learning_rate": 2.6610227984462556e-07,
+ "loss": 0.8174,
+ "step": 4826
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7584430041691073,
+ "learning_rate": 2.646760528076842e-07,
+ "loss": 0.8407,
+ "step": 4827
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9256198254167516,
+ "learning_rate": 2.6325360685838243e-07,
+ "loss": 0.8554,
+ "step": 4828
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8910253583304542,
+ "learning_rate": 2.618349425491851e-07,
+ "loss": 0.8225,
+ "step": 4829
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0123914308218305,
+ "learning_rate": 2.604200604310825e-07,
+ "loss": 0.9156,
+ "step": 4830
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.2241093981548952,
+ "learning_rate": 2.590089610535984e-07,
+ "loss": 0.8943,
+ "step": 4831
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0372795070500178,
+ "learning_rate": 2.5760164496479e-07,
+ "loss": 0.8725,
+ "step": 4832
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9015191328280726,
+ "learning_rate": 2.5619811271123897e-07,
+ "loss": 0.8565,
+ "step": 4833
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1773068646916138,
+ "learning_rate": 2.5479836483806586e-07,
+ "loss": 0.9254,
+ "step": 4834
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9129350713541154,
+ "learning_rate": 2.5340240188891143e-07,
+ "loss": 0.846,
+ "step": 4835
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9094144875731605,
+ "learning_rate": 2.520102244059552e-07,
+ "loss": 0.9001,
+ "step": 4836
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9342262032921407,
+ "learning_rate": 2.506218329299026e-07,
+ "loss": 0.9003,
+ "step": 4837
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0149200179717532,
+ "learning_rate": 2.4923722799998664e-07,
+ "loss": 0.899,
+ "step": 4838
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9571938084709873,
+ "learning_rate": 2.4785641015397375e-07,
+ "loss": 0.8604,
+ "step": 4839
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9507174272791346,
+ "learning_rate": 2.464793799281573e-07,
+ "loss": 0.8968,
+ "step": 4840
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0268259071415713,
+ "learning_rate": 2.4510613785735936e-07,
+ "loss": 0.8992,
+ "step": 4841
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9180622195280348,
+ "learning_rate": 2.4373668447493225e-07,
+ "loss": 0.8623,
+ "step": 4842
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8173243364714653,
+ "learning_rate": 2.423710203127561e-07,
+ "loss": 0.8481,
+ "step": 4843
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8501582439070058,
+ "learning_rate": 2.410091459012376e-07,
+ "loss": 0.8666,
+ "step": 4844
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7436945020982553,
+ "learning_rate": 2.3965106176931375e-07,
+ "loss": 0.8033,
+ "step": 4845
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0904856546692936,
+ "learning_rate": 2.3829676844444926e-07,
+ "loss": 0.8943,
+ "step": 4846
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9200488567790596,
+ "learning_rate": 2.3694626645263675e-07,
+ "loss": 0.9005,
+ "step": 4847
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9160577148251488,
+ "learning_rate": 2.3559955631839436e-07,
+ "loss": 0.8582,
+ "step": 4848
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8691999171424171,
+ "learning_rate": 2.3425663856476932e-07,
+ "loss": 0.8813,
+ "step": 4849
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.889963183329649,
+ "learning_rate": 2.3291751371333438e-07,
+ "loss": 0.8405,
+ "step": 4850
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.83305665054129,
+ "learning_rate": 2.3158218228419127e-07,
+ "loss": 0.8162,
+ "step": 4851
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.94606023936023,
+ "learning_rate": 2.3025064479596625e-07,
+ "loss": 0.8675,
+ "step": 4852
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9921362321103562,
+ "learning_rate": 2.2892290176581678e-07,
+ "loss": 0.8462,
+ "step": 4853
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.799977501340026,
+ "learning_rate": 2.2759895370941809e-07,
+ "loss": 0.8123,
+ "step": 4854
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9659143817670327,
+ "learning_rate": 2.2627880114097779e-07,
+ "loss": 0.8679,
+ "step": 4855
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9412116769931023,
+ "learning_rate": 2.2496244457323013e-07,
+ "loss": 0.8911,
+ "step": 4856
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9450756317603868,
+ "learning_rate": 2.2364988451742953e-07,
+ "loss": 0.8894,
+ "step": 4857
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8983220803716617,
+ "learning_rate": 2.2234112148336373e-07,
+ "loss": 0.8341,
+ "step": 4858
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0268524644220929,
+ "learning_rate": 2.2103615597933613e-07,
+ "loss": 0.9675,
+ "step": 4859
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.1491366209979879,
+ "learning_rate": 2.1973498851218244e-07,
+ "loss": 0.9454,
+ "step": 4860
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9358090668411937,
+ "learning_rate": 2.1843761958726283e-07,
+ "loss": 0.8429,
+ "step": 4861
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.6854621880409395,
+ "learning_rate": 2.1714404970845647e-07,
+ "loss": 0.7782,
+ "step": 4862
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9795775231339658,
+ "learning_rate": 2.1585427937817594e-07,
+ "loss": 0.9129,
+ "step": 4863
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8519581561309496,
+ "learning_rate": 2.145683090973494e-07,
+ "loss": 0.8296,
+ "step": 4864
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8514572277257874,
+ "learning_rate": 2.1328613936543396e-07,
+ "loss": 0.9167,
+ "step": 4865
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.852330306254833,
+ "learning_rate": 2.1200777068041134e-07,
+ "loss": 0.8427,
+ "step": 4866
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9473791189632517,
+ "learning_rate": 2.1073320353878102e-07,
+ "loss": 0.8657,
+ "step": 4867
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9847177080882329,
+ "learning_rate": 2.0946243843557367e-07,
+ "loss": 0.9212,
+ "step": 4868
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8465542040304344,
+ "learning_rate": 2.0819547586434008e-07,
+ "loss": 0.8873,
+ "step": 4869
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.951387985091641,
+ "learning_rate": 2.0693231631715105e-07,
+ "loss": 0.8933,
+ "step": 4870
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9266622429240401,
+ "learning_rate": 2.0567296028460638e-07,
+ "loss": 0.8453,
+ "step": 4871
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8174991156044403,
+ "learning_rate": 2.0441740825582258e-07,
+ "loss": 0.838,
+ "step": 4872
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0089077722732323,
+ "learning_rate": 2.0316566071844402e-07,
+ "loss": 0.8936,
+ "step": 4873
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0920332199012868,
+ "learning_rate": 2.0191771815863292e-07,
+ "loss": 0.9403,
+ "step": 4874
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9544594524434065,
+ "learning_rate": 2.0067358106107714e-07,
+ "loss": 0.85,
+ "step": 4875
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0093062896803937,
+ "learning_rate": 1.994332499089846e-07,
+ "loss": 0.8766,
+ "step": 4876
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9205446344886734,
+ "learning_rate": 1.9819672518408662e-07,
+ "loss": 0.8626,
+ "step": 4877
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8377835543230611,
+ "learning_rate": 1.9696400736663457e-07,
+ "loss": 0.8428,
+ "step": 4878
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9349688706042524,
+ "learning_rate": 1.9573509693540104e-07,
+ "loss": 0.8631,
+ "step": 4879
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8660003918007885,
+ "learning_rate": 1.9450999436768093e-07,
+ "loss": 0.8902,
+ "step": 4880
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9229683300098562,
+ "learning_rate": 1.9328870013929134e-07,
+ "loss": 0.9103,
+ "step": 4881
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0245997505612194,
+ "learning_rate": 1.9207121472456846e-07,
+ "loss": 0.9036,
+ "step": 4882
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8394271533983679,
+ "learning_rate": 1.9085753859636736e-07,
+ "loss": 0.7996,
+ "step": 4883
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8895116574215128,
+ "learning_rate": 1.8964767222606873e-07,
+ "loss": 0.8934,
+ "step": 4884
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8271813502907709,
+ "learning_rate": 1.8844161608356782e-07,
+ "loss": 0.8819,
+ "step": 4885
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.960087942291403,
+ "learning_rate": 1.872393706372866e-07,
+ "loss": 0.8769,
+ "step": 4886
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9867545700235159,
+ "learning_rate": 1.8604093635416155e-07,
+ "loss": 0.9465,
+ "step": 4887
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9067425738013761,
+ "learning_rate": 1.8484631369964922e-07,
+ "loss": 0.875,
+ "step": 4888
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.6951368714852485,
+ "learning_rate": 1.8365550313772852e-07,
+ "loss": 0.7354,
+ "step": 4889
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8670588577281381,
+ "learning_rate": 1.8246850513089832e-07,
+ "loss": 0.8523,
+ "step": 4890
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8903424346494262,
+ "learning_rate": 1.8128532014017098e-07,
+ "loss": 0.8587,
+ "step": 4891
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8822609628592021,
+ "learning_rate": 1.8010594862508669e-07,
+ "loss": 0.8117,
+ "step": 4892
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9202984522012919,
+ "learning_rate": 1.789303910436968e-07,
+ "loss": 0.8916,
+ "step": 4893
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0830061317444566,
+ "learning_rate": 1.777586478525739e-07,
+ "loss": 0.9453,
+ "step": 4894
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9974045244723038,
+ "learning_rate": 1.7659071950681172e-07,
+ "loss": 0.8407,
+ "step": 4895
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9650522807388956,
+ "learning_rate": 1.754266064600174e-07,
+ "loss": 0.9346,
+ "step": 4896
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9672567598313921,
+ "learning_rate": 1.742663091643204e-07,
+ "loss": 0.8821,
+ "step": 4897
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9361755995042439,
+ "learning_rate": 1.7310982807036915e-07,
+ "loss": 0.8802,
+ "step": 4898
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.938968945334091,
+ "learning_rate": 1.719571636273243e-07,
+ "loss": 0.8815,
+ "step": 4899
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8959667067165109,
+ "learning_rate": 1.7080831628286886e-07,
+ "loss": 0.8313,
+ "step": 4900
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9848751342582164,
+ "learning_rate": 1.6966328648320152e-07,
+ "loss": 0.8996,
+ "step": 4901
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9561656647857733,
+ "learning_rate": 1.685220746730387e-07,
+ "loss": 0.876,
+ "step": 4902
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9321518810751969,
+ "learning_rate": 1.673846812956137e-07,
+ "loss": 0.8366,
+ "step": 4903
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9301582279701739,
+ "learning_rate": 1.6625110679267642e-07,
+ "loss": 0.8306,
+ "step": 4904
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9054196529247108,
+ "learning_rate": 1.6512135160449583e-07,
+ "loss": 0.8958,
+ "step": 4905
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9301361200683044,
+ "learning_rate": 1.6399541616985648e-07,
+ "loss": 0.9442,
+ "step": 4906
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9265826979847209,
+ "learning_rate": 1.6287330092605525e-07,
+ "loss": 0.8635,
+ "step": 4907
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9315082930403457,
+ "learning_rate": 1.6175500630891128e-07,
+ "loss": 0.8604,
+ "step": 4908
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0831395418253844,
+ "learning_rate": 1.6064053275275716e-07,
+ "loss": 0.8267,
+ "step": 4909
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8879856142721188,
+ "learning_rate": 1.5952988069044105e-07,
+ "loss": 0.8619,
+ "step": 4910
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9450149735390804,
+ "learning_rate": 1.5842305055332796e-07,
+ "loss": 0.8875,
+ "step": 4911
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9907386770499438,
+ "learning_rate": 1.573200427712973e-07,
+ "loss": 0.8901,
+ "step": 4912
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9961837741898627,
+ "learning_rate": 1.562208577727442e-07,
+ "loss": 0.869,
+ "step": 4913
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.6908371104653627,
+ "learning_rate": 1.551254959845805e-07,
+ "loss": 0.781,
+ "step": 4914
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.848932592726013,
+ "learning_rate": 1.540339578322314e-07,
+ "loss": 0.8398,
+ "step": 4915
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7969892986103903,
+ "learning_rate": 1.5294624373963894e-07,
+ "loss": 0.8683,
+ "step": 4916
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.00710363788212,
+ "learning_rate": 1.5186235412925744e-07,
+ "loss": 0.9465,
+ "step": 4917
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0016862214468187,
+ "learning_rate": 1.5078228942205674e-07,
+ "loss": 0.935,
+ "step": 4918
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9400739828963365,
+ "learning_rate": 1.4970605003752359e-07,
+ "loss": 0.8526,
+ "step": 4919
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0189479572016134,
+ "learning_rate": 1.4863363639365357e-07,
+ "loss": 0.9729,
+ "step": 4920
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8631404228098428,
+ "learning_rate": 1.4756504890696466e-07,
+ "loss": 0.8679,
+ "step": 4921
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.001340863529343,
+ "learning_rate": 1.4650028799247928e-07,
+ "loss": 0.892,
+ "step": 4922
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.1220587828585806,
+ "learning_rate": 1.454393540637411e-07,
+ "loss": 0.8648,
+ "step": 4923
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0043038891555147,
+ "learning_rate": 1.4438224753280384e-07,
+ "loss": 0.9364,
+ "step": 4924
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.840414922018483,
+ "learning_rate": 1.4332896881023462e-07,
+ "loss": 0.8567,
+ "step": 4925
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.075487095300389,
+ "learning_rate": 1.422795183051151e-07,
+ "loss": 0.9167,
+ "step": 4926
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8325197328919645,
+ "learning_rate": 1.4123389642504148e-07,
+ "loss": 0.8893,
+ "step": 4927
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0020654863520815,
+ "learning_rate": 1.401921035761189e-07,
+ "loss": 0.8738,
+ "step": 4928
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8273520614516778,
+ "learning_rate": 1.3915414016296925e-07,
+ "loss": 0.8515,
+ "step": 4929
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0258373369412856,
+ "learning_rate": 1.381200065887256e-07,
+ "loss": 0.9156,
+ "step": 4930
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0114786282422006,
+ "learning_rate": 1.3708970325503222e-07,
+ "loss": 0.8575,
+ "step": 4931
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9620270323858393,
+ "learning_rate": 1.3606323056204795e-07,
+ "loss": 0.9302,
+ "step": 4932
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9112452792040394,
+ "learning_rate": 1.3504058890844274e-07,
+ "loss": 0.8474,
+ "step": 4933
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9110880955254214,
+ "learning_rate": 1.3402177869139887e-07,
+ "loss": 0.884,
+ "step": 4934
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9014024407300836,
+ "learning_rate": 1.3300680030661096e-07,
+ "loss": 0.8912,
+ "step": 4935
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9900134268512297,
+ "learning_rate": 1.3199565414828363e-07,
+ "loss": 0.8962,
+ "step": 4936
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8127789986933913,
+ "learning_rate": 1.3098834060913612e-07,
+ "loss": 0.8135,
+ "step": 4937
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9552559772327756,
+ "learning_rate": 1.2998486008039545e-07,
+ "loss": 0.8783,
+ "step": 4938
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9107922053320379,
+ "learning_rate": 1.289852129518032e-07,
+ "loss": 0.7668,
+ "step": 4939
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.825516073534608,
+ "learning_rate": 1.2798939961161217e-07,
+ "loss": 0.9004,
+ "step": 4940
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9418341015108066,
+ "learning_rate": 1.269974204465818e-07,
+ "loss": 0.8216,
+ "step": 4941
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8972634896305701,
+ "learning_rate": 1.2600927584198618e-07,
+ "loss": 0.9335,
+ "step": 4942
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9263412712239212,
+ "learning_rate": 1.2502496618161165e-07,
+ "loss": 0.9212,
+ "step": 4943
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9897179641923016,
+ "learning_rate": 1.2404449184774904e-07,
+ "loss": 0.9131,
+ "step": 4944
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9641757091346561,
+ "learning_rate": 1.2306785322120596e-07,
+ "loss": 0.8681,
+ "step": 4945
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9060032522502734,
+ "learning_rate": 1.220950506812968e-07,
+ "loss": 0.8912,
+ "step": 4946
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9215454089738369,
+ "learning_rate": 1.2112608460584707e-07,
+ "loss": 0.8756,
+ "step": 4947
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7945254219454703,
+ "learning_rate": 1.2016095537119242e-07,
+ "loss": 0.8295,
+ "step": 4948
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0414589663403506,
+ "learning_rate": 1.1919966335217636e-07,
+ "loss": 0.9457,
+ "step": 4949
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9595568292401536,
+ "learning_rate": 1.1824220892215465e-07,
+ "loss": 0.905,
+ "step": 4950
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8461944356601063,
+ "learning_rate": 1.1728859245299207e-07,
+ "loss": 0.8652,
+ "step": 4951
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9317314160690501,
+ "learning_rate": 1.1633881431506122e-07,
+ "loss": 0.8645,
+ "step": 4952
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9120030119039478,
+ "learning_rate": 1.1539287487724594e-07,
+ "loss": 0.8319,
+ "step": 4953
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9843358541077079,
+ "learning_rate": 1.1445077450693786e-07,
+ "loss": 0.9459,
+ "step": 4954
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9261795371240237,
+ "learning_rate": 1.1351251357003656e-07,
+ "loss": 0.7929,
+ "step": 4955
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8039379007987508,
+ "learning_rate": 1.1257809243095385e-07,
+ "loss": 0.8256,
+ "step": 4956
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9345029900856137,
+ "learning_rate": 1.1164751145260722e-07,
+ "loss": 0.8424,
+ "step": 4957
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9227138858105707,
+ "learning_rate": 1.107207709964242e-07,
+ "loss": 0.8466,
+ "step": 4958
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8999767466755633,
+ "learning_rate": 1.0979787142233911e-07,
+ "loss": 0.8479,
+ "step": 4959
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7525695707719479,
+ "learning_rate": 1.0887881308879633e-07,
+ "loss": 0.7916,
+ "step": 4960
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8755468919626722,
+ "learning_rate": 1.0796359635274701e-07,
+ "loss": 0.789,
+ "step": 4961
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9434425034039697,
+ "learning_rate": 1.0705222156965011e-07,
+ "loss": 0.9036,
+ "step": 4962
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9959222395817438,
+ "learning_rate": 1.0614468909347476e-07,
+ "loss": 0.8797,
+ "step": 4963
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0098054002291768,
+ "learning_rate": 1.0524099927669563e-07,
+ "loss": 0.9175,
+ "step": 4964
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8143049652513321,
+ "learning_rate": 1.0434115247029419e-07,
+ "loss": 0.8647,
+ "step": 4965
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8866996547768736,
+ "learning_rate": 1.0344514902376201e-07,
+ "loss": 0.941,
+ "step": 4966
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8388311150203465,
+ "learning_rate": 1.0255298928509627e-07,
+ "loss": 0.826,
+ "step": 4967
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0281085992344876,
+ "learning_rate": 1.0166467360079979e-07,
+ "loss": 0.8958,
+ "step": 4968
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.946969119102038,
+ "learning_rate": 1.007802023158877e-07,
+ "loss": 0.8803,
+ "step": 4969
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9770804614697257,
+ "learning_rate": 9.989957577387521e-08,
+ "loss": 0.9306,
+ "step": 4970
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.1240751087233465,
+ "learning_rate": 9.902279431678874e-08,
+ "loss": 0.8514,
+ "step": 4971
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.219755530872357,
+ "learning_rate": 9.814985828516033e-08,
+ "loss": 0.9593,
+ "step": 4972
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.974785805979655,
+ "learning_rate": 9.728076801802656e-08,
+ "loss": 0.898,
+ "step": 4973
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8319963374558925,
+ "learning_rate": 9.641552385293518e-08,
+ "loss": 0.7338,
+ "step": 4974
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8475851587289722,
+ "learning_rate": 9.555412612593518e-08,
+ "loss": 0.8694,
+ "step": 4975
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9221752350167541,
+ "learning_rate": 9.469657517158226e-08,
+ "loss": 0.9098,
+ "step": 4976
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8780798629787528,
+ "learning_rate": 9.384287132294223e-08,
+ "loss": 0.8184,
+ "step": 4977
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8253107845052225,
+ "learning_rate": 9.299301491158207e-08,
+ "loss": 0.8476,
+ "step": 4978
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9055496502233873,
+ "learning_rate": 9.214700626757667e-08,
+ "loss": 0.8441,
+ "step": 4979
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9286350519833819,
+ "learning_rate": 9.130484571950538e-08,
+ "loss": 0.8322,
+ "step": 4980
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9130554928523456,
+ "learning_rate": 9.046653359445323e-08,
+ "loss": 0.8778,
+ "step": 4981
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.3248673251353242,
+ "learning_rate": 8.963207021801423e-08,
+ "loss": 0.8277,
+ "step": 4982
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8582326641629127,
+ "learning_rate": 8.880145591428024e-08,
+ "loss": 0.8642,
+ "step": 4983
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.894970146284961,
+ "learning_rate": 8.797469100585432e-08,
+ "loss": 0.8512,
+ "step": 4984
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0332434703597133,
+ "learning_rate": 8.715177581384182e-08,
+ "loss": 0.9301,
+ "step": 4985
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0281611455914244,
+ "learning_rate": 8.633271065785486e-08,
+ "loss": 0.9253,
+ "step": 4986
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8719749589448101,
+ "learning_rate": 8.551749585600678e-08,
+ "loss": 0.8787,
+ "step": 4987
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9181086821632437,
+ "learning_rate": 8.470613172491981e-08,
+ "loss": 0.8588,
+ "step": 4988
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9339848521162384,
+ "learning_rate": 8.389861857971748e-08,
+ "loss": 0.881,
+ "step": 4989
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9173366391766944,
+ "learning_rate": 8.309495673402778e-08,
+ "loss": 0.8879,
+ "step": 4990
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0399989389013817,
+ "learning_rate": 8.229514649998438e-08,
+ "loss": 0.8997,
+ "step": 4991
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0624596162091668,
+ "learning_rate": 8.149918818822433e-08,
+ "loss": 0.9147,
+ "step": 4992
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9970645138443958,
+ "learning_rate": 8.070708210788925e-08,
+ "loss": 0.8904,
+ "step": 4993
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.986835335630644,
+ "learning_rate": 7.991882856662303e-08,
+ "loss": 0.9101,
+ "step": 4994
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.013629135156919,
+ "learning_rate": 7.913442787057523e-08,
+ "loss": 0.8899,
+ "step": 4995
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8835466904792467,
+ "learning_rate": 7.835388032439661e-08,
+ "loss": 0.8108,
+ "step": 4996
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.923970293443084,
+ "learning_rate": 7.757718623124466e-08,
+ "loss": 0.9156,
+ "step": 4997
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.959878509529537,
+ "learning_rate": 7.680434589277696e-08,
+ "loss": 0.8851,
+ "step": 4998
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9017420597003865,
+ "learning_rate": 7.603535960915675e-08,
+ "loss": 0.871,
+ "step": 4999
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9433129940418841,
+ "learning_rate": 7.527022767904957e-08,
+ "loss": 0.857,
+ "step": 5000
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 250,
+ "total_flos": 1.4802717840880173e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/training_args.bin b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5a2e607501d89c4e47557ab2c7e396908001200
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:188d084c2f1ada7677e2a10c9f767124701100542ce553b962997683d3747356
+size 6011
diff --git a/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/zero_to_fp32.py b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/zero_to_fp32.py
new file mode 100755
index 0000000000000000000000000000000000000000..c98caae31534368be22b67fc4ae906836c992a8d
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/checkpoint-5000/zero_to_fp32.py
@@ -0,0 +1,587 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from ZeRO stage 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# While this script doesn't use DeepSpeed to recover the data, the checkpoints are pickled with
+# DeepSpeed data structures, so the deepspeed package must be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
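+# Illustration of the human-order sort above (comment only; the filenames are made up for the example):
+#   sorted(["rank_10_optim_states.pt", "rank_2_optim_states.pt"], key=natural_keys)
+#   -> ["rank_2_optim_states.pt", "rank_10_optim_states.pt"]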
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the two potentially huge optimizer states, as we only care about the fp32 master weights,
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
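+# Worked example for the helper above (comment only): with unpartitioned_numel=10 and world_size=4,
+# remainder=2, so padding_numel=4-2=2 and partitioned_numel=ceil(10/4)=3; the 4 ranks together hold
+# 4*3=12 numels, i.e. the 10 real elements plus 2 padding elements.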
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at the boundary of each
+ # param, re-consolidating each param while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g. ``global_step14``
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application, i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g. ``global_step14``
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
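+# Illustrative call of the function above (comment only; the paths are placeholders, not taken from this repo):
+#   convert_zero_checkpoint_to_fp32_state_dict("checkpoint-5000", "checkpoint-5000/pytorch_model.bin")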
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g. ``global_step14``
+
+ Returns:
+ - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note that once this has run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application, i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
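+
+# Typical command-line invocation, run from inside a checkpoint folder (the tag value below is
+# illustrative; if --tag is omitted the script reads the tag from the 'latest' file):
+#   python zero_to_fp32.py . pytorch_model.bin
+#   python zero_to_fp32.py . pytorch_model.bin --tag global_step5000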
diff --git a/06-10-24_sd2.1_llama7b_ft/config.json b/06-10-24_sd2.1_llama7b_ft/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2bc3f3859b48463854f340d0ee293cae78c389e4
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-2-1",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": false,
+ "up_ft_index": 0,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-2-1",
+ "vocab_size": 32000
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/generation_config.json b/06-10-24_sd2.1_llama7b_ft/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2602302d2b751745299b1aa70969f28531d23ccd
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "temperature": null,
+ "top_p": null,
+ "transformers_version": "4.38.2"
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/model-00001-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/model-00001-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0065cd04b58767c67ac1e28f54443cd8571b43ee
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/model-00001-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:efda49ebd0a294472121dfcc6b851eb8da6597b082b1930bf4cf654d644a728b
+size 4938985352
diff --git a/06-10-24_sd2.1_llama7b_ft/model-00002-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/model-00002-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c5f5c06bfaf5fdaa6d479f11b98f0cc19063e58c
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/model-00002-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:718bcd7f0c5d677ca17f160453acdd7cf7848227df00319a6f0aac03a8d4ad71
+size 4947390880
diff --git a/06-10-24_sd2.1_llama7b_ft/model-00003-of-00003.safetensors b/06-10-24_sd2.1_llama7b_ft/model-00003-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4bbc0a6a04d84c867c06900bac4dd419e0cabec3
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/model-00003-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:667b1c298069cb4a13ef50ff5911025d4936d527ef882249cc18d876281c5bef
+size 3634545800
diff --git a/06-10-24_sd2.1_llama7b_ft/model.safetensors.index.json b/06-10-24_sd2.1_llama7b_ft/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..452be371a188daae5c518442b1c621e414774067
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/model.safetensors.index.json
@@ -0,0 +1,302 @@
+{
+ "metadata": {
+ "total_size": 13520887808
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/special_tokens_map.json b/06-10-24_sd2.1_llama7b_ft/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..f928b2409a393d47ce0d9fe519f17e048a471eca
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/tokenizer.model b/06-10-24_sd2.1_llama7b_ft/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/06-10-24_sd2.1_llama7b_ft/tokenizer_config.json b/06-10-24_sd2.1_llama7b_ft/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..00456631ca49c4adbd95ae9609e79c6444d97706
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/tokenizer_config.json
@@ -0,0 +1,43 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": true,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": true,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/trainer_state.json b/06-10-24_sd2.1_llama7b_ft/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..c70e6cfcc43b541671b046e13c7a389479054f9c
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/trainer_state.json
@@ -0,0 +1,36409 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.9998557067962099,
+ "eval_steps": 500,
+ "global_step": 5197,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "grad_norm": 17.86347389075609,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 2.0553,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 16.340788417924784,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.9244,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 13.31230791159293,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.7098,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 14.40727225294918,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.9669,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 16.041732001068368,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.9909,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 15.068210215890273,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.8324,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 13.837817705671661,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.8703,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 15.314386356705906,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.9187,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 18.955730564183586,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 2.0023,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 17.866320156567383,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.891,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 14.965328550240324,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.8438,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 11.89546330250022,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.6365,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 8.384692664709801,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.5361,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 12.619120531844034,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.6459,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 9.194183144132957,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.529,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 10.036346362006853,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.6111,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 5.028586322255148,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.3546,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.4894329606548493,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.286,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 3.003990110161847,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.3436,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.624304641006234,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.294,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.843933611748089,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.3652,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.5282127575561013,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.258,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.572954278290059,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.3478,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 2.3242785478160233,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.3467,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 1.9444593814766211,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.2672,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.2766386252395425,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.3004,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.9052625124963702,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.3198,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.883473749107834,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.3105,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.8307638440888208,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.2597,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.6968574944097243,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1814,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.072869058382591,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.2785,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4576760922025103,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.2502,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4994099267875476,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.18,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.460264902782038,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.1521,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.5865701097261202,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.2394,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4428017911065363,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.1372,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2998737521778192,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 1.0461,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2763215591692354,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.155,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4121606917705762,
+ "learning_rate": 5e-06,
+ "loss": 1.146,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2631041889278862,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.1765,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3670803879604925,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.1339,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2906797090346698,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1015,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.5085866452824694,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.1426,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3071318057444536,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.092,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1016661057549553,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 0.9978,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2111162907372104,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.1211,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1274844124655137,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.1313,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2668397693417877,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.1452,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.176403429558345,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0395,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1951095967488896,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0549,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0698501123701163,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0768,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3217022898812898,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.1191,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.06152889720562,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.1467,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2207687703601493,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 1.102,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2701407726308969,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0554,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3329090385731375,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 1.1877,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.356023272392615,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.1086,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1849392700006287,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.1194,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0253161321556497,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.017,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0385674726144254,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0556,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1969264274945228,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 1.0569,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0645872005402048,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0202,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2367676967422145,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 1.1035,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0867326076016188,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0911,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.385231704991178,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 1.105,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1983463619808143,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9641,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4198421383078568,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 1.1414,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1099650533110834,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0109,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2290166817062858,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 1.0907,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.267048521231698,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.1442,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1019029454770533,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 1.0056,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2807328166020238,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 1.1258,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1551688945867158,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0295,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.0080125770907116,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 1.0032,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.402806385707426,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 1.0427,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2215549239861518,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 1.0579,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.322056156982379,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 1.0335,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1446838795879737,
+ "learning_rate": 1e-05,
+ "loss": 1.0138,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2807246001751553,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0419,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1976418300838048,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 1.1068,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0292812084728495,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0136,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9653551844674515,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 0.9696,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2044521578979546,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.061,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0506107786315697,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 1.0468,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4498246660106078,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 1.0875,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1845347304004339,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0663,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.161786603180624,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0531,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.220869922686358,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0559,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2404652106613836,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 1.0445,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4743753635963737,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 1.0602,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.262408610467295,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0615,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9808569501537702,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 0.9462,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.3013446449801787,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.1231,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1886643407923234,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 1.0244,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1424668824418163,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 1.0228,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2348883959266557,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 1.0589,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.400120543528127,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0111,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2712817733832307,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.0289,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4134889151969168,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 1.0632,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2717376671306364,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 1.0823,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2931804137988707,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 1.0387,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2393826879437473,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0144,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1747673033476334,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 1.034,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1483685440735119,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0326,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2519004715114483,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 1.0626,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.4413918805246082,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 1.0787,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2509070242345923,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0336,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1949113000331355,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 1.048,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2924213576049175,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0764,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1957696479928377,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 1.1442,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1727412691673529,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 1.0056,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0751077398034676,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 0.9844,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9960682080303919,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9427,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.3195235989294032,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.039,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2346213976081493,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 1.0804,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2879996809518923,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 1.0023,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9712618840126015,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.9809,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2174492391453324,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 1.1002,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.5518360442312447,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.1022,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.286191029125357,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 1.0813,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0667865886248182,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0152,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.09667692583234,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 0.9334,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2582078306529967,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 1.0562,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.360466747172384,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0997,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1103847133012368,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0463,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1283331979085496,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 1.0209,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.2347054944456584,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 0.9817,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0858325607380797,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.9878,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0559584412723666,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9288,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1649804448284384,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0552,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2768658059542357,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0371,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1484469622997453,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9749,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1908479242187853,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.972,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0402509536166955,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9693,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.279153512609506,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 1.0614,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0544219667349493,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.978,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1212869749297296,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 1.1024,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9592020155405737,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 0.9405,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9954547382113923,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 1.0248,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1370111213170742,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0575,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9834559647762655,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0187,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.5310889926993767,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 0.9525,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1342556535073165,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0082,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3336677423335714,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9902,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3619580210614848,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 1.0237,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.4407400734323819,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 1.017,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2835998371906323,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 1.0469,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2354416512823005,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 1.0207,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.55919779578618,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 1.102,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0417469349621207,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0014,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3338629959201007,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0532,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2584602038843604,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 1.0235,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3030823235047277,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 1.0613,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2348866055127186,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 1.0402,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.152356070113191,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9644,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2194973002491283,
+ "learning_rate": 2e-05,
+ "loss": 1.0733,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2754636914469413,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0593,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0948574037314331,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0224,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.183418528094626,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 1.1092,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0560815732774311,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9806,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1358578802193122,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 1.0757,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2374696678028985,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 1.0532,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1079912147610231,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 1.0477,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0229541011411554,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 1.0308,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.112131071523883,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 1.0121,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2280127901765792,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 0.971,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9791620231473505,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 1.0107,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1209582965961267,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 1.0036,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0519871421863807,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9852,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.070507196091445,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 1.0146,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2738992659769082,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 1.0163,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.9509746001304237,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 0.952,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1393322955608345,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 1.0464,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2140923212260029,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 1.0614,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.134122432733768,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0424,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0712747655486559,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 0.9444,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.0801415577846951,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 1.0388,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.1092667973918147,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 1.0614,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.3338891937207205,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9689,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2526136774335184,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 1.0375,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 1.2148881075034643,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9825,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1835507142706478,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0314,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0952611287150276,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 1.038,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.055745407520473,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 1.0356,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.047898258171331,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 0.9735,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0844942676429572,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.9655,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2102542072272067,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.9695,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.5351794364867144,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9742,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0829242522713511,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 1.0439,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.196733287932661,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 1.0344,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1412285937057054,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 1.019,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0790195675591219,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9841,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2350711196975463,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0445,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1425652576602308,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 1.0422,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.606456629008868,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 1.0935,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0639619643411806,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 0.9272,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1114960836440815,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 1.1476,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1204248642253234,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 0.9985,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9946527403315302,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9582,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1078356935974523,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 1.032,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.4640703912774728,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0689,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1399093993533496,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0513,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0597768450437273,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 0.9459,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9853784011195135,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9497,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1295434063672711,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 1.0133,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9723564757441976,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9556,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.098636570394235,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9391,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1172358470739157,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 1.0021,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.120101339931511,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0528,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.104126742032167,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9719,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9900241648436833,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9351,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1062113566320204,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9947,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0912350553018915,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 1.0142,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2248252251779344,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9468,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2646610193105368,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 1.0577,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0650758981936763,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0205,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.2290765225960296,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 1.0925,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1433310161715657,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 1.0701,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0459359117325058,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0457,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0564872906762484,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.9763,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0819025507537388,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.9547,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1934400237001315,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 1.0922,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.161179989745852,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0458,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0867826068691018,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.034,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0585184576383915,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9095,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0235086504577238,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.9233,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.103995240475684,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 1.0443,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.0985369068246764,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 0.9732,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1595157370784517,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9698,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.909226275371055,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9459,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.9721503193880335,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.8879,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1167168793076159,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 1.0538,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 1.1203943554433957,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0474,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2003829338387189,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 1.0171,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2001828855704575,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 1.0292,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1777841113075826,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.974,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.3487292075045965,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 1.049,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.021633961676533,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0227,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2011481891405715,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 1.0574,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2480837841874388,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.0578,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1539635705528444,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0352,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2375605968879775,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 1.0455,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2382629442817368,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 1.0042,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2491761957486767,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.984,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9855487927840765,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9494,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0803961197344898,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 0.9561,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.3126933935997356,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 1.0042,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0712696345783617,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 1.103,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2299127993608967,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 1.0678,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0459571990167438,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 1.0011,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.234895666534169,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 1.0247,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0400591644618937,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9653,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1222203513778257,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 1.0255,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0428445711754202,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 1.009,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.993313396131739,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0353,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1138141339680838,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 1.0283,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.06237415793888,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.941,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.103853261194552,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 1.0525,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0851989323256417,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.021,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1013505172431879,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 1.0366,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0528197990342376,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 1.0017,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9961957077596891,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 1.0011,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0986536329203895,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 1.0291,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0895648151990427,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.998,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.2103849011530055,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 1.0342,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9507441705890096,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.937,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.183923457824153,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 1.0814,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0666908558586932,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0267,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0938238668192006,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.9797,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9974396215028214,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9745,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1114483480841397,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 1.0203,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.045538807506972,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.0451,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1083583866563211,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 1.0574,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1701859122896223,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 1.0661,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0679023759461068,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.0016,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0776214334936154,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 1.0481,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0994739379188883,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 0.9836,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0540199770109147,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 1.0737,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9725209801803888,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 0.9469,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9522461545148276,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.956,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.9723112391371475,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9375,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.1097297277623286,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 1.0106,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.131166385572371,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9987,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.0827734775808813,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 1.0048,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 1.117780989198639,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 1.0228,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0681296455501026,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9366,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.060941460084828,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.9932,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.156906817119832,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 1.0177,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3152595631612354,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 1.0269,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0913419325854408,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 1.0438,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.883672872961938,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 0.8671,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.148266864364802,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 1.0138,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9555332042108989,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9864,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9867179317501176,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0427,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.065732418166951,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 1.0111,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1637313554979152,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0636,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1028865508626255,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.9832,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3709736442670721,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9825,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1319212799054115,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.066,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0096657706385352,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 1.0275,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9512338439427707,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 0.9121,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.122276064017236,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.0361,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0049578061308773,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9547,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1621443110111267,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 1.0012,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.3393044686281577,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 0.9688,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0477868559661858,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9109,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.041686619229756,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 0.9793,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0268588020698692,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 1.0161,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0338259745864973,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 1.0123,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.2528306493767145,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 1.0563,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1022842626383675,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9896,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9341887599034772,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.9934,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.156864345777507,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 0.9987,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0709841278140249,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0313,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0773209829635768,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 1.0868,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.8752748058379218,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.8964,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0745752107298585,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 1.0216,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1554871302658731,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 1.0333,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1028418446189259,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 1.0139,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.8438392565458945,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9158,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0364604199680414,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.2178021317078678,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 1.0816,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1932580154846006,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 0.9809,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1311891952725674,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 1.0205,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0033781289686534,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 0.9005,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1183374382936138,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9944,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1626536753558943,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 0.9838,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1153734528792847,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9332,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1631398320623185,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 1.015,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.1008251297518674,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 1.0311,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9609773365201152,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9125,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0289968099817348,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 1.0187,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9871375826142248,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 1.0017,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9840376508630518,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.8998,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.005956093758362,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.017,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.0565881405397841,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 1.0183,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.9327636558797503,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9183,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9015052548631132,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9363,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.112830724657339,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 0.9958,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0940370866245723,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.8806,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0072231094052617,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9866,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1803294860217535,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 1.0277,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0648689434483245,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 1.0703,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.4552596539034177,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 1.0366,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1152475023499158,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 1.0401,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9860353379930871,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 1.0142,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9691128601581361,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 0.9511,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9029971163164436,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 1.0134,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0987897442950476,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.988,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9245914555297068,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.9449,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.071436883991358,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 1.0236,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.10276548579562,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 1.0356,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0581216489631218,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9895,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0260701970266382,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 0.9689,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.8816165434650342,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 1.0149,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9546173722232031,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9698,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.2048203176710783,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9945,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.7850382985235519,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 0.8834,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0993381867966732,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0291,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9860726984166353,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 0.9798,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1649400082596675,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9856,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9768507287731225,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 1.0322,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9719930832304183,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 1.0114,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0208665386118512,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9605,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1091918338672133,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 1.0122,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9916669511874985,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 1.0572,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0877445095428253,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9825,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9951418535733804,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9763,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1099440939197665,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9827,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9147837781723692,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 0.9713,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0340673243328542,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0105,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1372723304735337,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 1.0105,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0046670930534125,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.8756,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.007583801986781,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9576,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.034350963445426,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9691,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.7365590280197416,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.8323,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1086572670483268,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.0489,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.98108011919429,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9333,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.995597752797983,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9806,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0386606289436602,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9641,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9811938400165474,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.98,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.9680779744918118,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9748,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.8834086257950933,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9368,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.090124582012756,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9813,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0074425933799687,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9535,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1085026154314939,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9324,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.0106463055120962,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9752,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.09924917860149,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 1.0201,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 1.1307196942637978,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 0.986,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.2395516795653494,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 1.0174,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1755919525884955,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 1.0104,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1286283315117354,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.022,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0254048346488573,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9568,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0260073369434373,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 1.0247,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1777603983178653,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9998,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9728921221019166,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 1.0295,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1873920401009974,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9524,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1229702475569519,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 0.9283,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9557043740069384,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 1.0169,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8853322061290558,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.9217,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9926636503259895,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9532,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0006958386411087,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9481,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0260858023013157,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9914,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0913630570085329,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9643,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.109445115093341,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 1.0165,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8584168156054991,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 0.9473,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1679236196407334,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 1.0144,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1251644456814822,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9818,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.92846100559881,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.9586,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1989101861770795,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 1.0557,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1576181312622837,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9345,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9947611145521551,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.9857,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.95083284712632,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9734,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8788506837339953,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.9648,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9652159901617957,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 1.0217,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.115957319344163,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0174,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.10631243887674,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9295,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8396659931872941,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.8177,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1557846141605452,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 1.064,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9000696594877208,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 0.894,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9488372546306818,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9482,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.2431576583319777,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 1.0355,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9832767607334706,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 0.9785,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9066624673918414,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9399,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9804432267689261,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.9826,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0474637765844146,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 1.0477,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.1981920004067885,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 1.0065,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9423419660649944,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9813,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.039276169197374,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9061,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9606855679333003,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0137,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8373828115493064,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.963,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.208529699031717,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 1.0257,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0672762000421674,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9716,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0963028521904867,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.9474,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9163688190227294,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.9505,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.050544750709766,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9958,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0500893960835593,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9822,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.9638183430641204,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9274,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0027183551032477,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 1.0227,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 1.0126448520312659,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.939,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8601680052657347,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 0.9062,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1152170579938703,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9915,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9228161270160428,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9444,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9621324346983371,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9347,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.077600427957637,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0871,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1205398016744077,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9663,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.05513833949092,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9598,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0784477194893523,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 1.0089,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.218897394519893,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 1.0376,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.102108513161202,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 1.002,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1117520137781198,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 0.9867,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9516860024791847,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9682,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0469044011350126,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9555,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.3651285527860566,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9684,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1737464733176803,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 1.0662,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9965826863460079,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 1.0071,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9783513198655882,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9869,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0311206030672846,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9851,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.8945449954022129,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9294,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0110242576733897,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 1.0002,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1047177352183692,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 1.0201,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0722862873101653,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9839,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.7992134051176842,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9206,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.09694344736295,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9826,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.112091064871851,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9378,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0620939801683327,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 0.9833,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0927986239274194,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9961,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0260975079168466,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.898,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1148764798877446,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 1.0624,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.047369326060531,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 1.0518,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9949304877707057,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 0.9958,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0400326074928232,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9636,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0311206230508945,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.02,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0879635240675696,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 1.0428,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0166991214039482,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0782,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1494495553279038,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.028,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0502559175610942,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.95,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1045546514014202,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 1.0072,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.3056896038881167,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 1.0127,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1149911519233298,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9874,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9535015908036342,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.8895,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1040398449723414,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 1.0279,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.2020662594960165,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9662,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.1617626482359902,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9733,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9858924210251201,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9833,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.08640612402288,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9355,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0819304471796969,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9629,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.8480780378607349,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9419,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9771514137294415,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.977,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0672876151108321,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9911,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.9338685826013793,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 1.001,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.127158307035761,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9582,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 1.0731823106590381,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9314,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1026747968864483,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9816,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8721020093136242,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9347,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1173303456950077,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 1.0086,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1340305355028328,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 1.0657,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8776348494635778,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9833,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9597375630238096,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9231,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.309040553408022,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 1.0313,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1664832697368677,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9909,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9732734163687559,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9881,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.251367057800584,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 1.0196,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.950723223553314,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9409,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0699038804523258,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 1.0846,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0808870677516138,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0295,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8987687065357317,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9478,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.009587668356229,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9822,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0760293799596983,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 1.0342,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.979947021628255,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.957,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9102526524656575,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9591,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0068945109167378,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.9861,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9908109026807387,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 0.9733,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9392793317686784,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9708,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1514636687669961,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9004,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0612799142292089,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 1.0027,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0884952183522267,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9798,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9909419382985257,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 1.0046,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1115561307544186,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 1.0215,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.2019653967298156,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.972,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9966881370550695,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9743,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1213143026660388,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 1.1124,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9883630166511729,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9529,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0538838631766885,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9849,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0339361235832036,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.8873,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.1066944796273215,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 0.9441,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9596076853330495,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 1.0429,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.2700469172838509,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.8791,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9050792010677117,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0016,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.885318967539835,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9863,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9803399653508227,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9765,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0469533529335453,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9708,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9550877375109239,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.9091,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0055702317020512,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 1.0518,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8925291498978056,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9679,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.845516568590393,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.966,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.8409431262046926,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 0.9629,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0872103956772339,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9911,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9873600073143931,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9889,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9582318693838419,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 1.034,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0505492025517522,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9326,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.080752233944646,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 1.0414,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0021047241613121,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9727,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 1.0365183591134215,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 1.0163,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.9263413124443935,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9942,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9415012128457283,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9592,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.085023024342262,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 1.0216,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0777088572387146,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.9411,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8954427184820994,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.9779,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9419731262583303,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9901,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8817663228638201,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 0.9236,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0954242523958229,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 1.0373,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.955612651878377,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 1.0164,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0524488036996646,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.961,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9236957184737671,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 1.0028,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9901537987037,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.0905,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9930842769836845,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.9376,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0058896090306109,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 1.0343,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8966508232440634,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9702,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0279038868521404,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.843,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9799864758819165,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9933,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8452950137389139,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.8928,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0685671006382729,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 1.0005,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0725039177597746,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 1.0575,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.062041235984927,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.996,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9523922622707521,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9757,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.7964819810821044,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.8787,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1164433094758257,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 1.0268,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0349146489037746,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9208,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9878509364700656,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 1.0082,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.126424620877137,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.996,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0986175315641928,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.9665,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0764478104278572,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 1.0315,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.8998641964862616,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.9363,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9679128404985713,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.986,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9776597248647867,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.9886,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9991039283783492,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.9933,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9553531726543301,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9762,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9797491811369795,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 1.0457,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0295952381693567,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9814,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.3381153154690866,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.8935,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0032594519610416,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 0.9598,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9816592441190367,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 1.0833,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0541040477843548,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 1.0731,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.0283305630893005,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9996,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.165472800453247,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 1.0854,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.021180337630849,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 1.0666,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1125142366457403,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 1.049,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9844476170662394,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9533,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9812726625726741,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 1.0329,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9616451497505701,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.9243,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1285986493634241,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.945,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.9294359108155703,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9501,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1903078262896956,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 1.0148,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.980479406825127,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9456,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.1164312675034156,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 1.0495,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 1.048075933467679,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 1.019,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9192475461396691,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9797,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9640226308845612,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9873,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.118639485931765,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.9998,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0617218463844134,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9422,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.077601678319198,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 1.0219,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0317162756461955,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9653,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9886237905965549,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9958,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8621674625037563,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9172,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0538077678947617,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9863,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0845798248830119,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 1.0658,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1150062798769047,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.9652,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9016377809012965,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 1.0217,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0451722451202368,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 1.0304,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.031425030441544,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 1.0239,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1544689229364578,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 1.0324,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.057258594257786,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 0.9878,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0497997727389952,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 1.0141,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9474310922138933,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 1.0073,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.021159403915022,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 1.084,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0154972835163243,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9819,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0302369938411624,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 1.0083,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8616437948503519,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.974,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1515168257775552,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9448,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.912537750499609,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.947,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9996092764555125,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.9699,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9770734794835924,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 0.9872,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.028670285835641,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 1.0365,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9143019763944704,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9081,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0167790399065086,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 1.0287,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1155834478432463,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 1.0148,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0183895668497078,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 1.0,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9107872923916688,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9703,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1143357699318515,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9656,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.069422848971197,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9959,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9567954135329023,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9908,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1212128831786514,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9949,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.1504412617249653,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 1.0192,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9589081642164562,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 1.0319,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8249215086553999,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.8644,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9906162133808685,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 1.0065,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.994494951635327,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.9595,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0287369197090521,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9892,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.859541536851516,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.8776,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8642559431346785,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 1.01,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9788548014720811,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9524,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.033542662447903,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9357,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.8718690983137145,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8491,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9375592920806041,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9115,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.0320072811824508,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 1.0233,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9946938867087536,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.986,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 1.026914493281081,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 1.0278,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.9583004175067096,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.954,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0515715138309774,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9719,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.005029351853862,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.9243,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8932520359057077,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9068,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9695248569316938,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.9584,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.970978214225334,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.9821,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0971857329654915,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 0.9564,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.150321699726554,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 1.0102,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8737491584881734,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9647,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9897709546027356,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.9557,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.037634026058284,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 1.037,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9681029294161705,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.975,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9516199687205524,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9433,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.024309750310836,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 1.0857,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.226082072960153,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9702,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.003867245307899,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9951,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9406162890873699,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 1.0059,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1470288672543922,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9457,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.244537358005595,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 1.0986,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0184080275706304,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.9256,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9521024969217489,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.9937,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9984073794997418,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.9852,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8725748012597938,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.9448,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2651416339727257,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 1.0173,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9569990583280045,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.9413,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0091673046357896,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.9742,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0912502810880946,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.9711,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9894366562807843,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.8987,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2375743349343702,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.953,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1818693686390342,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9888,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0175580442144072,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.9699,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0133467735882349,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 1.0137,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9750925206262613,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 0.9695,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1890758658396041,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 1.0032,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0248792917422507,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9401,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8310457998466182,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9213,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.881725876694822,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9014,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9050813569396872,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9357,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.2913300128537322,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 1.0287,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9569365072551512,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 1.005,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0316515778697148,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9622,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9699361546794562,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 1.0499,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.8506759232989879,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.8996,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9500681568094445,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.969,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.986962190625067,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 1.0111,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9798509690880004,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9696,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.1836872602890092,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.9689,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0403191883207907,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9156,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9375830839514839,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.9596,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.9713611354979523,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 1.0016,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0222670176295763,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 1.0191,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0016663007872235,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.9667,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 1.0336727978333773,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 1.0496,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8976968260939017,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.9628,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8218270591072413,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.8697,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0184264262300544,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9943,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0086518902498114,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9802,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.020839670513961,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9747,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9388421502196214,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9456,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0485208531205585,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.9839,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.909534274569543,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.897,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0205339097605757,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 1.0331,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9071340678012022,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.8875,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.92489406786702,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.8978,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9015811878458317,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9602,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1509869566858648,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9402,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9630806118398794,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 0.9199,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0772982326322353,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.894,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0518544320060759,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.9789,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1599584115369084,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 1.0222,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.1108159536370812,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9887,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9830021628019077,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9177,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.310911248032901,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9587,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8939556278909199,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.8889,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9591305508666739,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.9946,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0710300642782549,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 1.0379,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9746030258304443,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9298,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.15598408014875,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 1.0194,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9412306944896369,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9599,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0101795567883216,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 1.0061,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8893756653729351,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.8683,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.100881206486699,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 1.0098,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0171269619725332,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 1.0467,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0169124233342686,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9184,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.900877308262594,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9553,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8684162437660856,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.9644,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0240095551783224,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9792,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.085694430569257,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.9762,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0056031342843705,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 1.0149,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0601776270144836,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.9448,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9568548976676675,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9735,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9920128463040526,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.9506,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.863639412832335,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9306,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9496830263021463,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9641,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.010783206181086,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9932,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9552038184149039,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 1.0013,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.899378048924427,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9246,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9170206504026146,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.9364,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8780328136560523,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9373,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0103180987298908,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.9843,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.7846608277324099,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.8487,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9682004254409378,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 1.0076,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.9595767627175493,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9891,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.8101944582398378,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.8918,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 1.0538486654973627,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9233,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0992956832039944,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9717,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.7802381711934618,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.8638,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9880307162959113,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 1.0436,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9017787755132864,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 0.9658,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.938643330038015,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.9487,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9406237262171853,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9316,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0005251727323674,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 1.0257,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9071087095973415,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.8911,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0070742314486858,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.986,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0345804470898885,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 1.0488,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0532405838039678,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9029,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0435070968348559,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9503,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8484725704835063,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.8668,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8825696372712778,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9284,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.4106660348079156,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.8946,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8380573985935399,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.9591,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9869997692007562,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.0069,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0001381100244082,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.9832,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9346411066835593,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.8938,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.826372309279838,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.8649,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9583486423185323,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 1.0063,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9155429157204348,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.925,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.100953437202969,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.9781,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9910767356335862,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9383,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9466330830022603,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.9991,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8974827937836365,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.8803,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1879861898674295,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 1.0229,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8632606726670962,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.8801,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1017728120330221,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 1.0249,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8668625088114832,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9014,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9298695696032182,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9866,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.023055658401976,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 1.0535,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9742522622235574,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 1.0512,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8059558327082518,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.9789,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.7563506496362821,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8526,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9581062257110713,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.9279,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0320384960403879,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.9907,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0243556166887016,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 1.0331,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9100755913648656,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.8855,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8853085856585948,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.8872,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9183764196710127,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9303,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9601180616455742,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9848,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.8006087133278984,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.9199,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9096110010507297,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.91,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1783963202706182,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9924,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0558096188074213,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 1.0305,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0307054410548946,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9389,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9140877921424031,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9895,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.0093026349498095,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 1.0162,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9471943682238223,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.8681,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 1.1060929804311044,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 1.0339,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.9229585676372616,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.9157,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9515766351316286,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.95,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8835516838717262,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.8409,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8486474945951469,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.9541,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.156521123032195,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 1.008,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.2124822154719448,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9374,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0578764771979732,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.9981,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0412671472392887,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9376,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9389118910154383,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9755,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.916400094928299,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.963,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9537763841864934,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.942,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0222942470093788,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 1.0059,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9531671291708219,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9337,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9673206995827385,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9792,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.076755346471494,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9649,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.007573871583606,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.9941,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0196743835900524,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9957,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9186323059236925,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.9715,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0582302569593054,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 1.0031,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.008376705645545,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 1.0022,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.020725588513926,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9596,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8751660256262519,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.8788,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8339919915175331,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9288,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0534247762404043,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 1.0412,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9329756986366945,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9791,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1974038402905118,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9471,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9492837367064159,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.9876,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8964769346472791,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.9526,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9842887483002896,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9654,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0449871301329061,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9937,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0956586607327587,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9111,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9499494145244028,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.932,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1134533937275666,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9275,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.987142200239311,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.9796,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.922861437517647,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9994,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.040672496640339,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9491,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.121182245905376,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0086,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9382489379393961,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9713,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9032784112027201,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9544,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.941344942029661,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9555,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0794338752456596,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 1.0294,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9796208361823697,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.9753,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9391801401800499,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.9743,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9746592070314117,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9676,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9849563735725314,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9566,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.2526725908887066,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9363,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.024753362541994,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9719,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.1193459382390043,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9887,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0957167581230274,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9731,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.8810929173302238,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.9067,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.9574523964981423,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9502,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.921763216834495,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.9252,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 1.0542993606189686,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9557,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9125679542204395,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.9192,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9505650432203572,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9343,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9797085550859939,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.976,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9378127947174137,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9898,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.021298501233176,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0113,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9821863956339766,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 1.0428,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9461148960938188,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9989,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9747637648410694,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9956,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9287287699604906,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.9849,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0513732675592349,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9361,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8557869605686496,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9625,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.120810804482619,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9831,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.117583170179081,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 1.0286,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9205853078015869,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 1.0139,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.930674992163926,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 1.0258,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0264752575648477,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 1.0151,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0372148521221145,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 1.0382,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9434622133219497,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9812,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9052102214485251,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9339,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9316266612889813,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9013,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9133470615785118,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9294,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9536120519224839,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9119,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9357559667532992,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.9743,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1386309824122716,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.925,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8998242598947602,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.976,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9039806326398101,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9895,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9345914552802297,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9303,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.147386596990526,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 1.078,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.2155363128449506,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.8954,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8840276880732251,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9941,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0028413770383007,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9192,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0383545522633073,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.9872,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.807056210262064,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9211,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.976902533221811,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9424,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8111651059263858,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.8296,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9699141538464766,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9202,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.875464251306016,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.9113,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9397587920810292,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.946,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9364869336361953,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9354,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9005529620400964,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.9719,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0055156280112623,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 1.0446,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9450115081067701,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.945,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9690686920697665,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9882,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9402681346380628,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.8947,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9435792263502418,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9081,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9565390998865296,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9928,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8622694297205252,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.9648,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.0227949520042103,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 1.0145,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1632548326893022,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9496,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.9640864762450096,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9789,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.8875385431624611,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.9221,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 1.1563281326056831,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 1.0354,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8245858613862742,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.8914,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9705990739391612,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9331,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1044362797740637,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 1.0126,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0385793610460308,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9817,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.087873826847476,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 1.0294,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1774112602561206,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.9921,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.044443883038202,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 1.027,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0418712955640264,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.979,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9799529582810814,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9805,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8380106587673484,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9692,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8106029993756833,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.8796,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.967888763511043,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.9426,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8338662955627411,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8948,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.326596871236574,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 1.002,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9403465734130465,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.9673,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.939767471544234,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9745,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9209027941278132,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.9733,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.980995120756399,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9022,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9822517815616164,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9176,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9732772645231046,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9272,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9033484939097034,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9156,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.821730878154358,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.8436,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8968746163391496,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9866,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9281483108229382,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9353,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9780845576552375,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.9498,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1074021798523632,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9729,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9864524913978737,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 1.0014,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0299581565078642,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.9974,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9825537232395749,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 1.0037,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9682193599915532,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9545,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.010030344909483,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 1.0333,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9617210233213701,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 1.0205,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9103739156118881,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9213,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8942582250476749,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9589,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.2246614350808254,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9832,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9687670626406696,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 1.0154,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.884833132382099,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 1.0016,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.94668538176656,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9796,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0370259309774235,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9148,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9825488412786969,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9951,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8949635972579602,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.9795,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0744539293861581,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 1.0171,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9486636984000835,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.989,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9991123454952828,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9458,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9393906617167698,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.9234,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9486632769964769,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.9286,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8870441093503006,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.9235,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.8445444958236513,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.912,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.0227589962975683,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 1.024,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 1.1486051169109843,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 1.0335,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.9301070966417918,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.968,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.7461499466378382,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9275,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9433594283109852,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.9797,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0462804278397861,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9755,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8697248223371079,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.8028,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9238445016405439,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9137,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9185090223935947,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.8734,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0060678826112743,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 1.0001,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8640951411665814,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.9071,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9579366597749922,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9849,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1644248312491494,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 1.0096,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.91235757351678,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.9553,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0253659887987323,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.948,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.948888840073587,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.951,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.96031752242475,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 1.0257,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9188946313189172,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9749,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8163168615248108,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 0.8978,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8876465101014909,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.8812,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8862832476660408,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.9869,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8935702653475464,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.9192,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1220879391513314,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.9531,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0465585705009761,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 1.0469,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.970630297145126,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.9726,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1293785087481336,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9579,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.6473436736666303,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.8201,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.1236681712299788,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.9738,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0555334678685977,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.9551,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9585948424160133,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.9906,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9632166565383102,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.9773,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9429997781564794,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9394,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9246823658958572,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9167,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8955009469545985,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.8777,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.982781827274399,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.947,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9961617738740262,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 1.0081,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9333120768087941,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 1.015,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9245694395241534,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9596,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9574096418211646,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.9551,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9183459344015832,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9506,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9746043591188694,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.9975,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0161883025404344,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.9156,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8604787162644674,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.9258,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9866324292883194,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 1.0335,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9478964519070111,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.9153,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0778311714738529,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9707,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.093708408365323,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.9556,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9361796608322317,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9856,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.012900733529525,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.9084,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8303001646786801,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9557,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.002565523250232,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9794,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0463288126955839,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 0.9744,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.941255816261249,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.9816,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.8403448876068528,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9146,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 1.0064716949813777,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 1.0151,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.9652778830269011,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.9702,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9760926012576204,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.9315,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.1018390357024368,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9942,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9995486970554732,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9465,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0214383067506745,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9439,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9481003865939039,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.901,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.885272514013036,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9229,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.800853261093981,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9221,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9120921974688442,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.8923,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9651722966404574,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.9409,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.056961914378481,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 1.0135,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9557639326224096,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9502,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9921641896167874,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9231,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8787286117965492,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8833,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8355556429449305,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9224,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0692043922458312,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9477,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9987461436123213,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9554,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.972986412074494,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9946,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9469262820444003,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9943,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8556549384591313,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9185,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0952135503576756,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 1.0426,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9039786625494457,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.958,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.1473722853807693,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9515,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8948166217125512,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8794,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.90770359735155,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.9746,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8731173504476053,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9967,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9589506765396064,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9686,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9179436692910353,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9728,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.03075208257366,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9082,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9147623943087246,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.982,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0474706977056625,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9273,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0085098460886854,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.9805,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9433410143694413,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8955,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0023684630725813,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9487,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9042810223427815,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.9821,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.947212109086368,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.9766,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9009711629257964,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9436,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9872688297321746,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 1.0462,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.021365604415821,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 1.0377,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8925480189296228,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9581,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9920114223461531,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.9259,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9946430045651665,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 1.0001,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.040563362121139,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.979,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0489211802607663,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 1.0051,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.9349229223664337,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9621,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0071684249504653,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9809,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.251399388463867,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 1.0743,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 1.0200937220397808,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9124,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.97407087086635,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9745,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.935686022956608,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.9901,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8836199331209209,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.9436,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.869197918578686,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.8845,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.8100663673943043,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.8839,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9655502745076743,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9423,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8690748146944519,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.8431,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8811276540359169,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.9079,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0069589271499593,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9044,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0234408727755917,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 1.0407,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0837464402225852,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 1.0263,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0292504472504127,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.9548,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.908745860907942,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9254,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.1646724407458005,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9509,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9909734683436034,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9289,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.002461658734012,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 1.0389,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.910458699203309,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9994,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8121839823450351,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.8556,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0611317209258873,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9501,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.985560672984148,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.9927,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0521324339726432,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.98,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9112400884123598,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 1.0643,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7483426010932785,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.8655,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.1086028513440693,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 1.0325,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8236520402844943,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9421,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9200072563997702,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.942,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7662944807350218,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.8268,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9624631257461757,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9214,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8908791958342578,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.9221,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9747783944581924,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 1.0187,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.94025272242328,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.9019,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8947113472161731,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9624,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.985869019422871,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 1.0161,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8385657771238609,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8857,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8999697279164486,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.9809,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.920623527150245,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.9333,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.075317101765974,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.9021,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0610900088797453,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9317,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9197351143103815,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.9651,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9797419798113135,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9264,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8828157469966135,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 1.0207,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0098529882564022,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.8678,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.051989162435478,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 1.0025,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.927689066709106,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9607,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9938916932686059,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 1.0437,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.927141268019181,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9727,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9726136652594168,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.9964,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9530963671108253,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.952,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.106990848275471,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.9508,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9673686147980758,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.9681,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8343046704531886,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9171,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.7441537131888142,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.8558,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9930369606195396,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9596,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.047302277217802,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 1.0074,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.9635653288831207,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9943,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 1.0121450460420647,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.9459,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.8734428971146019,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.8691,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9891237469797581,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 1.0021,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8383843397321018,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.8792,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9169848284867886,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.9782,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.812537991341618,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.8762,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9712230515155104,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 1.0158,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0467586430429845,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.9798,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9931502683538673,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.9315,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8856443563518548,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9519,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8506641491583667,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9529,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.051464422643405,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 1.0674,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9251080756042125,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8493,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8941700685899955,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 1.028,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9002436654481246,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.8781,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8857809098263753,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 1.0007,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8212635799496153,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.8996,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9372748971442931,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9198,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.851328921929426,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.9322,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9908145107104688,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.959,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.7937945012112343,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8538,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.087608126510884,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 1.0047,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.7446048568531575,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8969,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0822139796141856,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9502,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9600482594870258,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9806,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9326920716444723,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 1.0264,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.97081959340405,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 1.0173,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9811815614394553,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.9681,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9219602092362942,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.9602,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9553618273012217,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.988,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.902221747042117,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9988,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0407944510509943,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 1.0251,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0741838415932836,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.9604,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0814179410634572,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.9236,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9407307179166049,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9019,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0587512295938475,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.9648,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8537552139110713,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.9121,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.94281045754773,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9113,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9429053125538163,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.8986,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.932604760192039,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9518,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9607272552947523,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9391,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9337823412461219,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 1.0413,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9922449201391417,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 1.0025,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0355438362648628,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.9801,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.106835762087524,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 1.0027,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9230478593917503,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.9705,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0355760595769947,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.9471,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.0294104362813747,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 1.0289,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 1.052462509540482,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9765,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.8886580869632086,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9855,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9099349721948723,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.921,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9484292405916163,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.944,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9717620608623369,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.9103,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.9608276324247862,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.8674,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.001344637303453,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9837,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8544404041403327,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 0.8755,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9141723297334705,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9457,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9886223468023465,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.9728,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0600081486254507,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.9208,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8436680768892132,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.9457,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9626244775383591,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.9307,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9433187089471458,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.9741,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0903787871951747,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 1.0251,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.011102846547872,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9666,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9294396451230055,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9695,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.973261193007203,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9897,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9442468983865271,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9682,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8074446116689458,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.8829,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9634063394636038,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.9819,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0653655707501328,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.9773,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.065647363497974,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9461,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9984831822120185,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.9614,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9474674675908505,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9559,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9824152707120614,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9607,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8882962592401471,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9513,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.875115814404789,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 1.0244,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9643588720334034,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.9125,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.1274974643025621,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.9605,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9349648433629218,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9982,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.815653080520809,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8316,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.7910781887120026,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.8553,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8841774936513562,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.9186,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8864445348989288,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.991,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9410301349745667,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 1.0007,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0084725960935985,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 1.0259,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9242962301251781,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.9528,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8983632041439181,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 1.0111,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9318108229499958,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9742,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9564298457717368,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.9364,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8748108268713141,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.9488,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9151840630689074,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9233,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.1004063155897594,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.978,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.020310681755496,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9741,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0025734462588363,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.984,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0263104225725488,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9906,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0610648658830775,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.9219,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8737007195313583,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.8917,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9704653680939311,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.9956,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.944752034675354,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9019,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.880769968121093,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9645,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.8357603643593192,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.9863,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9190765177231858,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 0.9466,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 1.0091428692503979,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.954,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9789893907912125,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.9281,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.7775611238049662,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.8869,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.9877691453435866,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9017,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0823586569795929,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 1.0658,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9611279006529012,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9879,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.99106652800465,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 1.0262,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8822594092653521,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.912,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8537100476653262,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.9129,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8768396188614327,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.9473,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9503641946345763,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.872,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.7876477230036979,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.8937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8876088559819371,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.8703,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9609032274449619,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.9976,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9656891401062295,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.9703,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0140256801740455,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9849,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9256255304578437,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.9887,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9372804605703864,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9322,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.057818883814277,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.9809,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9676727878687101,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.8472,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0569684049944226,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 1.0111,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9478058009751226,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9973,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8491612751728744,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.932,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.063733075647585,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.9402,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.927719158248588,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.9304,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.051457256994107,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 1.0014,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9145175644707474,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.8904,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9673432670172137,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9841,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0263580266051877,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.8794,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9370820179122746,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.9087,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0194286821464282,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 1.0346,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9636997168720651,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9335,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0358575002137034,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 1.0072,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9861474572796306,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.8985,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0167985703717612,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 1.0352,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8661834944686028,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8714,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.918220279098925,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.9672,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.1654835314400813,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.9206,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8782864953837353,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.94,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.001155049995312,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.9866,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.0195402057298208,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.9989,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8561818909574825,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9136,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9988571455787769,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9246,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8682760409408626,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9751,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.111973346218321,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.9218,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9717967427736105,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.8805,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8552273677571272,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.9544,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8998533409942734,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.9733,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9964597092880161,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 1.0174,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9465538103341393,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9353,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.965431191161349,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.9351,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 1.264881078759489,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.9759,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8668281934705195,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9194,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9861236166681108,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.9005,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.8703101375822144,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9608,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.9571789034000655,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 1.0034,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8375835970467771,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.9383,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.873469492585692,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.8697,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8574427602674748,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.9356,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0067934798289755,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 1.0,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9170434737361712,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 1.0018,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9622237370244004,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.9527,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9526954216812155,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.9224,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8787480910042309,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.9485,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9787231008114128,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 1.071,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.074853454478588,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9908,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0235177131060391,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.9707,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0875192420075175,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9887,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9522200400988253,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.9261,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8772882364742024,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9698,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0802712789454298,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9234,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9513937791159559,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.9386,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9495223336458697,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8736,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9214574673472887,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.9226,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9482194716583879,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.9556,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9567744881336991,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.8896,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0176557179080163,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.969,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0003284870829507,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9976,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0040582437896988,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9894,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.918876339799899,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.9548,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9850769224964281,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.9577,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8495345907621838,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9728,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0372223951877135,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.9794,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0085653027349983,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 1.0044,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.7253238694380731,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8192,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0086435192691192,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.939,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8356277905093833,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9647,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.077391101906165,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9581,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.950685147204352,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9141,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9903336045903749,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 1.004,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.886512493678247,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.9016,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8966168752937466,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.8623,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.1163911336091636,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 1.0104,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9704813893448934,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.9913,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8654900747799749,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.9129,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8695657587765406,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.9766,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0624852345997384,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 1.0302,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.009772185178878,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.8837,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9065866430845643,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.93,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9044651105180835,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.8793,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8743606898363903,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9691,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8690789555787685,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9389,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8684902881631846,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9315,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.9516158888783537,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.9167,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.04736303605119,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.9415,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.0474196172115005,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.993,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.8808689748638467,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.9259,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 1.5210880159917515,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8962,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0686317969125403,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.9928,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9764220372749869,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9462,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9282082567096386,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.8972,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0169851063290778,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 1.0223,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.1104902130143832,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.9622,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0214960904935697,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.9756,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.837251199700536,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.9294,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9962820179048386,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.968,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8767083473968104,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.8565,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.010031928570089,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 1.0199,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.1472981012746335,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9862,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0177100131803676,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 1.0558,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8399003820676054,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8926,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8163751235189033,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.8762,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9110139912937479,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.9706,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0523904592959699,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.9904,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0046387478908356,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.9543,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9421259997094655,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9508,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9915527167921139,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9085,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9711906402895569,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9789,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9636222306821435,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9992,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9504937840612754,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9774,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.85508756889946,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.9298,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.001724464321067,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8969,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0795430574842528,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 1.0655,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8365748326097373,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.8813,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8549159907728331,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.9173,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9514007537768869,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.899,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9243396251319407,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9007,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9386279339333949,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9582,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.938863003614121,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 0.9466,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7746920848726053,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.8744,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8800209367790253,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9296,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9563591592089701,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.9415,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0823425450262547,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9155,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9294769199886521,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.9396,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0128160910927457,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.9102,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7193778243209328,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8595,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.912559188534125,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9292,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9827161963308617,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.9035,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8175980382563796,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.8728,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9892005407224478,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.9637,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8617142576245806,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.8283,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.7774387917332699,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8982,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0089743260360584,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.9725,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0235716444291723,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9613,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8920760393771107,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.8932,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9677838999532018,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.9674,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.0557970334664732,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.889,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.9006718214048022,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.9292,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.8978208423654963,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.9762,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 1.012413888892859,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9616,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9219652935841612,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.9597,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.1667935403837504,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.952,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0657355088586513,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9978,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8668487883174316,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9998,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.83077840213205,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.9163,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0103332678264763,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.9694,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9523874223780286,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9678,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8803444409792228,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 1.0225,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9128646750795694,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.8621,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9722068964197508,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.965,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8680942448861937,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9419,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8871975637381099,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.94,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.1752264909759393,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.9567,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.942533816212278,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 1.0213,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0487718670291166,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.9706,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8596585129841071,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9062,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9969760294900244,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.9547,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8619602284633182,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.9775,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9848956550531245,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.8821,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.972351111094236,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.9936,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9780137870066115,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.9961,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9471594062714703,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9669,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8751949009152656,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.9254,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8897100420997975,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 1.01,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9184010627795944,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 1.049,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0906008522685957,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8526,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9814138374215998,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.9829,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9558542792106863,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.9828,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0645694257914269,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.9607,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9576178635912473,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.9126,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9954897980335197,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.9274,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9924604001165576,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.8743,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9578220938883492,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9546,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0133446370102202,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 1.0115,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8321384838785534,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.9437,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.092412274875756,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9602,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8434696929509511,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.8267,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8888575481756741,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9449,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.04536133976919,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.9354,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9264301612973153,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.993,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0448132360437183,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9921,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.045890976485631,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.9247,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9208093556615694,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.9543,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.7828588302711406,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.8153,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9600069794377464,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 1.0262,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.0360936079688903,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 1.0395,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9443134361395065,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.9432,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.8957425811268978,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.966,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9221316446068092,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.9855,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.9926472916387251,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.912,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.7608951737971176,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.9003,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 1.013479925276139,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9588,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9573690285449755,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9775,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9034380119557153,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.9887,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0344792829013392,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 1.0164,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.962111819460091,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9532,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9307416216103737,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.9921,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9097415683906258,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.945,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9216637422367028,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9544,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0345086013912552,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 0.9326,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.860918853096138,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.9159,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.052868992385184,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 1.0088,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.976301583243505,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 1.0028,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8985498819345825,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9355,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.959126962988729,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9988,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.000398571568602,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.9796,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.948004513811074,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9664,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8547904586649323,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.9382,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8169213509760057,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.9215,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0583824570606166,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 1.0131,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.1342358394785241,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9261,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9996791553998676,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9718,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9034438119698405,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.991,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9447949661240993,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9383,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9586454035674055,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9168,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9428822661438724,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.91,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9251971302121317,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 1.0019,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.918862537239612,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9831,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8880369061076363,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9869,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9157286966793228,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.9341,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9134731812829485,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.9283,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9093999478065837,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 1.0232,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0429037065797877,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.9529,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9601967408948001,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 1.0154,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9615543416593485,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.9286,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.912076570285461,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.902,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8642930007495335,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9412,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9544234669861017,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9903,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8391242045717849,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.8772,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8676096900956322,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.9685,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9146017413241526,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 1.0165,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8709824758554244,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.9719,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9430879983467464,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9654,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9178135352600507,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9424,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.0915652350945149,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.9302,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9417539872874993,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.9071,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9151500094703141,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.9681,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9286394678407768,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.9415,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.997686222732575,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9448,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9997420392693974,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.9966,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.025454770988222,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 1.0061,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.9004400517970723,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.9713,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 1.035609350141977,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.925,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.8205620842968449,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.9319,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0026055411591541,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.9171,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8729671855534491,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.9424,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9003800462124079,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9922,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0235000465659894,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.9668,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9256919446053998,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9285,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8698603521807748,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.9668,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9720860628297219,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.99,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8167870246160064,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9021,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.069653115198386,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9952,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9081746219961461,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9976,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9091308363718774,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.953,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9170653072328966,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9369,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9447879394939125,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.9319,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.063860724905578,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.8929,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9372827125363168,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.9613,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9973853986012087,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.9321,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.933708910044373,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.9819,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8501823140475498,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.9083,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8968192349851679,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9311,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9467954363380379,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.9867,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9770498456859923,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.9755,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9244002799395564,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9607,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0089997803593413,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 1.0077,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9729824091419037,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9754,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8937345069773265,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.9528,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7828933226169947,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.8448,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.6881530097825312,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8297,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8383643213490903,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8944,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0492979343611466,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9559,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7860944374266666,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.8304,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9289932358432823,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.9132,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9193187827812805,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.9172,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8729349335008283,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9643,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9358818082059703,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.8925,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8943626139959501,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9392,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9151007624865264,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.9498,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9338211772395434,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9619,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9029817897960257,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 1.0062,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9996484500817483,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9812,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8765015379256176,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9065,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0423180898129178,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9466,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7874776388995165,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.8455,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7938745462427268,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.9201,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8100864066177429,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8825,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.7304176748429774,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.9078,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8454782615675868,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9717,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0033589088155495,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9113,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0045541920143346,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.9802,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.9549525759581301,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.902,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.0385600265264971,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.9371,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 1.026356414371794,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.9704,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.8946408892785592,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9272,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9433836414294297,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 1.0327,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0136214861170298,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9562,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8516349942077444,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9044,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8606918324559609,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.889,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9621898897088845,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.9425,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8999210884555426,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.9559,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9132688261405465,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.9636,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.96739887282077,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.958,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8669479456473806,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9078,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8583747225253263,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.992,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8029229062809408,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.9104,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0517436166476481,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9127,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8792737661317848,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9328,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8118400865453468,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.9258,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.894095224382569,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9422,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8008306113009263,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.8864,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0824491035305586,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.9603,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9440647145528336,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9356,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9523845323654504,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.9583,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8590795033317857,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.923,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0084967034937304,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.972,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9013444708051802,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9805,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8851420202420345,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.9088,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.1459972501861888,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9487,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9391257487421422,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.9777,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.004314381160487,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8921,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9153049856368219,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9034,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9175691044105417,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.9555,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0218738680397261,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9464,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8311183845638194,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9857,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8372882474738428,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9277,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9439256261421974,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9889,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0188128413048634,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9182,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9367484244107676,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 1.0082,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.2649438792775638,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 1.0295,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9343919815578396,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9987,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.026191881903781,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 1.0093,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8863813583139732,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.8857,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9310895226485517,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9583,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8785760391659503,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.9509,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.810865397517544,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8363,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0450274779532105,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9478,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8563817460351204,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9341,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9202654722314237,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.9441,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0921613482811823,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.9607,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9704540789978041,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9413,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9560575287925731,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9676,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.0573638071375642,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9644,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.9045907983710009,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.9729,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 1.158506692783041,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 1.0491,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8630420509315611,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.8856,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.8794259243667014,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9319,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9083656791090297,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9013,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9551313866642618,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.9521,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9662740779782306,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 1.0104,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9321753471339548,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.8977,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9744976506191133,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.9655,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8960702114476669,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9226,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8867331235304629,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9158,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8162593563955296,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.8893,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.092573754242538,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.992,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8359118484411704,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9061,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8157544465859347,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 0.8577,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9343697029660539,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 1.0001,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8888199191652654,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.9049,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8916764016407774,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9234,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.0462153793340085,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9814,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9179145616912302,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.9493,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9001674153041553,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9446,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.2033915557290602,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.9634,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8090382291036919,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.9208,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8668030797487888,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.9509,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9079607458115487,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.9816,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9660764324244697,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 1.0215,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9707479728013486,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 1.0278,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9116418133277676,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.955,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9842417634853147,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.9409,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9733329443171795,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9357,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.801403566635771,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.8624,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9661459166620155,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.9435,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8764119756580947,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9952,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9749522107857632,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9781,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8900510283221014,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.8868,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9346525229346695,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.9183,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9925792295783066,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.9407,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.898656388238625,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9664,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.0469184473259812,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.9371,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9487561282792712,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.953,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9421710034497124,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9241,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8810989743636531,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.8962,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9968793884243532,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 1.0071,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8305670777743684,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9459,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8219634437237389,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8953,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 1.1505444861757854,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.986,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8523137932717626,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9969,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9113958021126214,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9257,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9641508088054317,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.9442,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8907370688959207,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9715843489375122,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.9028,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.8060891755145814,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.9515,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9235647289276558,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9238,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9883917004228607,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9704,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.929373229477013,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9839,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.9518091827387594,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.968,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8240557236482343,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.8758,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8988646170980703,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.9427,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0399035690145213,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 1.0229,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9054750140407912,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.9607,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8685646231286541,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.9334,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8687059821196736,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.9283,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9373054769488423,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9141,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8599121214184482,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.9604,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.886202487687736,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.9236,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9637264303046981,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.9264,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9532559907710517,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 1.0119,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9386242314684291,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9295,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9951195295678786,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.9378,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0412107333124232,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9474,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8952264426932172,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9031,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.988442495445306,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.9892,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9511420149297078,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9458,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8964385327916379,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8642,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8590231471822083,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.8998,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8920728082079487,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.9607,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9544576933919202,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.935,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8502120316835678,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9636,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9514711201063468,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.9251,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0858296479725027,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9738,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8945887283740663,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.9372,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.009456322676137,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.9836,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9302074749871171,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9074,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9317359260782803,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8998,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8862048439650887,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.9158,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8615007126234028,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.9098,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9394511331370565,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.9882,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9331670670411267,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.9461,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9574709930547879,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9056,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9897335453489471,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.9442,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.0220996200971046,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9034,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9193755218614106,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9581,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9870346970797649,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9264,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.039457898673744,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9098,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 1.1401573467226491,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 1.0577,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.894286038150505,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.8931,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9652079324928932,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 1.0096,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9401791514652397,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9264,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9620125826617901,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.9533,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9589747071021635,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.9813,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.919466288168128,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.9512,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9267699720189961,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9616,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9352422133052664,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.9843,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9370435738627757,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.912,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.9332076370582065,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9923,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.951260585951387,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.9523,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.7654624186718446,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.9168,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.8394142240439104,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.9731,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9373661181598301,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9761,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9951245389073456,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.9156,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9027336014340304,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9491,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.919806862989453,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.9288,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.922166860727834,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.9126,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.758448753362842,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8826,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0925031705747983,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9526,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9315342222587545,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9478,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8836753853774646,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.9345,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0748642897816478,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 1.019,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1020416653196514,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 1.0358,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9173402686748258,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.9385,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9366699045544487,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.943,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.7891702619702629,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.8825,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.017602644064826,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8916,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.3463408000185373,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 1.0137,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8712942647447294,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.9353,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8650729231108287,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.9479,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9822409711635433,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9564,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8814317638018199,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.9055,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1831561052929551,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.9414,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9461182275489118,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8379,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8260539390039969,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8412,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9858948315309966,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8946,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0240841326059864,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9578,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.7651010798958877,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8796,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.1230359951819133,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 1.0366,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.935980380559438,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.9666,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9957875414558593,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9951,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8744195717038817,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8881,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.123415163024355,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9967,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.859045065460368,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9039,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9674654018347075,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.9268,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9149679270302562,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9366,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9167962507331943,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 1.0223,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9813654701057842,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.9668,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9413559871033231,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9297,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9073621845606187,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.9528,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.2468338184260404,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9368,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9798699092319569,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.951,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8833200037199986,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.8742,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9096483030358838,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9921,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0721509786194834,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.9341,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9381596829629454,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.9631,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.9777345180892383,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9804,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0917306506317828,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.9922,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0477642076686153,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 1.0333,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.870623788143449,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.8795,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.055005728652181,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.9359,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.8410838945685877,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9358,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 1.0248185442014413,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9276,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.908309785158247,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8945,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9716596627688002,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9354,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9215256625889569,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9204,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0485240710442505,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9759,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.936937516570783,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.903,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8799230397541101,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.9482,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9926020716383587,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 1.0108,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0724867931817301,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.9212,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9441285736579836,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.974,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.921483222049883,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.951,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.960342092289876,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.9335,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9828879812954129,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9867,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8234945392114452,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8914,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0000826402879177,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.9773,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0033786882011886,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.9349,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7510089365029284,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.8271,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8384109135762632,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.9021,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0363702461555846,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9671,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8585982568857289,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.8992,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8840105709138143,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 1.0145,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9339167697478992,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8929,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.1207516317267792,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 1.0126,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0200583417237226,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.9466,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9513314620444498,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.8953,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9586741531772605,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.9473,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9022331216422342,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.9663,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8490262990446552,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.8788,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8058145521238111,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.8929,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8345851041186947,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9236,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.826077318304091,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.8243,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9184599602068002,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9559,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.863563339396089,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.8943,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9687564666016926,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 1.0246,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8016700165126284,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.779,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9272282536162947,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.9734,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.0683084960482627,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9624,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8605987032033153,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.9616,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8935171980628035,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.9985,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8958522226373784,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.9093,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9414368976653644,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 1.0025,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8947294596843949,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.9433,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9481206915774962,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9654,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7649948174764278,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.7975,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9290340911306668,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8933,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9044986483269645,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9713,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9253380811005857,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.9433,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8757542968221452,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.8957,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.7785950358937312,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.8482,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.8628071590588661,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.9646,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.940808457354721,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9177,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.9681807352846368,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9723,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.821825971484946,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.9946,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 1.1533371339083218,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9811,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9144868418475506,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8728,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.944671776521524,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 1.0094,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7670008530648152,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.9459,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9327346622916476,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9518,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8943519516706805,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9016,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.836843296484399,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.9211,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0837447047544206,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.9503,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.866782774054129,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.8956,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0952009207818028,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.9741,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8934284107949934,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9812,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9055985900214036,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.98,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0521748629311196,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.9679,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8971230691493547,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.9648,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7884612665388886,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.8784,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9183625147776798,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9053,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.331903266211853,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.928,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8890745911546998,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 1.0026,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8178399292146777,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9229,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7975421469547915,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.8221,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7856140415304413,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8835,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9212337717189589,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8905,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9664484359458227,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.9473,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.901758224483286,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9158,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9341148763526235,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.9773,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.062966710994553,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9751,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9580639512609573,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.95,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9326599922642728,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 1.0127,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9152850976254324,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.9637,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9167252904757152,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.9266,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8893893633564427,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.9904,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8546769886453603,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 1.0025,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8742731068086662,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8514,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9191629656740666,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9986,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.864915264868844,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.9264,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8898894014288737,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 1.0025,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8919176795271149,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.9007,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8326676139129455,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.838,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0331897953323774,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.9448,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8882070918904326,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 1.02,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7227145232727819,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.9057,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9827321389264827,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9532,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7656284602546837,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.8359,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8720059317674618,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8801,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.1718128370645895,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.9533,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8427219416576509,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.8424,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8372703249382428,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9005,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8579908451353849,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9429,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.1220620242593762,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9728,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.7949552663730435,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.8879,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.8958167476098237,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.9128,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.9033673883749678,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9956,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 1.0174307423574056,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.9825,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8964749493654028,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.985,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.0439340860246706,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.9507,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8641536189166213,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.925,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.6972359878388217,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.7657,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.2185536373113726,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.9898,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.760487348808859,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8861,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9047981685612663,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.9518,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8646224202452631,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9563,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8897413974385131,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.9682,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9572314021465514,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.9532,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8679164618142823,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.9026,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.1461550220832444,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.9126,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8775301602086298,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.9167,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.0076946607347246,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.9368,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9608832261364295,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9469,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.1218595981287796,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9426,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.912410310177432,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.9367,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9675360940968317,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.963,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9692011340827513,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.9727,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8849105582044469,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.921,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.068024996188178,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.9958,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.862400497555066,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8949,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9514879455715923,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 1.0166,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7556953785166127,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.9001,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8476460852870521,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9309,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7924200952817001,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.8525,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9220226215613513,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.9904,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8424416831984529,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.9067,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8755187086763289,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.9751,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8876459553345205,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8903,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8531474128314328,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9167,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9237689557291372,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9348,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8840559268033596,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.908,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.01725625317237,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9485,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8884451600536032,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.9618,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9167076330508916,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.9419,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9094547219117403,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.9002,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.7609836642879874,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.852,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.844623919132773,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9807,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9576966050485445,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.9275,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9913526844748883,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.986,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8390399493507212,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9242,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9453551353246631,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.9209,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8644027160141361,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8872,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.8697883635399205,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.9393,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.898613074240735,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.9549,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9366020106528409,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.912,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9502018485280642,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 1.0275,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 1.006905999368359,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9756,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9537657347541025,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.917,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9985993297073632,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.9081,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.9431546445191886,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.9207,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8773297684382142,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9485,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8740250009384237,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.8924,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9815806454494395,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.9698,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9486474181443565,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.9345,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9601631197817476,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.9815,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7483636069441965,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8775,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7654916172107221,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.8878,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.013322787317673,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.9808,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.796514685013387,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8519,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9158301962619945,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.965,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8941717164503102,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.9278,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9260085549648269,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9213,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0249095710219696,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9649,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8800021703463716,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.9598,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9022079788538561,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.9409,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9166074557382154,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9037,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8184827726326348,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.8209,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.979929768624538,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9214,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0725457211273963,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 1.0183,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9438217707664711,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9136,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.7603743013151304,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8404,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.6976747074120535,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8418,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8805257176947605,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8595,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9905031655779478,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9428,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0733219660298396,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 1.0012,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.1315056201173224,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9555,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9755225991546073,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 1.0104,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8039574392836043,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.8842,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.2298128351522584,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.9915,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9475187118391011,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9914,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0238418500361268,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.9668,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8879339186763638,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9628,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.043255910202892,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.9657,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.88146449315845,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9241,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0097402614639863,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.9725,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9262476296618154,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.9889,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.792777027274484,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.8819,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.805465899141217,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.9014,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.789099481307358,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.8989,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8892189160433801,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8155,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.982443124122306,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9903,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9249251852617494,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9345,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9009637299071784,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.9165,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8723614961861461,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.9423,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8112332828650175,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.92,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.0563803958747677,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9881,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.94837879715281,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.9337,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 1.1196342163276105,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.9563,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8559212941022728,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.8536,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.870399943587896,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.9491,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.9054637858503229,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8666,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.8830062987912204,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9563,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0307927102146766,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.9575,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9069737440389796,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9934,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7924465455993395,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.9049,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9947902888414152,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9646,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8787748633302995,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.9081,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1394231427703922,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9342,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1416652137952745,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 1.044,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9074571642867629,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.9839,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9797782216453558,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 0.8726,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0590030903237138,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.9164,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8571251524679723,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.9377,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8042233182079012,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.9228,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9866237251720887,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.9879,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9499895157449215,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.8796,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9427281586944805,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9563,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7837490283424455,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.8618,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7615948817822988,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.9481,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8733318088442981,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.944,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7888770614382925,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.8505,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8577354419612407,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.9144,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7383101646246054,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8305,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8379107800195931,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.9199,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.5425991074310408,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.9155,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.016735083022846,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.9858,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9187727463612595,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9916,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7273588661442333,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8711,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.958572799520519,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 1.0053,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8312394585884404,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.8994,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.051870370595863,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.9951,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8905620542250453,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.8816,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7939788016861045,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9355,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8282961796402444,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.922,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9062579731138316,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.971,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9736606006616938,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.8874,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8743235062321872,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8531,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8742558633785917,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.9189,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9425796036485377,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9864,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.1211345516482556,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9516,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8755079314807876,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.9524,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9399390059601492,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8558,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0180762048062242,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.9397,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9360742759788145,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.8549,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.9260178534746369,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.9218,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8609444981410015,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.8997,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8339600928030163,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.874,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.7194081356452551,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.8414,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8251133134550331,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9349,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8205915117047229,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.8964,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8857206237622188,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 1.0074,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8220871065580929,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.885,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 1.0091905898008606,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.9856,
+ "step": 2000
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.8444063048123951,
+ "learning_rate": 1.408652487514908e-05,
+ "loss": 0.9006,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0030105126684379,
+ "learning_rate": 1.408083612243465e-05,
+ "loss": 0.9172,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8580137669521448,
+ "learning_rate": 1.4075145784770496e-05,
+ "loss": 0.89,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8763817758432053,
+ "learning_rate": 1.4069453864366678e-05,
+ "loss": 0.9573,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9267136042695975,
+ "learning_rate": 1.4063760363433867e-05,
+ "loss": 0.9176,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0036559507217695,
+ "learning_rate": 1.405806528418336e-05,
+ "loss": 0.9799,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.100732038831473,
+ "learning_rate": 1.4052368628827057e-05,
+ "loss": 0.8295,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9240966350209425,
+ "learning_rate": 1.4046670399577478e-05,
+ "loss": 0.9179,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.847878896021774,
+ "learning_rate": 1.4040970598647742e-05,
+ "loss": 0.9063,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1571927472095265,
+ "learning_rate": 1.4035269228251589e-05,
+ "loss": 0.9563,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.961327897267119,
+ "learning_rate": 1.4029566290603368e-05,
+ "loss": 0.9664,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9656888729639598,
+ "learning_rate": 1.4023861787918031e-05,
+ "loss": 0.9354,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8360560613851813,
+ "learning_rate": 1.4018155722411144e-05,
+ "loss": 0.904,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1779022532073358,
+ "learning_rate": 1.4012448096298874e-05,
+ "loss": 1.049,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8676988199338543,
+ "learning_rate": 1.4006738911798001e-05,
+ "loss": 0.9345,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.889609653986335,
+ "learning_rate": 1.40010281711259e-05,
+ "loss": 0.935,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8467068357086099,
+ "learning_rate": 1.3995315876500565e-05,
+ "loss": 0.941,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.959093215888042,
+ "learning_rate": 1.3989602030140581e-05,
+ "loss": 0.9353,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9460388212930191,
+ "learning_rate": 1.398388663426514e-05,
+ "loss": 0.9561,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.062637070665528,
+ "learning_rate": 1.3978169691094037e-05,
+ "loss": 0.9985,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.019248969455229,
+ "learning_rate": 1.3972451202847665e-05,
+ "loss": 0.9691,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9563067223829539,
+ "learning_rate": 1.3966731171747024e-05,
+ "loss": 0.9612,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8929233715441107,
+ "learning_rate": 1.3961009600013702e-05,
+ "loss": 0.9203,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8994152635230828,
+ "learning_rate": 1.3955286489869894e-05,
+ "loss": 0.9565,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9346745860366049,
+ "learning_rate": 1.394956184353839e-05,
+ "loss": 1.018,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8314785135760687,
+ "learning_rate": 1.3943835663242577e-05,
+ "loss": 0.8875,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0486948440977388,
+ "learning_rate": 1.3938107951206438e-05,
+ "loss": 0.9506,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8553903310522047,
+ "learning_rate": 1.3932378709654548e-05,
+ "loss": 0.9638,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9728607739240488,
+ "learning_rate": 1.3926647940812081e-05,
+ "loss": 0.9155,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.802412682137892,
+ "learning_rate": 1.39209156469048e-05,
+ "loss": 0.9332,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0390899410783163,
+ "learning_rate": 1.3915181830159061e-05,
+ "loss": 0.9457,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9775315836068712,
+ "learning_rate": 1.3909446492801819e-05,
+ "loss": 0.9055,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8413444570623176,
+ "learning_rate": 1.3903709637060605e-05,
+ "loss": 0.9337,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.1380572872866588,
+ "learning_rate": 1.3897971265163546e-05,
+ "loss": 1.0123,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8706384708452394,
+ "learning_rate": 1.3892231379339369e-05,
+ "loss": 0.8948,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8078253574611688,
+ "learning_rate": 1.3886489981817375e-05,
+ "loss": 0.8797,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9925684455713385,
+ "learning_rate": 1.3880747074827454e-05,
+ "loss": 0.9285,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9027583259537797,
+ "learning_rate": 1.3875002660600085e-05,
+ "loss": 0.8611,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.915386451815617,
+ "learning_rate": 1.386925674136634e-05,
+ "loss": 0.9559,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0154426459142105,
+ "learning_rate": 1.3863509319357857e-05,
+ "loss": 0.9078,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.020893263005494,
+ "learning_rate": 1.3857760396806876e-05,
+ "loss": 0.9636,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.157603908132479,
+ "learning_rate": 1.3852009975946209e-05,
+ "loss": 0.9804,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9665388234932779,
+ "learning_rate": 1.3846258059009252e-05,
+ "loss": 0.9772,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 1.0191998266208375,
+ "learning_rate": 1.384050464822999e-05,
+ "loss": 0.9576,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9211230193790735,
+ "learning_rate": 1.383474974584297e-05,
+ "loss": 0.9601,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8436625927115899,
+ "learning_rate": 1.3828993354083342e-05,
+ "loss": 0.8874,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9398468829595837,
+ "learning_rate": 1.3823235475186816e-05,
+ "loss": 0.9378,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8836232770152602,
+ "learning_rate": 1.3817476111389685e-05,
+ "loss": 0.938,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.8314927195712102,
+ "learning_rate": 1.3811715264928824e-05,
+ "loss": 0.8972,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9009364299707033,
+ "learning_rate": 1.3805952938041674e-05,
+ "loss": 0.9061,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9512035371457199,
+ "learning_rate": 1.3800189132966257e-05,
+ "loss": 0.9252,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.9799480066971844,
+ "learning_rate": 1.3794423851941174e-05,
+ "loss": 0.9245,
+ "step": 2052
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.7910507035424716,
+ "learning_rate": 1.378865709720559e-05,
+ "loss": 0.9099,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.028852089793899,
+ "learning_rate": 1.3782888870999245e-05,
+ "loss": 0.9859,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8370521950540126,
+ "learning_rate": 1.377711917556245e-05,
+ "loss": 0.9183,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8788384431273036,
+ "learning_rate": 1.3771348013136096e-05,
+ "loss": 0.9893,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.844453665030592,
+ "learning_rate": 1.3765575385961627e-05,
+ "loss": 0.9731,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0220205459364426,
+ "learning_rate": 1.3759801296281072e-05,
+ "loss": 0.9872,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8583372827190888,
+ "learning_rate": 1.3754025746337014e-05,
+ "loss": 0.941,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8206358732033752,
+ "learning_rate": 1.3748248738372616e-05,
+ "loss": 0.9567,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8350144985375577,
+ "learning_rate": 1.3742470274631599e-05,
+ "loss": 0.9283,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0260855545139542,
+ "learning_rate": 1.3736690357358253e-05,
+ "loss": 0.8992,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8024535547931305,
+ "learning_rate": 1.3730908988797427e-05,
+ "loss": 0.8404,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0317181085617486,
+ "learning_rate": 1.3725126171194543e-05,
+ "loss": 0.8498,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0956177656988373,
+ "learning_rate": 1.371934190679558e-05,
+ "loss": 0.9627,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9402077389190346,
+ "learning_rate": 1.3713556197847076e-05,
+ "loss": 1.0306,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9560593740330857,
+ "learning_rate": 1.3707769046596136e-05,
+ "loss": 0.8394,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9762119033421444,
+ "learning_rate": 1.3701980455290425e-05,
+ "loss": 0.9129,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9462020367011332,
+ "learning_rate": 1.3696190426178162e-05,
+ "loss": 0.9498,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.023089007611191,
+ "learning_rate": 1.3690398961508128e-05,
+ "loss": 1.0076,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.903313823109592,
+ "learning_rate": 1.3684606063529662e-05,
+ "loss": 0.9683,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9231262282697168,
+ "learning_rate": 1.3678811734492659e-05,
+ "loss": 0.9101,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8620923051842395,
+ "learning_rate": 1.367301597664757e-05,
+ "loss": 0.9181,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9724540203390146,
+ "learning_rate": 1.36672187922454e-05,
+ "loss": 0.9283,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.87526774540864,
+ "learning_rate": 1.3661420183537705e-05,
+ "loss": 0.9583,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9936158946801154,
+ "learning_rate": 1.3655620152776605e-05,
+ "loss": 0.9843,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9374689285771917,
+ "learning_rate": 1.364981870221476e-05,
+ "loss": 0.9579,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9935733159199066,
+ "learning_rate": 1.364401583410539e-05,
+ "loss": 0.9993,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8636503646855689,
+ "learning_rate": 1.3638211550702256e-05,
+ "loss": 0.9309,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9306902626568633,
+ "learning_rate": 1.363240585425968e-05,
+ "loss": 0.9443,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9328480738047901,
+ "learning_rate": 1.362659874703253e-05,
+ "loss": 1.0248,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8771738123021174,
+ "learning_rate": 1.3620790231276213e-05,
+ "loss": 0.9057,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8457797419925539,
+ "learning_rate": 1.3614980309246692e-05,
+ "loss": 0.9175,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8604423255057652,
+ "learning_rate": 1.3609168983200474e-05,
+ "loss": 0.919,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.7618132245335117,
+ "learning_rate": 1.3603356255394613e-05,
+ "loss": 0.8441,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9656290442534817,
+ "learning_rate": 1.3597542128086702e-05,
+ "loss": 0.9738,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.910210719341017,
+ "learning_rate": 1.3591726603534885e-05,
+ "loss": 0.8867,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9433476413269557,
+ "learning_rate": 1.3585909683997842e-05,
+ "loss": 0.9897,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9998913981653302,
+ "learning_rate": 1.3580091371734798e-05,
+ "loss": 0.9552,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.1272893760182217,
+ "learning_rate": 1.357427166900552e-05,
+ "loss": 0.936,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9695667620659533,
+ "learning_rate": 1.3568450578070309e-05,
+ "loss": 0.9196,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0563699865753733,
+ "learning_rate": 1.3562628101190015e-05,
+ "loss": 0.9464,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0512822354617037,
+ "learning_rate": 1.3556804240626019e-05,
+ "loss": 0.8949,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8641328042351272,
+ "learning_rate": 1.3550978998640241e-05,
+ "loss": 0.8929,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.7352432934971339,
+ "learning_rate": 1.3545152377495136e-05,
+ "loss": 0.8602,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.0365473591432508,
+ "learning_rate": 1.3539324379453698e-05,
+ "loss": 0.99,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9630797455063902,
+ "learning_rate": 1.3533495006779455e-05,
+ "loss": 0.9395,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9157033189133503,
+ "learning_rate": 1.3527664261736471e-05,
+ "loss": 0.9556,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8763404164147738,
+ "learning_rate": 1.3521832146589335e-05,
+ "loss": 0.9182,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8671601810426536,
+ "learning_rate": 1.3515998663603174e-05,
+ "loss": 0.9382,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9911514266710806,
+ "learning_rate": 1.3510163815043647e-05,
+ "loss": 0.916,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8527537902425154,
+ "learning_rate": 1.3504327603176943e-05,
+ "loss": 0.9124,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.8086932035918405,
+ "learning_rate": 1.3498490030269782e-05,
+ "loss": 0.8575,
+ "step": 2103
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.9729506909184018,
+ "learning_rate": 1.3492651098589398e-05,
+ "loss": 0.9846,
+ "step": 2104
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 1.2058711788784828,
+ "learning_rate": 1.3486810810403578e-05,
+ "loss": 1.0487,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0201880887312547,
+ "learning_rate": 1.348096916798062e-05,
+ "loss": 0.9223,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8199658744339089,
+ "learning_rate": 1.3475126173589343e-05,
+ "loss": 0.8093,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9638803506762078,
+ "learning_rate": 1.3469281829499107e-05,
+ "loss": 0.9318,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8657124745199996,
+ "learning_rate": 1.3463436137979786e-05,
+ "loss": 0.9515,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9100122378778769,
+ "learning_rate": 1.3457589101301776e-05,
+ "loss": 0.9243,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0480835696428203,
+ "learning_rate": 1.3451740721736005e-05,
+ "loss": 0.9053,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.7936569437396148,
+ "learning_rate": 1.3445891001553905e-05,
+ "loss": 0.9174,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9517063633156221,
+ "learning_rate": 1.3440039943027452e-05,
+ "loss": 0.971,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9471740203984249,
+ "learning_rate": 1.3434187548429126e-05,
+ "loss": 0.9239,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.1278599771202846,
+ "learning_rate": 1.3428333820031922e-05,
+ "loss": 0.9818,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8927044567776891,
+ "learning_rate": 1.3422478760109371e-05,
+ "loss": 0.9093,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8827983163542394,
+ "learning_rate": 1.3416622370935507e-05,
+ "loss": 0.9345,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9974482146621254,
+ "learning_rate": 1.3410764654784885e-05,
+ "loss": 0.8699,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8507978660363839,
+ "learning_rate": 1.3404905613932573e-05,
+ "loss": 0.8159,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9133857728169197,
+ "learning_rate": 1.3399045250654152e-05,
+ "loss": 0.9172,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8392325672492333,
+ "learning_rate": 1.3393183567225724e-05,
+ "loss": 0.9434,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9450855945837672,
+ "learning_rate": 1.3387320565923901e-05,
+ "loss": 0.9196,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9050002995145253,
+ "learning_rate": 1.33814562490258e-05,
+ "loss": 0.9223,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.896773243860159,
+ "learning_rate": 1.3375590618809056e-05,
+ "loss": 0.9517,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9605119051546097,
+ "learning_rate": 1.3369723677551813e-05,
+ "loss": 0.9436,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.1686377562910408,
+ "learning_rate": 1.3363855427532724e-05,
+ "loss": 0.8846,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9386551282224604,
+ "learning_rate": 1.3357985871030948e-05,
+ "loss": 0.8806,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.851830143766913,
+ "learning_rate": 1.3352115010326155e-05,
+ "loss": 0.9407,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.012671182964821,
+ "learning_rate": 1.3346242847698516e-05,
+ "loss": 0.9655,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0540874348167801,
+ "learning_rate": 1.3340369385428713e-05,
+ "loss": 0.9399,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9080794848861694,
+ "learning_rate": 1.3334494625797936e-05,
+ "loss": 0.9469,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0779542861242648,
+ "learning_rate": 1.3328618571087867e-05,
+ "loss": 0.933,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9015655214132624,
+ "learning_rate": 1.33227412235807e-05,
+ "loss": 0.9185,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9254527944107253,
+ "learning_rate": 1.3316862585559132e-05,
+ "loss": 0.9219,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8634184222934571,
+ "learning_rate": 1.3310982659306352e-05,
+ "loss": 0.9605,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0732090424668668,
+ "learning_rate": 1.3305101447106064e-05,
+ "loss": 0.9052,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8289422071911152,
+ "learning_rate": 1.3299218951242456e-05,
+ "loss": 0.9016,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8212845421733582,
+ "learning_rate": 1.3293335174000226e-05,
+ "loss": 0.9402,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8696486413548757,
+ "learning_rate": 1.328745011766456e-05,
+ "loss": 0.9575,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9030138284532585,
+ "learning_rate": 1.3281563784521154e-05,
+ "loss": 0.9651,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8930054034239019,
+ "learning_rate": 1.3275676176856185e-05,
+ "loss": 0.9363,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8733947373441526,
+ "learning_rate": 1.3269787296956333e-05,
+ "loss": 0.9801,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9884894120237259,
+ "learning_rate": 1.3263897147108778e-05,
+ "loss": 0.9387,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8771721366236639,
+ "learning_rate": 1.3258005729601178e-05,
+ "loss": 0.9025,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8934162433915248,
+ "learning_rate": 1.3252113046721692e-05,
+ "loss": 0.9227,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9288113411421657,
+ "learning_rate": 1.3246219100758974e-05,
+ "loss": 0.9579,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9957768345151621,
+ "learning_rate": 1.3240323894002166e-05,
+ "loss": 0.9727,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8977422575556423,
+ "learning_rate": 1.3234427428740895e-05,
+ "loss": 0.812,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8941503061376238,
+ "learning_rate": 1.3228529707265279e-05,
+ "loss": 0.9106,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8300140290197217,
+ "learning_rate": 1.322263073186593e-05,
+ "loss": 0.935,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9379297004350344,
+ "learning_rate": 1.3216730504833938e-05,
+ "loss": 0.9012,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9151026409538474,
+ "learning_rate": 1.3210829028460883e-05,
+ "loss": 0.9311,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8390749088793632,
+ "learning_rate": 1.3204926305038832e-05,
+ "loss": 0.9072,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.8950892281852907,
+ "learning_rate": 1.3199022336860335e-05,
+ "loss": 0.8161,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.901731932228211,
+ "learning_rate": 1.3193117126218425e-05,
+ "loss": 0.9456,
+ "step": 2155
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.9083180685523767,
+ "learning_rate": 1.3187210675406617e-05,
+ "loss": 0.922,
+ "step": 2156
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 1.0198118140775685,
+ "learning_rate": 1.318130298671891e-05,
+ "loss": 0.9803,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8024833717783901,
+ "learning_rate": 1.3175394062449777e-05,
+ "loss": 0.9135,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.1318702244111514,
+ "learning_rate": 1.3169483904894185e-05,
+ "loss": 1.0018,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7361656801553551,
+ "learning_rate": 1.3163572516347565e-05,
+ "loss": 0.8265,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.966866240758402,
+ "learning_rate": 1.3157659899105835e-05,
+ "loss": 0.9364,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.794657015054268,
+ "learning_rate": 1.315174605546538e-05,
+ "loss": 0.8606,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9862857009243148,
+ "learning_rate": 1.3145830987723081e-05,
+ "loss": 0.9638,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9369752946655109,
+ "learning_rate": 1.3139914698176273e-05,
+ "loss": 0.9144,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0568666449967494,
+ "learning_rate": 1.3133997189122777e-05,
+ "loss": 0.8772,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8739618214407433,
+ "learning_rate": 1.3128078462860887e-05,
+ "loss": 0.8755,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8566246050296912,
+ "learning_rate": 1.3122158521689367e-05,
+ "loss": 0.9244,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8511710754712899,
+ "learning_rate": 1.3116237367907454e-05,
+ "loss": 0.895,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0127171499809435,
+ "learning_rate": 1.3110315003814855e-05,
+ "loss": 0.9012,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8483280934174003,
+ "learning_rate": 1.3104391431711748e-05,
+ "loss": 0.8873,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9150391477403632,
+ "learning_rate": 1.309846665389878e-05,
+ "loss": 0.8914,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0201957239115922,
+ "learning_rate": 1.309254067267707e-05,
+ "loss": 0.9195,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7568426285724813,
+ "learning_rate": 1.3086613490348198e-05,
+ "loss": 0.8847,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9130032014846652,
+ "learning_rate": 1.3080685109214208e-05,
+ "loss": 0.9476,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9468735783199617,
+ "learning_rate": 1.3074755531577628e-05,
+ "loss": 0.9385,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8551801806917512,
+ "learning_rate": 1.3068824759741428e-05,
+ "loss": 0.9764,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8444878675059705,
+ "learning_rate": 1.306289279600905e-05,
+ "loss": 0.9023,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9144764402575253,
+ "learning_rate": 1.3056959642684404e-05,
+ "loss": 0.8931,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8939309904244324,
+ "learning_rate": 1.305102530207186e-05,
+ "loss": 0.9569,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8682559149021166,
+ "learning_rate": 1.3045089776476246e-05,
+ "loss": 0.8868,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8351988291792387,
+ "learning_rate": 1.3039153068202853e-05,
+ "loss": 0.8734,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9950694497612245,
+ "learning_rate": 1.3033215179557424e-05,
+ "loss": 0.9645,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9808033668068006,
+ "learning_rate": 1.3027276112846172e-05,
+ "loss": 0.9593,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8556344120101488,
+ "learning_rate": 1.3021335870375763e-05,
+ "loss": 0.9209,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9307037141925026,
+ "learning_rate": 1.3015394454453316e-05,
+ "loss": 1.006,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8332972795043385,
+ "learning_rate": 1.3009451867386411e-05,
+ "loss": 0.972,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8498486138180256,
+ "learning_rate": 1.3003508111483077e-05,
+ "loss": 0.8918,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0154267592166855,
+ "learning_rate": 1.29975631890518e-05,
+ "loss": 1.0385,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9643255405369295,
+ "learning_rate": 1.2991617102401524e-05,
+ "loss": 1.0189,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9999963177765913,
+ "learning_rate": 1.2985669853841635e-05,
+ "loss": 0.9502,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.055543509771536,
+ "learning_rate": 1.297972144568198e-05,
+ "loss": 0.9946,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8317250919203271,
+ "learning_rate": 1.2973771880232853e-05,
+ "loss": 0.9091,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7895691954424078,
+ "learning_rate": 1.2967821159804994e-05,
+ "loss": 0.8551,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.0362042124719069,
+ "learning_rate": 1.2961869286709594e-05,
+ "loss": 0.9761,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.807503085482749,
+ "learning_rate": 1.295591626325829e-05,
+ "loss": 0.8496,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8072008486265011,
+ "learning_rate": 1.2949962091763174e-05,
+ "loss": 0.8929,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8690581258452311,
+ "learning_rate": 1.2944006774536773e-05,
+ "loss": 0.934,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7956373580751883,
+ "learning_rate": 1.2938050313892062e-05,
+ "loss": 0.8662,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 1.02111310621287,
+ "learning_rate": 1.2932092712142468e-05,
+ "loss": 0.9334,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8717684939022758,
+ "learning_rate": 1.292613397160185e-05,
+ "loss": 0.912,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7580401836519327,
+ "learning_rate": 1.2920174094584514e-05,
+ "loss": 0.8451,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7354593767865946,
+ "learning_rate": 1.2914213083405211e-05,
+ "loss": 0.7894,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9068255297786387,
+ "learning_rate": 1.2908250940379124e-05,
+ "loss": 0.9369,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.7333921022370111,
+ "learning_rate": 1.2902287667821885e-05,
+ "loss": 0.8307,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9422968743724666,
+ "learning_rate": 1.289632326804956e-05,
+ "loss": 0.9612,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.6550919422487774,
+ "learning_rate": 1.2890357743378649e-05,
+ "loss": 0.7924,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8160761756000248,
+ "learning_rate": 1.2884391096126098e-05,
+ "loss": 0.8763,
+ "step": 2207
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.9061214329104939,
+ "learning_rate": 1.2878423328609281e-05,
+ "loss": 0.8859,
+ "step": 2208
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.8939459555110824,
+ "learning_rate": 1.2872454443146015e-05,
+ "loss": 0.8946,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9367840606138146,
+ "learning_rate": 1.286648444205454e-05,
+ "loss": 0.9106,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9126460998591084,
+ "learning_rate": 1.2860513327653537e-05,
+ "loss": 0.8996,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8027713902262512,
+ "learning_rate": 1.2854541102262119e-05,
+ "loss": 0.7973,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0651955742880965,
+ "learning_rate": 1.284856776819983e-05,
+ "loss": 0.981,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.2831194224703233,
+ "learning_rate": 1.2842593327786649e-05,
+ "loss": 0.9468,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.1021268640452047,
+ "learning_rate": 1.2836617783342968e-05,
+ "loss": 0.8712,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8831486611833153,
+ "learning_rate": 1.2830641137189628e-05,
+ "loss": 0.9142,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.900483403576882,
+ "learning_rate": 1.282466339164789e-05,
+ "loss": 0.9413,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0200303811207974,
+ "learning_rate": 1.2818684549039437e-05,
+ "loss": 0.9141,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.982807273900127,
+ "learning_rate": 1.2812704611686386e-05,
+ "loss": 0.9387,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8435029975191141,
+ "learning_rate": 1.2806723581911274e-05,
+ "loss": 0.9205,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8318831035980068,
+ "learning_rate": 1.2800741462037065e-05,
+ "loss": 0.9073,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9154503782346017,
+ "learning_rate": 1.2794758254387147e-05,
+ "loss": 0.8904,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8830248215532113,
+ "learning_rate": 1.2788773961285323e-05,
+ "loss": 0.9398,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0039121615109634,
+ "learning_rate": 1.2782788585055829e-05,
+ "loss": 0.8373,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7819833237413248,
+ "learning_rate": 1.2776802128023317e-05,
+ "loss": 0.8329,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7951873459325333,
+ "learning_rate": 1.2770814592512853e-05,
+ "loss": 0.931,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7653848177976322,
+ "learning_rate": 1.2764825980849931e-05,
+ "loss": 0.9421,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9283523838898656,
+ "learning_rate": 1.2758836295360455e-05,
+ "loss": 0.9328,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8757328387584603,
+ "learning_rate": 1.2752845538370752e-05,
+ "loss": 0.8946,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.1117225459926026,
+ "learning_rate": 1.2746853712207567e-05,
+ "loss": 0.961,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9067243054740067,
+ "learning_rate": 1.274086081919805e-05,
+ "loss": 0.8418,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 1.0406832772839323,
+ "learning_rate": 1.273486686166977e-05,
+ "loss": 0.9526,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8190042989077312,
+ "learning_rate": 1.2728871841950719e-05,
+ "loss": 0.8949,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.843455189454114,
+ "learning_rate": 1.2722875762369288e-05,
+ "loss": 0.966,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9128400211362868,
+ "learning_rate": 1.2716878625254287e-05,
+ "loss": 0.9684,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8811556270079063,
+ "learning_rate": 1.2710880432934934e-05,
+ "loss": 0.9431,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9670319124586425,
+ "learning_rate": 1.270488118774086e-05,
+ "loss": 0.9217,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.6988637360254698,
+ "learning_rate": 1.26988808920021e-05,
+ "loss": 0.8693,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8652315443802078,
+ "learning_rate": 1.26928795480491e-05,
+ "loss": 0.9067,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7962169833246877,
+ "learning_rate": 1.2686877158212715e-05,
+ "loss": 0.9165,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7999814792080348,
+ "learning_rate": 1.26808737248242e-05,
+ "loss": 0.8628,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8878929008246196,
+ "learning_rate": 1.2674869250215225e-05,
+ "loss": 0.9566,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9643444116175804,
+ "learning_rate": 1.2668863736717855e-05,
+ "loss": 0.9864,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8686630016249888,
+ "learning_rate": 1.2662857186664558e-05,
+ "loss": 0.9201,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8595322784723836,
+ "learning_rate": 1.2656849602388222e-05,
+ "loss": 0.8776,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8686109940956778,
+ "learning_rate": 1.2650840986222111e-05,
+ "loss": 0.8966,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8567876908949325,
+ "learning_rate": 1.2644831340499906e-05,
+ "loss": 0.8575,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.90192186142703,
+ "learning_rate": 1.2638820667555685e-05,
+ "loss": 0.9649,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8250028683500116,
+ "learning_rate": 1.2632808969723927e-05,
+ "loss": 0.9171,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9008503688386137,
+ "learning_rate": 1.26267962493395e-05,
+ "loss": 0.9599,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9913614323250304,
+ "learning_rate": 1.2620782508737678e-05,
+ "loss": 0.8675,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9569919582981561,
+ "learning_rate": 1.2614767750254129e-05,
+ "loss": 0.8051,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9231133392473371,
+ "learning_rate": 1.2608751976224916e-05,
+ "loss": 0.9404,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9456264298714065,
+ "learning_rate": 1.2602735188986498e-05,
+ "loss": 0.9648,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8770379992344625,
+ "learning_rate": 1.2596717390875721e-05,
+ "loss": 0.897,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9504760546021825,
+ "learning_rate": 1.2590698584229834e-05,
+ "loss": 0.9028,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9563220870749286,
+ "learning_rate": 1.2584678771386467e-05,
+ "loss": 0.9837,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.9448542045546671,
+ "learning_rate": 1.2578657954683651e-05,
+ "loss": 1.005,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7810543548466898,
+ "learning_rate": 1.2572636136459799e-05,
+ "loss": 0.8573,
+ "step": 2259
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.7848109531627149,
+ "learning_rate": 1.2566613319053713e-05,
+ "loss": 0.8474,
+ "step": 2260
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.8402319429607122,
+ "learning_rate": 1.2560589504804592e-05,
+ "loss": 0.8793,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8468642584985647,
+ "learning_rate": 1.2554564696052011e-05,
+ "loss": 0.8891,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9205537388059336,
+ "learning_rate": 1.2548538895135942e-05,
+ "loss": 0.9479,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.3191982669707438,
+ "learning_rate": 1.254251210439673e-05,
+ "loss": 0.9465,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0155844198886341,
+ "learning_rate": 1.2536484326175114e-05,
+ "loss": 0.9233,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7914661481418371,
+ "learning_rate": 1.2530455562812214e-05,
+ "loss": 0.7637,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8576349287537162,
+ "learning_rate": 1.252442581664953e-05,
+ "loss": 0.8873,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.3116892981730255,
+ "learning_rate": 1.2518395090028952e-05,
+ "loss": 0.9261,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9505863375062338,
+ "learning_rate": 1.2512363385292739e-05,
+ "loss": 0.9286,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9790558920112058,
+ "learning_rate": 1.2506330704783533e-05,
+ "loss": 0.9397,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.777067707819939,
+ "learning_rate": 1.2500297050844367e-05,
+ "loss": 0.8604,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9693193786093736,
+ "learning_rate": 1.2494262425818637e-05,
+ "loss": 0.9279,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9132165167761025,
+ "learning_rate": 1.2488226832050116e-05,
+ "loss": 0.8659,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0404534785825161,
+ "learning_rate": 1.2482190271882973e-05,
+ "loss": 0.9227,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8939335737641162,
+ "learning_rate": 1.2476152747661727e-05,
+ "loss": 0.8742,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9025028460582737,
+ "learning_rate": 1.2470114261731288e-05,
+ "loss": 0.9411,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9037992759277862,
+ "learning_rate": 1.246407481643693e-05,
+ "loss": 0.93,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9006461039114079,
+ "learning_rate": 1.245803441412431e-05,
+ "loss": 0.9155,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8490977988796075,
+ "learning_rate": 1.2451993057139445e-05,
+ "loss": 0.9685,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9180759161931465,
+ "learning_rate": 1.2445950747828732e-05,
+ "loss": 0.9185,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.896120846584157,
+ "learning_rate": 1.2439907488538934e-05,
+ "loss": 0.8933,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9134146033827902,
+ "learning_rate": 1.243386328161718e-05,
+ "loss": 0.9933,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8284319600278802,
+ "learning_rate": 1.2427818129410975e-05,
+ "loss": 0.9607,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8797605668553792,
+ "learning_rate": 1.2421772034268187e-05,
+ "loss": 0.9565,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9671130142722008,
+ "learning_rate": 1.2415724998537042e-05,
+ "loss": 0.9196,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8870725850010622,
+ "learning_rate": 1.2409677024566145e-05,
+ "loss": 0.927,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8795503727652301,
+ "learning_rate": 1.240362811470446e-05,
+ "loss": 0.8702,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7695415996567161,
+ "learning_rate": 1.2397578271301312e-05,
+ "loss": 0.9047,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8942816075711105,
+ "learning_rate": 1.2391527496706389e-05,
+ "loss": 0.9137,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9839494866211023,
+ "learning_rate": 1.2385475793269744e-05,
+ "loss": 0.9475,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8607761945903216,
+ "learning_rate": 1.2379423163341791e-05,
+ "loss": 0.9513,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9874676021466612,
+ "learning_rate": 1.2373369609273299e-05,
+ "loss": 0.9573,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9967685211443729,
+ "learning_rate": 1.2367315133415396e-05,
+ "loss": 0.88,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9272517188545901,
+ "learning_rate": 1.2361259738119575e-05,
+ "loss": 0.8903,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9299453745353854,
+ "learning_rate": 1.2355203425737683e-05,
+ "loss": 0.9457,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9328180096893278,
+ "learning_rate": 1.2349146198621917e-05,
+ "loss": 1.0141,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9966162094449411,
+ "learning_rate": 1.2343088059124839e-05,
+ "loss": 0.9806,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7454570005398844,
+ "learning_rate": 1.2337029009599357e-05,
+ "loss": 0.8621,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0723523984689818,
+ "learning_rate": 1.2330969052398735e-05,
+ "loss": 1.0161,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8777414892988066,
+ "learning_rate": 1.2324908189876597e-05,
+ "loss": 0.8917,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.1967680186387344,
+ "learning_rate": 1.2318846424386907e-05,
+ "loss": 0.9792,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.919167550777173,
+ "learning_rate": 1.2312783758283981e-05,
+ "loss": 0.9286,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9323671006879363,
+ "learning_rate": 1.23067201939225e-05,
+ "loss": 0.9162,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9022285276392306,
+ "learning_rate": 1.2300655733657475e-05,
+ "loss": 0.9074,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9650623323939015,
+ "learning_rate": 1.2294590379844268e-05,
+ "loss": 0.8816,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8453901908969638,
+ "learning_rate": 1.2288524134838602e-05,
+ "loss": 0.8916,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 1.0463494277731755,
+ "learning_rate": 1.2282457000996533e-05,
+ "loss": 0.9261,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9707234867884714,
+ "learning_rate": 1.2276388980674465e-05,
+ "loss": 1.0039,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.7917339614535102,
+ "learning_rate": 1.227032007622915e-05,
+ "loss": 0.865,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.8993644056063185,
+ "learning_rate": 1.2264250290017675e-05,
+ "loss": 0.9227,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9074737570841841,
+ "learning_rate": 1.2258179624397477e-05,
+ "loss": 0.8732,
+ "step": 2311
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.9243744757860682,
+ "learning_rate": 1.2252108081726337e-05,
+ "loss": 0.978,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0963824532598228,
+ "learning_rate": 1.224603566436237e-05,
+ "loss": 0.9022,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9775942897344263,
+ "learning_rate": 1.2239962374664029e-05,
+ "loss": 0.9061,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0748437593447329,
+ "learning_rate": 1.2233888214990113e-05,
+ "loss": 0.981,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.941214829389541,
+ "learning_rate": 1.2227813187699757e-05,
+ "loss": 0.9364,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0147802058068196,
+ "learning_rate": 1.222173729515243e-05,
+ "loss": 0.9382,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.931701357529382,
+ "learning_rate": 1.2215660539707936e-05,
+ "loss": 0.9342,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8625971996241817,
+ "learning_rate": 1.2209582923726424e-05,
+ "loss": 0.9459,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7732411891644637,
+ "learning_rate": 1.2203504449568361e-05,
+ "loss": 0.8984,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9396893140932644,
+ "learning_rate": 1.2197425119594563e-05,
+ "loss": 0.8818,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.905540988354156,
+ "learning_rate": 1.219134493616617e-05,
+ "loss": 0.9799,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9244985365741272,
+ "learning_rate": 1.2185263901644653e-05,
+ "loss": 0.9354,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9628362168750522,
+ "learning_rate": 1.217918201839182e-05,
+ "loss": 0.9355,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9880630455826348,
+ "learning_rate": 1.2173099288769799e-05,
+ "loss": 0.8397,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8050007644375362,
+ "learning_rate": 1.2167015715141057e-05,
+ "loss": 0.8992,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7707560559980093,
+ "learning_rate": 1.216093129986838e-05,
+ "loss": 0.8634,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8594489969289725,
+ "learning_rate": 1.2154846045314885e-05,
+ "loss": 0.9415,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.962088656434714,
+ "learning_rate": 1.214875995384402e-05,
+ "loss": 0.9516,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8325433940208284,
+ "learning_rate": 1.214267302781955e-05,
+ "loss": 0.9341,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8838573252939088,
+ "learning_rate": 1.2136585269605558e-05,
+ "loss": 0.8697,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7586705153269148,
+ "learning_rate": 1.2130496681566475e-05,
+ "loss": 0.8863,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.008620085739925,
+ "learning_rate": 1.212440726606703e-05,
+ "loss": 1.0598,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9048777623965037,
+ "learning_rate": 1.211831702547228e-05,
+ "loss": 0.9518,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.1165940923709488,
+ "learning_rate": 1.2112225962147605e-05,
+ "loss": 0.931,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8545296331102793,
+ "learning_rate": 1.210613407845871e-05,
+ "loss": 0.9168,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.1169495708522161,
+ "learning_rate": 1.2100041376771605e-05,
+ "loss": 0.8689,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.839029030347656,
+ "learning_rate": 1.209394785945263e-05,
+ "loss": 0.8721,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9421555154811644,
+ "learning_rate": 1.2087853528868432e-05,
+ "loss": 0.9253,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8391201168939678,
+ "learning_rate": 1.2081758387385982e-05,
+ "loss": 0.8823,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9536242397741264,
+ "learning_rate": 1.2075662437372567e-05,
+ "loss": 0.9544,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9184315531906584,
+ "learning_rate": 1.2069565681195776e-05,
+ "loss": 0.9237,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8571690635275195,
+ "learning_rate": 1.206346812122352e-05,
+ "loss": 0.7997,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.993736493041169,
+ "learning_rate": 1.2057369759824025e-05,
+ "loss": 0.9158,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9232450969435135,
+ "learning_rate": 1.2051270599365825e-05,
+ "loss": 0.9434,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.937401762052201,
+ "learning_rate": 1.2045170642217756e-05,
+ "loss": 0.9659,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9884612109000868,
+ "learning_rate": 1.2039069890748978e-05,
+ "loss": 0.9275,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8096746781793203,
+ "learning_rate": 1.2032968347328952e-05,
+ "loss": 0.8827,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9721901539982749,
+ "learning_rate": 1.2026866014327446e-05,
+ "loss": 0.9053,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.868185981043627,
+ "learning_rate": 1.2020762894114535e-05,
+ "loss": 0.9154,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0040657783176368,
+ "learning_rate": 1.20146589890606e-05,
+ "loss": 0.9764,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8993919390029462,
+ "learning_rate": 1.2008554301536328e-05,
+ "loss": 0.9335,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7915617021866037,
+ "learning_rate": 1.2002448833912712e-05,
+ "loss": 0.9049,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.941600506227459,
+ "learning_rate": 1.1996342588561042e-05,
+ "loss": 0.9496,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.8806539928345356,
+ "learning_rate": 1.1990235567852917e-05,
+ "loss": 0.9784,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7719462068672606,
+ "learning_rate": 1.1984127774160226e-05,
+ "loss": 0.8674,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9750477133822913,
+ "learning_rate": 1.1978019209855174e-05,
+ "loss": 0.9517,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0129274251264657,
+ "learning_rate": 1.1971909877310253e-05,
+ "loss": 0.9528,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 1.0276780192047423,
+ "learning_rate": 1.1965799778898258e-05,
+ "loss": 0.9619,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9888950447198691,
+ "learning_rate": 1.1959688916992279e-05,
+ "loss": 0.9426,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9804139150430459,
+ "learning_rate": 1.1953577293965707e-05,
+ "loss": 0.9743,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9917794852029977,
+ "learning_rate": 1.1947464912192228e-05,
+ "loss": 0.9499,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.9346199072311586,
+ "learning_rate": 1.1941351774045815e-05,
+ "loss": 0.9202,
+ "step": 2363
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.7889692200039892,
+ "learning_rate": 1.1935237881900743e-05,
+ "loss": 0.8809,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0290026816597673,
+ "learning_rate": 1.1929123238131579e-05,
+ "loss": 0.9394,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9155550587591733,
+ "learning_rate": 1.1923007845113178e-05,
+ "loss": 0.9183,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8280934757123218,
+ "learning_rate": 1.1916891705220689e-05,
+ "loss": 0.8689,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.1462505535112175,
+ "learning_rate": 1.191077482082955e-05,
+ "loss": 0.9282,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8489004035345884,
+ "learning_rate": 1.1904657194315486e-05,
+ "loss": 0.926,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7324528830640139,
+ "learning_rate": 1.1898538828054517e-05,
+ "loss": 0.8437,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.098786163329813,
+ "learning_rate": 1.1892419724422946e-05,
+ "loss": 0.9005,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9048738472171444,
+ "learning_rate": 1.1886299885797357e-05,
+ "loss": 0.8989,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.10612713008854,
+ "learning_rate": 1.1880179314554629e-05,
+ "loss": 0.9768,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9252857630708283,
+ "learning_rate": 1.1874058013071923e-05,
+ "loss": 0.9211,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8514003700485429,
+ "learning_rate": 1.1867935983726676e-05,
+ "loss": 0.8755,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9311325301676305,
+ "learning_rate": 1.186181322889662e-05,
+ "loss": 0.8945,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8600593589198032,
+ "learning_rate": 1.1855689750959759e-05,
+ "loss": 0.915,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.054460247599659,
+ "learning_rate": 1.1849565552294379e-05,
+ "loss": 0.9009,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7825726964835971,
+ "learning_rate": 1.1843440635279056e-05,
+ "loss": 0.9202,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8782667464030242,
+ "learning_rate": 1.1837315002292629e-05,
+ "loss": 0.9354,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9420370586863615,
+ "learning_rate": 1.1831188655714225e-05,
+ "loss": 0.9293,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.160896572804164,
+ "learning_rate": 1.182506159792325e-05,
+ "loss": 0.9205,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0574638697184164,
+ "learning_rate": 1.1818933831299381e-05,
+ "loss": 0.9217,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0116185231622183,
+ "learning_rate": 1.1812805358222571e-05,
+ "loss": 0.9726,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8660501324928412,
+ "learning_rate": 1.180667618107305e-05,
+ "loss": 0.947,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8972308213571614,
+ "learning_rate": 1.1800546302231317e-05,
+ "loss": 0.9541,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9740836176917561,
+ "learning_rate": 1.1794415724078147e-05,
+ "loss": 1.0161,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0951667952404012,
+ "learning_rate": 1.1788284448994588e-05,
+ "loss": 0.9706,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8276278059262884,
+ "learning_rate": 1.1782152479361956e-05,
+ "loss": 0.9164,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9422791050884577,
+ "learning_rate": 1.1776019817561834e-05,
+ "loss": 0.9288,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.0147981856724573,
+ "learning_rate": 1.1769886465976086e-05,
+ "loss": 0.8612,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8108186714213463,
+ "learning_rate": 1.1763752426986823e-05,
+ "loss": 0.8637,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9030533777656744,
+ "learning_rate": 1.1757617702976443e-05,
+ "loss": 0.849,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9580950838209914,
+ "learning_rate": 1.17514822963276e-05,
+ "loss": 0.9419,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7702737871040894,
+ "learning_rate": 1.1745346209423216e-05,
+ "loss": 0.9012,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8877065011852157,
+ "learning_rate": 1.1739209444646479e-05,
+ "loss": 0.9031,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9512991898337169,
+ "learning_rate": 1.1733072004380827e-05,
+ "loss": 0.9506,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9795734030977572,
+ "learning_rate": 1.1726933891009985e-05,
+ "loss": 0.9609,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8824455451460888,
+ "learning_rate": 1.1720795106917917e-05,
+ "loss": 0.9016,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9727053500616915,
+ "learning_rate": 1.171465565448886e-05,
+ "loss": 0.9849,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8714983960102699,
+ "learning_rate": 1.1708515536107299e-05,
+ "loss": 1.0035,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7904429359741996,
+ "learning_rate": 1.1702374754157998e-05,
+ "loss": 0.8357,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9239708984692373,
+ "learning_rate": 1.1696233311025957e-05,
+ "loss": 0.9104,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8148168826691347,
+ "learning_rate": 1.1690091209096441e-05,
+ "loss": 0.9029,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.870782489451877,
+ "learning_rate": 1.1683948450754976e-05,
+ "loss": 0.9301,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 1.051494396715924,
+ "learning_rate": 1.1677805038387337e-05,
+ "loss": 0.9045,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9692116029848852,
+ "learning_rate": 1.1671660974379554e-05,
+ "loss": 0.9321,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.897659128365413,
+ "learning_rate": 1.1665516261117914e-05,
+ "loss": 0.8948,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9180979807670455,
+ "learning_rate": 1.1659370900988946e-05,
+ "loss": 0.9649,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8924094087371933,
+ "learning_rate": 1.165322489637944e-05,
+ "loss": 0.9686,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8289188195130809,
+ "learning_rate": 1.164707824967644e-05,
+ "loss": 0.9305,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9373337584613777,
+ "learning_rate": 1.1640930963267226e-05,
+ "loss": 0.93,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7762393780235348,
+ "learning_rate": 1.1634783039539328e-05,
+ "loss": 0.8451,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.7838013185687841,
+ "learning_rate": 1.162863448088054e-05,
+ "loss": 0.8454,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.9639296692594781,
+ "learning_rate": 1.1622485289678886e-05,
+ "loss": 0.919,
+ "step": 2415
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.8039691890795312,
+ "learning_rate": 1.1616335468322641e-05,
+ "loss": 0.8682,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8193193101218789,
+ "learning_rate": 1.1610185019200324e-05,
+ "loss": 0.8697,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9249669703522008,
+ "learning_rate": 1.1604033944700701e-05,
+ "loss": 0.9784,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.909036611522033,
+ "learning_rate": 1.1597882247212776e-05,
+ "loss": 0.9195,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8067098598232343,
+ "learning_rate": 1.15917299291258e-05,
+ "loss": 0.9436,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8585124511047816,
+ "learning_rate": 1.1585576992829261e-05,
+ "loss": 0.9204,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9351868646055391,
+ "learning_rate": 1.1579423440712887e-05,
+ "loss": 0.9726,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8808652735933448,
+ "learning_rate": 1.1573269275166652e-05,
+ "loss": 0.9028,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9059833389151828,
+ "learning_rate": 1.1567114498580758e-05,
+ "loss": 0.9405,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9629498856687665,
+ "learning_rate": 1.1560959113345649e-05,
+ "loss": 0.9129,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8720052204637136,
+ "learning_rate": 1.1554803121852005e-05,
+ "loss": 0.908,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8366220055342847,
+ "learning_rate": 1.1548646526490749e-05,
+ "loss": 0.9286,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9400754416872955,
+ "learning_rate": 1.1542489329653024e-05,
+ "loss": 0.9263,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9112230368157715,
+ "learning_rate": 1.153633153373022e-05,
+ "loss": 0.9317,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8822410754901164,
+ "learning_rate": 1.1530173141113947e-05,
+ "loss": 0.9708,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8788759327004128,
+ "learning_rate": 1.1524014154196063e-05,
+ "loss": 0.867,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9917300908341928,
+ "learning_rate": 1.1517854575368644e-05,
+ "loss": 0.9319,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8649965706468556,
+ "learning_rate": 1.1511694407023994e-05,
+ "loss": 0.8463,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.7171466088532696,
+ "learning_rate": 1.1505533651554654e-05,
+ "loss": 0.8633,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8994079257246275,
+ "learning_rate": 1.1499372311353398e-05,
+ "loss": 0.8892,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.934409689777611,
+ "learning_rate": 1.149321038881321e-05,
+ "loss": 0.9409,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9147537401355629,
+ "learning_rate": 1.1487047886327314e-05,
+ "loss": 0.9153,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9201992531935073,
+ "learning_rate": 1.1480884806289151e-05,
+ "loss": 0.9546,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.894740123105788,
+ "learning_rate": 1.1474721151092397e-05,
+ "loss": 0.9233,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8595033208454251,
+ "learning_rate": 1.1468556923130943e-05,
+ "loss": 0.8677,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9990850953496287,
+ "learning_rate": 1.14623921247989e-05,
+ "loss": 0.9033,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.80162258509165,
+ "learning_rate": 1.1456226758490603e-05,
+ "loss": 0.8522,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 1.1512931236925705,
+ "learning_rate": 1.1450060826600618e-05,
+ "loss": 0.9087,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8856249226151223,
+ "learning_rate": 1.1443894331523718e-05,
+ "loss": 0.9191,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9244246223023082,
+ "learning_rate": 1.1437727275654893e-05,
+ "loss": 0.8689,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.945424413657258,
+ "learning_rate": 1.1431559661389362e-05,
+ "loss": 0.9457,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8659502973724246,
+ "learning_rate": 1.1425391491122557e-05,
+ "loss": 0.8955,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9256327243109714,
+ "learning_rate": 1.141922276725012e-05,
+ "loss": 0.9343,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8501313896870167,
+ "learning_rate": 1.1413053492167915e-05,
+ "loss": 0.9272,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9122945426028953,
+ "learning_rate": 1.1406883668272015e-05,
+ "loss": 0.8923,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.7699343783018004,
+ "learning_rate": 1.140071329795871e-05,
+ "loss": 0.8427,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8582187745030887,
+ "learning_rate": 1.13945423836245e-05,
+ "loss": 0.9264,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 1.1938291860294514,
+ "learning_rate": 1.1388370927666102e-05,
+ "loss": 0.9376,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9804048218156324,
+ "learning_rate": 1.1382198932480429e-05,
+ "loss": 0.9424,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9206388057962297,
+ "learning_rate": 1.1376026400464616e-05,
+ "loss": 0.8612,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8036855246615522,
+ "learning_rate": 1.136985333401601e-05,
+ "loss": 0.8687,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8001987189105646,
+ "learning_rate": 1.1363679735532151e-05,
+ "loss": 0.8955,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8979648843604973,
+ "learning_rate": 1.1357505607410797e-05,
+ "loss": 0.9465,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8946428094039034,
+ "learning_rate": 1.1351330952049908e-05,
+ "loss": 0.9064,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9144929222444373,
+ "learning_rate": 1.1345155771847646e-05,
+ "loss": 0.8163,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9557501465675763,
+ "learning_rate": 1.1338980069202388e-05,
+ "loss": 0.9097,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8570136915849398,
+ "learning_rate": 1.1332803846512697e-05,
+ "loss": 0.9295,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.855576646382349,
+ "learning_rate": 1.1326627106177348e-05,
+ "loss": 0.9629,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.81447619038449,
+ "learning_rate": 1.132044985059532e-05,
+ "loss": 0.8743,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.9529747391366109,
+ "learning_rate": 1.1314272082165785e-05,
+ "loss": 0.9615,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8102666542822321,
+ "learning_rate": 1.1308093803288119e-05,
+ "loss": 0.8673,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.8935530920483463,
+ "learning_rate": 1.130191501636189e-05,
+ "loss": 0.8877,
+ "step": 2467
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.926405774997204,
+ "learning_rate": 1.1295735723786872e-05,
+ "loss": 0.9287,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.942888474563503,
+ "learning_rate": 1.1289555927963032e-05,
+ "loss": 0.9102,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9162290160368131,
+ "learning_rate": 1.1283375631290528e-05,
+ "loss": 0.9669,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8473067870722337,
+ "learning_rate": 1.1277194836169714e-05,
+ "loss": 0.9073,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0371891476416797,
+ "learning_rate": 1.1271013545001144e-05,
+ "loss": 0.9548,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8568391256075264,
+ "learning_rate": 1.1264831760185562e-05,
+ "loss": 0.939,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.05001553258945,
+ "learning_rate": 1.1258649484123895e-05,
+ "loss": 0.9385,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8948583875364915,
+ "learning_rate": 1.1252466719217274e-05,
+ "loss": 0.918,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8940166872402399,
+ "learning_rate": 1.1246283467867012e-05,
+ "loss": 0.974,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8814746308200198,
+ "learning_rate": 1.1240099732474613e-05,
+ "loss": 0.9408,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8568835057729686,
+ "learning_rate": 1.1233915515441765e-05,
+ "loss": 0.8711,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0324075778327733,
+ "learning_rate": 1.1227730819170349e-05,
+ "loss": 0.9759,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9800290942423504,
+ "learning_rate": 1.1221545646062431e-05,
+ "loss": 0.9136,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9742522526641997,
+ "learning_rate": 1.121535999852026e-05,
+ "loss": 0.9483,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.866587396715058,
+ "learning_rate": 1.1209173878946271e-05,
+ "loss": 0.889,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8542990632480492,
+ "learning_rate": 1.1202987289743078e-05,
+ "loss": 0.8621,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9038068252517323,
+ "learning_rate": 1.1196800233313488e-05,
+ "loss": 0.8864,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9055792416506564,
+ "learning_rate": 1.1190612712060475e-05,
+ "loss": 0.9625,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7488042866430171,
+ "learning_rate": 1.1184424728387204e-05,
+ "loss": 0.8115,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8285643307979524,
+ "learning_rate": 1.1178236284697017e-05,
+ "loss": 0.9556,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8858001716554392,
+ "learning_rate": 1.1172047383393434e-05,
+ "loss": 0.8987,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8898845243154941,
+ "learning_rate": 1.1165858026880151e-05,
+ "loss": 0.9275,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0837169665343633,
+ "learning_rate": 1.1159668217561048e-05,
+ "loss": 0.9527,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9884515828046143,
+ "learning_rate": 1.115347795784017e-05,
+ "loss": 0.9462,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8500678195789728,
+ "learning_rate": 1.1147287250121745e-05,
+ "loss": 0.8821,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9236208787083816,
+ "learning_rate": 1.1141096096810174e-05,
+ "loss": 0.8708,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9006687358510446,
+ "learning_rate": 1.1134904500310029e-05,
+ "loss": 0.8836,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8915056825227955,
+ "learning_rate": 1.1128712463026048e-05,
+ "loss": 0.9426,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.671204049922936,
+ "learning_rate": 1.1122519987363156e-05,
+ "loss": 0.7995,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9088402297755798,
+ "learning_rate": 1.1116327075726436e-05,
+ "loss": 0.9099,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9759297843694378,
+ "learning_rate": 1.1110133730521142e-05,
+ "loss": 0.9089,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9295096357487002,
+ "learning_rate": 1.11039399541527e-05,
+ "loss": 0.89,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9767473281952905,
+ "learning_rate": 1.10977457490267e-05,
+ "loss": 0.9199,
+ "step": 2500
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.822760323717349,
+ "learning_rate": 1.10915511175489e-05,
+ "loss": 0.9192,
+ "step": 2501
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9867890064903402,
+ "learning_rate": 1.1085356062125225e-05,
+ "loss": 0.9213,
+ "step": 2502
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9791577772382069,
+ "learning_rate": 1.1079160585161759e-05,
+ "loss": 0.9191,
+ "step": 2503
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9997639383262091,
+ "learning_rate": 1.107296468906476e-05,
+ "loss": 0.9277,
+ "step": 2504
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7079840333825845,
+ "learning_rate": 1.106676837624064e-05,
+ "loss": 0.8225,
+ "step": 2505
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9006607326467191,
+ "learning_rate": 1.1060571649095972e-05,
+ "loss": 0.9296,
+ "step": 2506
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8842182000095816,
+ "learning_rate": 1.10543745100375e-05,
+ "loss": 0.8679,
+ "step": 2507
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1043890880491394,
+ "learning_rate": 1.1048176961472114e-05,
+ "loss": 0.9272,
+ "step": 2508
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9480601505781148,
+ "learning_rate": 1.1041979005806876e-05,
+ "loss": 0.9394,
+ "step": 2509
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.0075560449541396,
+ "learning_rate": 1.1035780645449001e-05,
+ "loss": 0.9319,
+ "step": 2510
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.8862951055447905,
+ "learning_rate": 1.1029581882805857e-05,
+ "loss": 0.9331,
+ "step": 2511
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.7870628169230721,
+ "learning_rate": 1.1023382720284973e-05,
+ "loss": 0.8908,
+ "step": 2512
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9570861992825871,
+ "learning_rate": 1.1017183160294033e-05,
+ "loss": 0.9369,
+ "step": 2513
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.961174346938788,
+ "learning_rate": 1.1010983205240878e-05,
+ "loss": 0.8855,
+ "step": 2514
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9144345625655905,
+ "learning_rate": 1.1004782857533488e-05,
+ "loss": 0.9364,
+ "step": 2515
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9584325102763894,
+ "learning_rate": 1.099858211958002e-05,
+ "loss": 0.9412,
+ "step": 2516
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9021245823914934,
+ "learning_rate": 1.0992380993788763e-05,
+ "loss": 0.8482,
+ "step": 2517
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1147423454652747,
+ "learning_rate": 1.0986179482568162e-05,
+ "loss": 1.0222,
+ "step": 2518
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.9486015078056638,
+ "learning_rate": 1.0979977588326815e-05,
+ "loss": 0.9276,
+ "step": 2519
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 1.1593612070416686,
+ "learning_rate": 1.0973775313473465e-05,
+ "loss": 0.9413,
+ "step": 2520
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8710061882469988,
+ "learning_rate": 1.0967572660417001e-05,
+ "loss": 0.8537,
+ "step": 2521
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8996327546711663,
+ "learning_rate": 1.0961369631566468e-05,
+ "loss": 0.9361,
+ "step": 2522
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8064475456226737,
+ "learning_rate": 1.0955166229331048e-05,
+ "loss": 0.9153,
+ "step": 2523
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8608021853676151,
+ "learning_rate": 1.0948962456120068e-05,
+ "loss": 0.9235,
+ "step": 2524
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8852260339823464,
+ "learning_rate": 1.0942758314343007e-05,
+ "loss": 0.9461,
+ "step": 2525
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8678316373227288,
+ "learning_rate": 1.0936553806409482e-05,
+ "loss": 0.8729,
+ "step": 2526
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8725130974425622,
+ "learning_rate": 1.0930348934729249e-05,
+ "loss": 0.9332,
+ "step": 2527
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8197278256630408,
+ "learning_rate": 1.0924143701712211e-05,
+ "loss": 0.9052,
+ "step": 2528
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9227071384165294,
+ "learning_rate": 1.0917938109768404e-05,
+ "loss": 0.8721,
+ "step": 2529
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9202310669956766,
+ "learning_rate": 1.0911732161308014e-05,
+ "loss": 0.8468,
+ "step": 2530
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9632191462715535,
+ "learning_rate": 1.0905525858741364e-05,
+ "loss": 0.9707,
+ "step": 2531
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0183887138446068,
+ "learning_rate": 1.08993192044789e-05,
+ "loss": 0.9825,
+ "step": 2532
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.02742785347052,
+ "learning_rate": 1.089311220093122e-05,
+ "loss": 0.9257,
+ "step": 2533
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7652254275329015,
+ "learning_rate": 1.0886904850509052e-05,
+ "loss": 0.8632,
+ "step": 2534
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9097855200707109,
+ "learning_rate": 1.0880697155623264e-05,
+ "loss": 0.9146,
+ "step": 2535
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.956441293096091,
+ "learning_rate": 1.0874489118684846e-05,
+ "loss": 0.9407,
+ "step": 2536
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8103964493668271,
+ "learning_rate": 1.086828074210493e-05,
+ "loss": 0.8975,
+ "step": 2537
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8857215415773282,
+ "learning_rate": 1.0862072028294777e-05,
+ "loss": 0.8588,
+ "step": 2538
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8916396986032709,
+ "learning_rate": 1.0855862979665788e-05,
+ "loss": 0.9125,
+ "step": 2539
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0678840230011677,
+ "learning_rate": 1.0849653598629477e-05,
+ "loss": 0.9093,
+ "step": 2540
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7773438338658273,
+ "learning_rate": 1.0843443887597495e-05,
+ "loss": 0.9155,
+ "step": 2541
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9101379389107885,
+ "learning_rate": 1.0837233848981632e-05,
+ "loss": 0.8771,
+ "step": 2542
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8538385117114854,
+ "learning_rate": 1.0831023485193787e-05,
+ "loss": 0.8299,
+ "step": 2543
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9030682333200006,
+ "learning_rate": 1.0824812798645997e-05,
+ "loss": 0.9705,
+ "step": 2544
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9288473783663039,
+ "learning_rate": 1.0818601791750418e-05,
+ "loss": 0.9488,
+ "step": 2545
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8950245714539972,
+ "learning_rate": 1.0812390466919337e-05,
+ "loss": 0.9328,
+ "step": 2546
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9564521059009605,
+ "learning_rate": 1.0806178826565162e-05,
+ "loss": 0.9276,
+ "step": 2547
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8761287097782974,
+ "learning_rate": 1.0799966873100419e-05,
+ "loss": 0.9244,
+ "step": 2548
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 1.0412983702717888,
+ "learning_rate": 1.0793754608937758e-05,
+ "loss": 0.9736,
+ "step": 2549
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9437424331698793,
+ "learning_rate": 1.0787542036489955e-05,
+ "loss": 0.9179,
+ "step": 2550
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7804661163473227,
+ "learning_rate": 1.0781329158169902e-05,
+ "loss": 0.9121,
+ "step": 2551
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8831772952539662,
+ "learning_rate": 1.0775115976390607e-05,
+ "loss": 0.8432,
+ "step": 2552
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9938871865340106,
+ "learning_rate": 1.0768902493565197e-05,
+ "loss": 0.9433,
+ "step": 2553
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8329659565497897,
+ "learning_rate": 1.0762688712106918e-05,
+ "loss": 0.8953,
+ "step": 2554
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8616458461988764,
+ "learning_rate": 1.0756474634429133e-05,
+ "loss": 0.8472,
+ "step": 2555
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.749284698387687,
+ "learning_rate": 1.0750260262945314e-05,
+ "loss": 0.848,
+ "step": 2556
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7900090795422248,
+ "learning_rate": 1.0744045600069055e-05,
+ "loss": 0.8288,
+ "step": 2557
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.90878870144412,
+ "learning_rate": 1.0737830648214063e-05,
+ "loss": 0.9224,
+ "step": 2558
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8691331482683546,
+ "learning_rate": 1.0731615409794144e-05,
+ "loss": 0.8839,
+ "step": 2559
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8732184108501752,
+ "learning_rate": 1.0725399887223234e-05,
+ "loss": 0.8877,
+ "step": 2560
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8291325762736458,
+ "learning_rate": 1.0719184082915364e-05,
+ "loss": 0.8705,
+ "step": 2561
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9860547678378869,
+ "learning_rate": 1.0712967999284682e-05,
+ "loss": 0.8344,
+ "step": 2562
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8049678356130826,
+ "learning_rate": 1.0706751638745448e-05,
+ "loss": 0.9237,
+ "step": 2563
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.7731868167259505,
+ "learning_rate": 1.0700535003712023e-05,
+ "loss": 0.8839,
+ "step": 2564
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.86604478722542,
+ "learning_rate": 1.069431809659887e-05,
+ "loss": 0.8888,
+ "step": 2565
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8355266940892986,
+ "learning_rate": 1.068810091982057e-05,
+ "loss": 0.8851,
+ "step": 2566
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8424596228485154,
+ "learning_rate": 1.0681883475791803e-05,
+ "loss": 0.8894,
+ "step": 2567
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9438666012713255,
+ "learning_rate": 1.067566576692735e-05,
+ "loss": 0.9061,
+ "step": 2568
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9137475284130079,
+ "learning_rate": 1.0669447795642103e-05,
+ "loss": 0.9658,
+ "step": 2569
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9177491280420521,
+ "learning_rate": 1.066322956435104e-05,
+ "loss": 0.9652,
+ "step": 2570
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.8151440461337732,
+ "learning_rate": 1.065701107546926e-05,
+ "loss": 0.9404,
+ "step": 2571
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.9517736721987609,
+ "learning_rate": 1.065079233141195e-05,
+ "loss": 0.9008,
+ "step": 2572
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9214817977511858,
+ "learning_rate": 1.0644573334594395e-05,
+ "loss": 0.9013,
+ "step": 2573
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9935522415670198,
+ "learning_rate": 1.0638354087431986e-05,
+ "loss": 0.9906,
+ "step": 2574
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.831657275252258,
+ "learning_rate": 1.0632134592340204e-05,
+ "loss": 0.8538,
+ "step": 2575
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9208221535422952,
+ "learning_rate": 1.0625914851734632e-05,
+ "loss": 0.9353,
+ "step": 2576
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8362787872462575,
+ "learning_rate": 1.0619694868030943e-05,
+ "loss": 0.8935,
+ "step": 2577
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8724694745513101,
+ "learning_rate": 1.0613474643644907e-05,
+ "loss": 0.942,
+ "step": 2578
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9772096354169073,
+ "learning_rate": 1.0607254180992391e-05,
+ "loss": 0.9321,
+ "step": 2579
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8178722945247205,
+ "learning_rate": 1.0601033482489346e-05,
+ "loss": 0.9227,
+ "step": 2580
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7938438545130856,
+ "learning_rate": 1.0594812550551826e-05,
+ "loss": 0.8659,
+ "step": 2581
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9769194053293457,
+ "learning_rate": 1.058859138759596e-05,
+ "loss": 0.9612,
+ "step": 2582
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9496683326132712,
+ "learning_rate": 1.0582369996037985e-05,
+ "loss": 0.9323,
+ "step": 2583
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8764871153485599,
+ "learning_rate": 1.0576148378294213e-05,
+ "loss": 0.8985,
+ "step": 2584
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9069334600949017,
+ "learning_rate": 1.056992653678105e-05,
+ "loss": 0.8981,
+ "step": 2585
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9099650409820482,
+ "learning_rate": 1.0563704473914986e-05,
+ "loss": 0.9368,
+ "step": 2586
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9646440392923197,
+ "learning_rate": 1.0557482192112603e-05,
+ "loss": 0.9668,
+ "step": 2587
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9896058753441028,
+ "learning_rate": 1.0551259693790556e-05,
+ "loss": 0.913,
+ "step": 2588
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9437713256693829,
+ "learning_rate": 1.0545036981365601e-05,
+ "loss": 0.988,
+ "step": 2589
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8363912318332196,
+ "learning_rate": 1.053881405725456e-05,
+ "loss": 0.8804,
+ "step": 2590
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8074888738043394,
+ "learning_rate": 1.0532590923874349e-05,
+ "loss": 0.8875,
+ "step": 2591
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7917758946751435,
+ "learning_rate": 1.0526367583641958e-05,
+ "loss": 0.9155,
+ "step": 2592
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 1.0004308869341767,
+ "learning_rate": 1.0520144038974468e-05,
+ "loss": 0.9115,
+ "step": 2593
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9334426047016258,
+ "learning_rate": 1.0513920292289021e-05,
+ "loss": 0.9215,
+ "step": 2594
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.971669182888355,
+ "learning_rate": 1.0507696346002857e-05,
+ "loss": 1.002,
+ "step": 2595
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9048044665335145,
+ "learning_rate": 1.0501472202533285e-05,
+ "loss": 0.9214,
+ "step": 2596
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9450349281998583,
+ "learning_rate": 1.0495247864297684e-05,
+ "loss": 0.8992,
+ "step": 2597
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7847922346316105,
+ "learning_rate": 1.0489023333713522e-05,
+ "loss": 0.8048,
+ "step": 2598
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.79524184009825,
+ "learning_rate": 1.0482798613198328e-05,
+ "loss": 0.8964,
+ "step": 2599
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9524976603435144,
+ "learning_rate": 1.047657370516972e-05,
+ "loss": 0.9111,
+ "step": 2600
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9081215324462001,
+ "learning_rate": 1.0470348612045376e-05,
+ "loss": 0.949,
+ "step": 2601
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9045225841688138,
+ "learning_rate": 1.0464123336243049e-05,
+ "loss": 0.8287,
+ "step": 2602
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8380398750759631,
+ "learning_rate": 1.0457897880180566e-05,
+ "loss": 0.9399,
+ "step": 2603
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9087598109748737,
+ "learning_rate": 1.0451672246275826e-05,
+ "loss": 0.9585,
+ "step": 2604
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8496588018443858,
+ "learning_rate": 1.0445446436946788e-05,
+ "loss": 0.8778,
+ "step": 2605
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8673556842396951,
+ "learning_rate": 1.0439220454611486e-05,
+ "loss": 0.916,
+ "step": 2606
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9558596086881778,
+ "learning_rate": 1.0432994301688021e-05,
+ "loss": 0.9003,
+ "step": 2607
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8932203872619231,
+ "learning_rate": 1.0426767980594559e-05,
+ "loss": 0.9011,
+ "step": 2608
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8709678413237031,
+ "learning_rate": 1.0420541493749332e-05,
+ "loss": 0.9188,
+ "step": 2609
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9443307261781778,
+ "learning_rate": 1.0414314843570634e-05,
+ "loss": 0.9224,
+ "step": 2610
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.8442843612430706,
+ "learning_rate": 1.0408088032476822e-05,
+ "loss": 0.9342,
+ "step": 2611
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9057892841452813,
+ "learning_rate": 1.0401861062886324e-05,
+ "loss": 0.9421,
+ "step": 2612
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.95722829726723,
+ "learning_rate": 1.0395633937217622e-05,
+ "loss": 0.9237,
+ "step": 2613
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.887245273579422,
+ "learning_rate": 1.0389406657889254e-05,
+ "loss": 0.8691,
+ "step": 2614
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9952294014524894,
+ "learning_rate": 1.0383179227319826e-05,
+ "loss": 0.9472,
+ "step": 2615
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9183848153399197,
+ "learning_rate": 1.0376951647928007e-05,
+ "loss": 0.9701,
+ "step": 2616
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9202787300400367,
+ "learning_rate": 1.0370723922132506e-05,
+ "loss": 0.9518,
+ "step": 2617
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 1.1518615287501688,
+ "learning_rate": 1.036449605235211e-05,
+ "loss": 0.9557,
+ "step": 2618
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9091438334788177,
+ "learning_rate": 1.0358268041005644e-05,
+ "loss": 0.8786,
+ "step": 2619
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9425613409631651,
+ "learning_rate": 1.0352039890511997e-05,
+ "loss": 0.9225,
+ "step": 2620
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.9035868714083709,
+ "learning_rate": 1.034581160329012e-05,
+ "loss": 0.8795,
+ "step": 2621
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.969566685701797,
+ "learning_rate": 1.0339583181758997e-05,
+ "loss": 0.9597,
+ "step": 2622
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.7752328599278581,
+ "learning_rate": 1.033335462833768e-05,
+ "loss": 0.8672,
+ "step": 2623
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.894538400316396,
+ "learning_rate": 1.0327125945445265e-05,
+ "loss": 0.923,
+ "step": 2624
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0020503476584997,
+ "learning_rate": 1.0320897135500904e-05,
+ "loss": 0.8843,
+ "step": 2625
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9762706053326204,
+ "learning_rate": 1.0314668200923791e-05,
+ "loss": 0.8628,
+ "step": 2626
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7306092357943745,
+ "learning_rate": 1.0308439144133177e-05,
+ "loss": 0.8269,
+ "step": 2627
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8450381002190108,
+ "learning_rate": 1.0302209967548354e-05,
+ "loss": 0.8759,
+ "step": 2628
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0484126440621628,
+ "learning_rate": 1.029598067358866e-05,
+ "loss": 0.9987,
+ "step": 2629
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0162609495770896,
+ "learning_rate": 1.0289751264673485e-05,
+ "loss": 0.9406,
+ "step": 2630
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8857328642856026,
+ "learning_rate": 1.0283521743222256e-05,
+ "loss": 0.8905,
+ "step": 2631
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9613453171218498,
+ "learning_rate": 1.0277292111654447e-05,
+ "loss": 0.8706,
+ "step": 2632
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9585597791516702,
+ "learning_rate": 1.0271062372389582e-05,
+ "loss": 0.9398,
+ "step": 2633
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.02185310005277,
+ "learning_rate": 1.0264832527847212e-05,
+ "loss": 1.015,
+ "step": 2634
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8506999074950125,
+ "learning_rate": 1.0258602580446941e-05,
+ "loss": 0.9413,
+ "step": 2635
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7756019521705919,
+ "learning_rate": 1.0252372532608405e-05,
+ "loss": 0.7947,
+ "step": 2636
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8242268732820904,
+ "learning_rate": 1.024614238675129e-05,
+ "loss": 0.8543,
+ "step": 2637
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9557258859558818,
+ "learning_rate": 1.0239912145295303e-05,
+ "loss": 0.9363,
+ "step": 2638
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.885606252981095,
+ "learning_rate": 1.0233681810660207e-05,
+ "loss": 0.9005,
+ "step": 2639
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.87261949669791,
+ "learning_rate": 1.0227451385265788e-05,
+ "loss": 0.9026,
+ "step": 2640
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8351181251793705,
+ "learning_rate": 1.022122087153187e-05,
+ "loss": 0.9176,
+ "step": 2641
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0067856604494778,
+ "learning_rate": 1.0214990271878319e-05,
+ "loss": 0.9134,
+ "step": 2642
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.910586530408183,
+ "learning_rate": 1.0208759588725016e-05,
+ "loss": 0.9316,
+ "step": 2643
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8404782529309832,
+ "learning_rate": 1.0202528824491899e-05,
+ "loss": 0.8693,
+ "step": 2644
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9355122862456461,
+ "learning_rate": 1.0196297981598921e-05,
+ "loss": 0.9204,
+ "step": 2645
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.874851430526491,
+ "learning_rate": 1.019006706246607e-05,
+ "loss": 0.913,
+ "step": 2646
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9123862236178606,
+ "learning_rate": 1.018383606951336e-05,
+ "loss": 0.8558,
+ "step": 2647
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8083236091623784,
+ "learning_rate": 1.0177605005160837e-05,
+ "loss": 0.8599,
+ "step": 2648
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7662563643188285,
+ "learning_rate": 1.0171373871828578e-05,
+ "loss": 0.85,
+ "step": 2649
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8626881415928633,
+ "learning_rate": 1.0165142671936685e-05,
+ "loss": 0.8551,
+ "step": 2650
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7521555912863335,
+ "learning_rate": 1.0158911407905279e-05,
+ "loss": 0.8779,
+ "step": 2651
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.852450834594269,
+ "learning_rate": 1.0152680082154514e-05,
+ "loss": 0.9091,
+ "step": 2652
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8607550615525535,
+ "learning_rate": 1.0146448697104561e-05,
+ "loss": 0.8538,
+ "step": 2653
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9225008247575006,
+ "learning_rate": 1.0140217255175626e-05,
+ "loss": 0.9383,
+ "step": 2654
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9667517957388785,
+ "learning_rate": 1.013398575878792e-05,
+ "loss": 0.9425,
+ "step": 2655
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9444714811857227,
+ "learning_rate": 1.0127754210361694e-05,
+ "loss": 0.9294,
+ "step": 2656
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9642924515137425,
+ "learning_rate": 1.0121522612317204e-05,
+ "loss": 0.9386,
+ "step": 2657
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8077968807746465,
+ "learning_rate": 1.011529096707473e-05,
+ "loss": 0.8755,
+ "step": 2658
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.018675145764501,
+ "learning_rate": 1.0109059277054574e-05,
+ "loss": 0.8789,
+ "step": 2659
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8959489891860603,
+ "learning_rate": 1.010282754467705e-05,
+ "loss": 0.9555,
+ "step": 2660
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0289885314244605,
+ "learning_rate": 1.0096595772362492e-05,
+ "loss": 0.9455,
+ "step": 2661
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8010121594109141,
+ "learning_rate": 1.0090363962531251e-05,
+ "loss": 0.8189,
+ "step": 2662
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8850716538987777,
+ "learning_rate": 1.0084132117603689e-05,
+ "loss": 0.8895,
+ "step": 2663
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8146922307406169,
+ "learning_rate": 1.0077900240000181e-05,
+ "loss": 0.9185,
+ "step": 2664
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9555548419652233,
+ "learning_rate": 1.0071668332141115e-05,
+ "loss": 0.9544,
+ "step": 2665
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8097655953213143,
+ "learning_rate": 1.0065436396446899e-05,
+ "loss": 0.8509,
+ "step": 2666
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8715681140386908,
+ "learning_rate": 1.0059204435337938e-05,
+ "loss": 0.938,
+ "step": 2667
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9550607483205545,
+ "learning_rate": 1.0052972451234656e-05,
+ "loss": 0.9438,
+ "step": 2668
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.7403061380300183,
+ "learning_rate": 1.0046740446557485e-05,
+ "loss": 0.845,
+ "step": 2669
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8548122937206981,
+ "learning_rate": 1.0040508423726865e-05,
+ "loss": 0.9133,
+ "step": 2670
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.822985306694666,
+ "learning_rate": 1.0034276385163238e-05,
+ "loss": 0.8613,
+ "step": 2671
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9792028962147389,
+ "learning_rate": 1.0028044333287056e-05,
+ "loss": 0.9516,
+ "step": 2672
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 1.0057001649907389,
+ "learning_rate": 1.002181227051878e-05,
+ "loss": 0.9627,
+ "step": 2673
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8175201200990784,
+ "learning_rate": 1.0015580199278873e-05,
+ "loss": 0.889,
+ "step": 2674
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.9916316391604548,
+ "learning_rate": 1.0009348121987795e-05,
+ "loss": 0.9594,
+ "step": 2675
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.8684745974465504,
+ "learning_rate": 1.000311604106601e-05,
+ "loss": 0.9809,
+ "step": 2676
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9155440255234769,
+ "learning_rate": 9.996883958933993e-06,
+ "loss": 0.9064,
+ "step": 2677
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8640568678762447,
+ "learning_rate": 9.99065187801221e-06,
+ "loss": 0.8847,
+ "step": 2678
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9504764124917944,
+ "learning_rate": 9.984419800721132e-06,
+ "loss": 0.9874,
+ "step": 2679
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.968686441097706,
+ "learning_rate": 9.978187729481218e-06,
+ "loss": 0.9961,
+ "step": 2680
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.097545469495621,
+ "learning_rate": 9.971955666712945e-06,
+ "loss": 0.8897,
+ "step": 2681
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9265533664930266,
+ "learning_rate": 9.965723614836764e-06,
+ "loss": 0.8999,
+ "step": 2682
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8529524853563816,
+ "learning_rate": 9.959491576273139e-06,
+ "loss": 0.9192,
+ "step": 2683
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0370520750567518,
+ "learning_rate": 9.95325955344252e-06,
+ "loss": 0.9016,
+ "step": 2684
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9480273223579014,
+ "learning_rate": 9.947027548765347e-06,
+ "loss": 0.8892,
+ "step": 2685
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8972914419458259,
+ "learning_rate": 9.940795564662064e-06,
+ "loss": 0.9388,
+ "step": 2686
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.133889974782131,
+ "learning_rate": 9.934563603553103e-06,
+ "loss": 0.8861,
+ "step": 2687
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7846840406978326,
+ "learning_rate": 9.928331667858886e-06,
+ "loss": 0.9225,
+ "step": 2688
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9160207276009006,
+ "learning_rate": 9.922099759999822e-06,
+ "loss": 0.8547,
+ "step": 2689
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8970412714530798,
+ "learning_rate": 9.915867882396314e-06,
+ "loss": 0.9017,
+ "step": 2690
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9819606853619415,
+ "learning_rate": 9.909636037468754e-06,
+ "loss": 0.8598,
+ "step": 2691
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.959621348155528,
+ "learning_rate": 9.90340422763751e-06,
+ "loss": 0.9305,
+ "step": 2692
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0056031158774708,
+ "learning_rate": 9.897172455322953e-06,
+ "loss": 0.8966,
+ "step": 2693
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.933979224935091,
+ "learning_rate": 9.890940722945429e-06,
+ "loss": 0.9015,
+ "step": 2694
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.111724034217747,
+ "learning_rate": 9.884709032925274e-06,
+ "loss": 0.8763,
+ "step": 2695
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.035387498738903,
+ "learning_rate": 9.878477387682801e-06,
+ "loss": 0.9129,
+ "step": 2696
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9428862283312817,
+ "learning_rate": 9.872245789638308e-06,
+ "loss": 0.8948,
+ "step": 2697
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9925589116215584,
+ "learning_rate": 9.866014241212078e-06,
+ "loss": 0.9153,
+ "step": 2698
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9622771768642312,
+ "learning_rate": 9.859782744824376e-06,
+ "loss": 0.8814,
+ "step": 2699
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9304872266758989,
+ "learning_rate": 9.85355130289544e-06,
+ "loss": 0.961,
+ "step": 2700
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8932655089327036,
+ "learning_rate": 9.84731991784549e-06,
+ "loss": 0.8428,
+ "step": 2701
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8086636779098098,
+ "learning_rate": 9.841088592094726e-06,
+ "loss": 0.8532,
+ "step": 2702
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8452967875903474,
+ "learning_rate": 9.834857328063316e-06,
+ "loss": 0.9471,
+ "step": 2703
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9285138349230967,
+ "learning_rate": 9.828626128171422e-06,
+ "loss": 0.9679,
+ "step": 2704
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7850938005307868,
+ "learning_rate": 9.822394994839164e-06,
+ "loss": 0.8158,
+ "step": 2705
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9142247032151152,
+ "learning_rate": 9.816163930486643e-06,
+ "loss": 0.9181,
+ "step": 2706
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.824365020661094,
+ "learning_rate": 9.809932937533935e-06,
+ "loss": 0.9214,
+ "step": 2707
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7450186719300673,
+ "learning_rate": 9.803702018401084e-06,
+ "loss": 0.8249,
+ "step": 2708
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9515052961042607,
+ "learning_rate": 9.797471175508101e-06,
+ "loss": 0.8825,
+ "step": 2709
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.924805724255183,
+ "learning_rate": 9.791240411274982e-06,
+ "loss": 0.9015,
+ "step": 2710
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.175898803493173,
+ "learning_rate": 9.785009728121686e-06,
+ "loss": 0.9106,
+ "step": 2711
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9479481777049079,
+ "learning_rate": 9.778779128468133e-06,
+ "loss": 0.9555,
+ "step": 2712
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0911913695730804,
+ "learning_rate": 9.772548614734217e-06,
+ "loss": 0.9524,
+ "step": 2713
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.87388552824401,
+ "learning_rate": 9.766318189339798e-06,
+ "loss": 0.846,
+ "step": 2714
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9989366645175373,
+ "learning_rate": 9.760087854704697e-06,
+ "loss": 0.9321,
+ "step": 2715
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8908420957462533,
+ "learning_rate": 9.753857613248714e-06,
+ "loss": 0.877,
+ "step": 2716
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8544682850900619,
+ "learning_rate": 9.747627467391596e-06,
+ "loss": 0.9285,
+ "step": 2717
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0050333585009792,
+ "learning_rate": 9.741397419553062e-06,
+ "loss": 0.9874,
+ "step": 2718
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8338996086996195,
+ "learning_rate": 9.735167472152793e-06,
+ "loss": 0.8951,
+ "step": 2719
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.957417706634571,
+ "learning_rate": 9.728937627610425e-06,
+ "loss": 0.9587,
+ "step": 2720
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.7800515164985007,
+ "learning_rate": 9.722707888345553e-06,
+ "loss": 0.8651,
+ "step": 2721
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8038472144653551,
+ "learning_rate": 9.716478256777749e-06,
+ "loss": 0.8878,
+ "step": 2722
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.912356367201173,
+ "learning_rate": 9.710248735326519e-06,
+ "loss": 0.9643,
+ "step": 2723
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9406352227053918,
+ "learning_rate": 9.704019326411344e-06,
+ "loss": 0.9287,
+ "step": 2724
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.8698506239978246,
+ "learning_rate": 9.697790032451651e-06,
+ "loss": 0.8895,
+ "step": 2725
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 1.0442017747831227,
+ "learning_rate": 9.691560855866826e-06,
+ "loss": 0.9219,
+ "step": 2726
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9693225905602304,
+ "learning_rate": 9.685331799076208e-06,
+ "loss": 0.9459,
+ "step": 2727
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.9954073787456973,
+ "learning_rate": 9.6791028644991e-06,
+ "loss": 0.9448,
+ "step": 2728
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.779641152911498,
+ "learning_rate": 9.672874054554738e-06,
+ "loss": 0.8448,
+ "step": 2729
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8927948634318709,
+ "learning_rate": 9.666645371662324e-06,
+ "loss": 0.9204,
+ "step": 2730
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9038143111408246,
+ "learning_rate": 9.660416818241007e-06,
+ "loss": 0.9044,
+ "step": 2731
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9075975682196729,
+ "learning_rate": 9.654188396709882e-06,
+ "loss": 0.8678,
+ "step": 2732
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9432930039421822,
+ "learning_rate": 9.647960109488003e-06,
+ "loss": 0.8383,
+ "step": 2733
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8710647976986235,
+ "learning_rate": 9.64173195899436e-06,
+ "loss": 0.9381,
+ "step": 2734
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9918346181120583,
+ "learning_rate": 9.635503947647894e-06,
+ "loss": 0.8816,
+ "step": 2735
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9295134401466785,
+ "learning_rate": 9.629276077867497e-06,
+ "loss": 0.9255,
+ "step": 2736
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9859454340264127,
+ "learning_rate": 9.623048352071998e-06,
+ "loss": 0.935,
+ "step": 2737
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.013292235329241,
+ "learning_rate": 9.616820772680174e-06,
+ "loss": 0.9863,
+ "step": 2738
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.120477410813826,
+ "learning_rate": 9.610593342110746e-06,
+ "loss": 0.9324,
+ "step": 2739
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9193687922594248,
+ "learning_rate": 9.604366062782381e-06,
+ "loss": 0.8787,
+ "step": 2740
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9159267693554459,
+ "learning_rate": 9.598138937113677e-06,
+ "loss": 0.908,
+ "step": 2741
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9580787692929643,
+ "learning_rate": 9.59191196752318e-06,
+ "loss": 1.0209,
+ "step": 2742
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8277073582461278,
+ "learning_rate": 9.58568515642937e-06,
+ "loss": 0.933,
+ "step": 2743
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8244172635159756,
+ "learning_rate": 9.579458506250668e-06,
+ "loss": 0.936,
+ "step": 2744
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8795723489093242,
+ "learning_rate": 9.573232019405441e-06,
+ "loss": 0.9406,
+ "step": 2745
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9621544806179235,
+ "learning_rate": 9.567005698311982e-06,
+ "loss": 1.0185,
+ "step": 2746
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8621218780247002,
+ "learning_rate": 9.560779545388517e-06,
+ "loss": 0.8546,
+ "step": 2747
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.966305821392136,
+ "learning_rate": 9.554553563053217e-06,
+ "loss": 0.9355,
+ "step": 2748
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8442369405063994,
+ "learning_rate": 9.548327753724181e-06,
+ "loss": 0.8634,
+ "step": 2749
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8642247794460316,
+ "learning_rate": 9.542102119819436e-06,
+ "loss": 0.9376,
+ "step": 2750
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.893929431327071,
+ "learning_rate": 9.535876663756955e-06,
+ "loss": 0.9199,
+ "step": 2751
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8582812650798772,
+ "learning_rate": 9.529651387954628e-06,
+ "loss": 0.8884,
+ "step": 2752
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9921949317796473,
+ "learning_rate": 9.523426294830284e-06,
+ "loss": 0.9579,
+ "step": 2753
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.869543213774848,
+ "learning_rate": 9.517201386801675e-06,
+ "loss": 0.8638,
+ "step": 2754
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9393754312841202,
+ "learning_rate": 9.510976666286484e-06,
+ "loss": 0.9272,
+ "step": 2755
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9466622229984474,
+ "learning_rate": 9.504752135702318e-06,
+ "loss": 0.9234,
+ "step": 2756
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8885316798429939,
+ "learning_rate": 9.498527797466718e-06,
+ "loss": 0.9864,
+ "step": 2757
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8782502025304176,
+ "learning_rate": 9.492303653997146e-06,
+ "loss": 0.9173,
+ "step": 2758
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9152491284120332,
+ "learning_rate": 9.48607970771098e-06,
+ "loss": 0.9389,
+ "step": 2759
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.832979471232123,
+ "learning_rate": 9.479855961025538e-06,
+ "loss": 0.8709,
+ "step": 2760
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.186545412621472,
+ "learning_rate": 9.473632416358045e-06,
+ "loss": 0.9639,
+ "step": 2761
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8884032453897417,
+ "learning_rate": 9.467409076125653e-06,
+ "loss": 0.847,
+ "step": 2762
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8006699169210758,
+ "learning_rate": 9.461185942745443e-06,
+ "loss": 0.8459,
+ "step": 2763
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.7766186551333335,
+ "learning_rate": 9.454963018634402e-06,
+ "loss": 0.8324,
+ "step": 2764
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9237476035546434,
+ "learning_rate": 9.448740306209447e-06,
+ "loss": 0.9729,
+ "step": 2765
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.130076465150555,
+ "learning_rate": 9.442517807887402e-06,
+ "loss": 0.9291,
+ "step": 2766
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9274629901179156,
+ "learning_rate": 9.436295526085016e-06,
+ "loss": 0.8956,
+ "step": 2767
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9466712361262105,
+ "learning_rate": 9.430073463218952e-06,
+ "loss": 0.8847,
+ "step": 2768
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9150813412542628,
+ "learning_rate": 9.423851621705789e-06,
+ "loss": 0.9804,
+ "step": 2769
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8742599731983802,
+ "learning_rate": 9.41763000396202e-06,
+ "loss": 0.8676,
+ "step": 2770
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9846363309688085,
+ "learning_rate": 9.411408612404043e-06,
+ "loss": 0.9437,
+ "step": 2771
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9531542175536194,
+ "learning_rate": 9.40518744944818e-06,
+ "loss": 0.9508,
+ "step": 2772
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8997787513870119,
+ "learning_rate": 9.398966517510654e-06,
+ "loss": 0.9235,
+ "step": 2773
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.664104262609531,
+ "learning_rate": 9.39274581900761e-06,
+ "loss": 0.8186,
+ "step": 2774
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9201932350458625,
+ "learning_rate": 9.386525356355095e-06,
+ "loss": 0.8796,
+ "step": 2775
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9402996802264322,
+ "learning_rate": 9.380305131969059e-06,
+ "loss": 0.9598,
+ "step": 2776
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.8955461641801926,
+ "learning_rate": 9.374085148265372e-06,
+ "loss": 0.9106,
+ "step": 2777
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 1.0874745595922153,
+ "learning_rate": 9.3678654076598e-06,
+ "loss": 0.9892,
+ "step": 2778
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.903339872921699,
+ "learning_rate": 9.361645912568015e-06,
+ "loss": 0.8753,
+ "step": 2779
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.9904002465946561,
+ "learning_rate": 9.355426665405607e-06,
+ "loss": 0.9402,
+ "step": 2780
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8469067459385102,
+ "learning_rate": 9.349207668588053e-06,
+ "loss": 0.8425,
+ "step": 2781
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0114768402522494,
+ "learning_rate": 9.342988924530742e-06,
+ "loss": 0.9161,
+ "step": 2782
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9103410534742161,
+ "learning_rate": 9.336770435648963e-06,
+ "loss": 0.9082,
+ "step": 2783
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9699837672709782,
+ "learning_rate": 9.330552204357904e-06,
+ "loss": 0.9396,
+ "step": 2784
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8517617486881591,
+ "learning_rate": 9.32433423307265e-06,
+ "loss": 0.8949,
+ "step": 2785
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8384103194268664,
+ "learning_rate": 9.318116524208198e-06,
+ "loss": 0.964,
+ "step": 2786
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8258252256435029,
+ "learning_rate": 9.311899080179433e-06,
+ "loss": 0.8494,
+ "step": 2787
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9347155651432546,
+ "learning_rate": 9.305681903401133e-06,
+ "loss": 0.8921,
+ "step": 2788
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8955426782634741,
+ "learning_rate": 9.299464996287984e-06,
+ "loss": 0.8505,
+ "step": 2789
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8799098838384357,
+ "learning_rate": 9.293248361254557e-06,
+ "loss": 0.9311,
+ "step": 2790
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9514760362361673,
+ "learning_rate": 9.287032000715318e-06,
+ "loss": 0.8585,
+ "step": 2791
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.2196833037454613,
+ "learning_rate": 9.28081591708464e-06,
+ "loss": 0.8586,
+ "step": 2792
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8707921410483079,
+ "learning_rate": 9.27460011277677e-06,
+ "loss": 0.9048,
+ "step": 2793
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9470024633107743,
+ "learning_rate": 9.268384590205858e-06,
+ "loss": 0.9016,
+ "step": 2794
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.230101597217662,
+ "learning_rate": 9.262169351785944e-06,
+ "loss": 0.9577,
+ "step": 2795
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7966496630210943,
+ "learning_rate": 9.255954399930948e-06,
+ "loss": 0.8503,
+ "step": 2796
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.960131248334068,
+ "learning_rate": 9.249739737054686e-06,
+ "loss": 0.9492,
+ "step": 2797
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.1034537795974488,
+ "learning_rate": 9.24352536557087e-06,
+ "loss": 0.9407,
+ "step": 2798
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9650956271049898,
+ "learning_rate": 9.237311287893086e-06,
+ "loss": 0.9301,
+ "step": 2799
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9277231690081373,
+ "learning_rate": 9.231097506434808e-06,
+ "loss": 0.8886,
+ "step": 2800
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9053117564958411,
+ "learning_rate": 9.224884023609398e-06,
+ "loss": 0.914,
+ "step": 2801
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8328271726723444,
+ "learning_rate": 9.218670841830098e-06,
+ "loss": 0.8446,
+ "step": 2802
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0950860824950357,
+ "learning_rate": 9.212457963510045e-06,
+ "loss": 0.9847,
+ "step": 2803
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9108256159980279,
+ "learning_rate": 9.206245391062243e-06,
+ "loss": 0.9718,
+ "step": 2804
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8408923512843837,
+ "learning_rate": 9.200033126899585e-06,
+ "loss": 0.9097,
+ "step": 2805
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9072157323548091,
+ "learning_rate": 9.193821173434843e-06,
+ "loss": 0.9807,
+ "step": 2806
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.794629396119892,
+ "learning_rate": 9.187609533080668e-06,
+ "loss": 0.8434,
+ "step": 2807
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.881701854743868,
+ "learning_rate": 9.181398208249583e-06,
+ "loss": 0.9036,
+ "step": 2808
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0559950308849337,
+ "learning_rate": 9.175187201354005e-06,
+ "loss": 0.9313,
+ "step": 2809
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8293751570171252,
+ "learning_rate": 9.168976514806216e-06,
+ "loss": 0.86,
+ "step": 2810
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9534488534755416,
+ "learning_rate": 9.162766151018372e-06,
+ "loss": 0.9765,
+ "step": 2811
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8627872331921922,
+ "learning_rate": 9.156556112402508e-06,
+ "loss": 0.9373,
+ "step": 2812
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9603261805794119,
+ "learning_rate": 9.150346401370528e-06,
+ "loss": 0.9306,
+ "step": 2813
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7963613769807456,
+ "learning_rate": 9.144137020334214e-06,
+ "loss": 0.895,
+ "step": 2814
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9154259134690524,
+ "learning_rate": 9.137927971705223e-06,
+ "loss": 0.922,
+ "step": 2815
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9419590483955624,
+ "learning_rate": 9.131719257895074e-06,
+ "loss": 0.9185,
+ "step": 2816
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9121695497112485,
+ "learning_rate": 9.125510881315159e-06,
+ "loss": 0.835,
+ "step": 2817
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9862344867334731,
+ "learning_rate": 9.119302844376741e-06,
+ "loss": 0.8965,
+ "step": 2818
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0839036206932122,
+ "learning_rate": 9.113095149490951e-06,
+ "loss": 0.9146,
+ "step": 2819
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8693606257602265,
+ "learning_rate": 9.106887799068782e-06,
+ "loss": 0.9378,
+ "step": 2820
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9156228829443188,
+ "learning_rate": 9.100680795521104e-06,
+ "loss": 0.9269,
+ "step": 2821
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.7762666934128619,
+ "learning_rate": 9.09447414125864e-06,
+ "loss": 0.7938,
+ "step": 2822
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.0825736731745133,
+ "learning_rate": 9.088267838691987e-06,
+ "loss": 0.8761,
+ "step": 2823
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8753577519078445,
+ "learning_rate": 9.0820618902316e-06,
+ "loss": 0.8891,
+ "step": 2824
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8354771501831556,
+ "learning_rate": 9.075856298287796e-06,
+ "loss": 0.8822,
+ "step": 2825
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.119267435279118,
+ "learning_rate": 9.069651065270753e-06,
+ "loss": 0.9486,
+ "step": 2826
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.93763987645099,
+ "learning_rate": 9.06344619359052e-06,
+ "loss": 0.9732,
+ "step": 2827
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9530326312872509,
+ "learning_rate": 9.057241685656995e-06,
+ "loss": 0.9508,
+ "step": 2828
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.9085354026670516,
+ "learning_rate": 9.051037543879933e-06,
+ "loss": 0.8433,
+ "step": 2829
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.8443760752540921,
+ "learning_rate": 9.044833770668957e-06,
+ "loss": 0.8847,
+ "step": 2830
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.048973249716788,
+ "learning_rate": 9.038630368433537e-06,
+ "loss": 0.8257,
+ "step": 2831
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 1.179033435596135,
+ "learning_rate": 9.032427339583e-06,
+ "loss": 0.9361,
+ "step": 2832
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8288263919061417,
+ "learning_rate": 9.026224686526539e-06,
+ "loss": 0.9023,
+ "step": 2833
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0334949608567428,
+ "learning_rate": 9.020022411673186e-06,
+ "loss": 0.9181,
+ "step": 2834
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9239114578599628,
+ "learning_rate": 9.013820517431841e-06,
+ "loss": 0.8856,
+ "step": 2835
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9794833723909543,
+ "learning_rate": 9.00761900621124e-06,
+ "loss": 0.8985,
+ "step": 2836
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8084656437114732,
+ "learning_rate": 9.00141788041998e-06,
+ "loss": 0.8729,
+ "step": 2837
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9206779727809926,
+ "learning_rate": 8.99521714246651e-06,
+ "loss": 0.9039,
+ "step": 2838
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9466698208250063,
+ "learning_rate": 8.989016794759127e-06,
+ "loss": 0.947,
+ "step": 2839
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8747635898786159,
+ "learning_rate": 8.98281683970597e-06,
+ "loss": 0.9588,
+ "step": 2840
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9499809821242415,
+ "learning_rate": 8.97661727971503e-06,
+ "loss": 0.9875,
+ "step": 2841
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9813451295934587,
+ "learning_rate": 8.970418117194146e-06,
+ "loss": 0.9868,
+ "step": 2842
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8319625168492422,
+ "learning_rate": 8.964219354550999e-06,
+ "loss": 0.8635,
+ "step": 2843
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9634967464584698,
+ "learning_rate": 8.958020994193124e-06,
+ "loss": 0.9198,
+ "step": 2844
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8756053522366924,
+ "learning_rate": 8.951823038527887e-06,
+ "loss": 0.8431,
+ "step": 2845
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.813234190508048,
+ "learning_rate": 8.945625489962503e-06,
+ "loss": 0.9237,
+ "step": 2846
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8894422679044989,
+ "learning_rate": 8.93942835090403e-06,
+ "loss": 0.8921,
+ "step": 2847
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9217423883117492,
+ "learning_rate": 8.933231623759365e-06,
+ "loss": 0.9487,
+ "step": 2848
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9146171666403465,
+ "learning_rate": 8.927035310935241e-06,
+ "loss": 0.9024,
+ "step": 2849
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8417513080210457,
+ "learning_rate": 8.920839414838243e-06,
+ "loss": 0.9186,
+ "step": 2850
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.2267848524369835,
+ "learning_rate": 8.914643937874778e-06,
+ "loss": 0.9407,
+ "step": 2851
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8389995837398306,
+ "learning_rate": 8.908448882451104e-06,
+ "loss": 0.8918,
+ "step": 2852
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9433767153496767,
+ "learning_rate": 8.902254250973306e-06,
+ "loss": 0.9566,
+ "step": 2853
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9389439152504334,
+ "learning_rate": 8.896060045847305e-06,
+ "loss": 0.9269,
+ "step": 2854
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.844402795827549,
+ "learning_rate": 8.88986626947886e-06,
+ "loss": 0.8804,
+ "step": 2855
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0600064630310688,
+ "learning_rate": 8.883672924273566e-06,
+ "loss": 0.9598,
+ "step": 2856
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.6784829967907909,
+ "learning_rate": 8.877480012636847e-06,
+ "loss": 0.7513,
+ "step": 2857
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0334544574713502,
+ "learning_rate": 8.871287536973954e-06,
+ "loss": 0.8995,
+ "step": 2858
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8453952904110601,
+ "learning_rate": 8.865095499689978e-06,
+ "loss": 0.8863,
+ "step": 2859
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8539837508497696,
+ "learning_rate": 8.85890390318983e-06,
+ "loss": 0.9112,
+ "step": 2860
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8926906415651625,
+ "learning_rate": 8.852712749878255e-06,
+ "loss": 0.897,
+ "step": 2861
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9204825004812238,
+ "learning_rate": 8.846522042159833e-06,
+ "loss": 0.9986,
+ "step": 2862
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.744174296794978,
+ "learning_rate": 8.840331782438954e-06,
+ "loss": 0.7873,
+ "step": 2863
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.1747498673881946,
+ "learning_rate": 8.83414197311985e-06,
+ "loss": 0.8873,
+ "step": 2864
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9157770776285465,
+ "learning_rate": 8.82795261660657e-06,
+ "loss": 0.9341,
+ "step": 2865
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9074716814296028,
+ "learning_rate": 8.821763715302986e-06,
+ "loss": 0.883,
+ "step": 2866
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8922576125374475,
+ "learning_rate": 8.815575271612798e-06,
+ "loss": 0.9238,
+ "step": 2867
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9308080809460032,
+ "learning_rate": 8.809387287939528e-06,
+ "loss": 0.934,
+ "step": 2868
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9147273764859372,
+ "learning_rate": 8.803199766686517e-06,
+ "loss": 0.8992,
+ "step": 2869
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.7531981921776345,
+ "learning_rate": 8.797012710256923e-06,
+ "loss": 0.8735,
+ "step": 2870
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 1.0309196915368306,
+ "learning_rate": 8.790826121053732e-06,
+ "loss": 0.8937,
+ "step": 2871
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9070125605631589,
+ "learning_rate": 8.784640001479741e-06,
+ "loss": 0.9289,
+ "step": 2872
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8559782464324585,
+ "learning_rate": 8.77845435393757e-06,
+ "loss": 0.8791,
+ "step": 2873
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8178121579699371,
+ "learning_rate": 8.772269180829653e-06,
+ "loss": 0.8856,
+ "step": 2874
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9313980584987472,
+ "learning_rate": 8.766084484558237e-06,
+ "loss": 0.9493,
+ "step": 2875
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8333348349809776,
+ "learning_rate": 8.759900267525393e-06,
+ "loss": 0.8542,
+ "step": 2876
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9603039279388584,
+ "learning_rate": 8.753716532132992e-06,
+ "loss": 0.9992,
+ "step": 2877
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.950029970634691,
+ "learning_rate": 8.747533280782725e-06,
+ "loss": 0.8652,
+ "step": 2878
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9773336212744834,
+ "learning_rate": 8.741350515876103e-06,
+ "loss": 0.8776,
+ "step": 2879
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8276593223512142,
+ "learning_rate": 8.73516823981444e-06,
+ "loss": 0.9077,
+ "step": 2880
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9034896037267094,
+ "learning_rate": 8.728986454998858e-06,
+ "loss": 0.9058,
+ "step": 2881
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9392772165873345,
+ "learning_rate": 8.72280516383029e-06,
+ "loss": 0.9747,
+ "step": 2882
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.8815737209828033,
+ "learning_rate": 8.716624368709477e-06,
+ "loss": 0.875,
+ "step": 2883
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.9606247786553922,
+ "learning_rate": 8.71044407203697e-06,
+ "loss": 0.991,
+ "step": 2884
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9446559513437827,
+ "learning_rate": 8.70426427621313e-06,
+ "loss": 0.8921,
+ "step": 2885
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8854582313261097,
+ "learning_rate": 8.698084983638111e-06,
+ "loss": 0.943,
+ "step": 2886
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9068908634924546,
+ "learning_rate": 8.691906196711884e-06,
+ "loss": 0.8308,
+ "step": 2887
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8362728007947356,
+ "learning_rate": 8.685727917834218e-06,
+ "loss": 0.8482,
+ "step": 2888
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9741735340286811,
+ "learning_rate": 8.679550149404685e-06,
+ "loss": 0.9479,
+ "step": 2889
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8881434627629128,
+ "learning_rate": 8.673372893822653e-06,
+ "loss": 0.9318,
+ "step": 2890
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8344551673873453,
+ "learning_rate": 8.667196153487308e-06,
+ "loss": 0.9383,
+ "step": 2891
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8569145363538877,
+ "learning_rate": 8.661019930797615e-06,
+ "loss": 0.9245,
+ "step": 2892
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9448224962204009,
+ "learning_rate": 8.654844228152355e-06,
+ "loss": 0.8776,
+ "step": 2893
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.787582574984948,
+ "learning_rate": 8.648669047950097e-06,
+ "loss": 0.9104,
+ "step": 2894
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0189834574150762,
+ "learning_rate": 8.642494392589206e-06,
+ "loss": 0.935,
+ "step": 2895
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9670673184765097,
+ "learning_rate": 8.63632026446785e-06,
+ "loss": 0.9096,
+ "step": 2896
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9127132772167057,
+ "learning_rate": 8.630146665983993e-06,
+ "loss": 0.9214,
+ "step": 2897
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.01447795228686,
+ "learning_rate": 8.623973599535385e-06,
+ "loss": 0.9991,
+ "step": 2898
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.7884176888726323,
+ "learning_rate": 8.617801067519575e-06,
+ "loss": 0.8353,
+ "step": 2899
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9576881407108104,
+ "learning_rate": 8.611629072333905e-06,
+ "loss": 0.9054,
+ "step": 2900
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0163616995408742,
+ "learning_rate": 8.605457616375503e-06,
+ "loss": 0.9164,
+ "step": 2901
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9594349638982683,
+ "learning_rate": 8.599286702041292e-06,
+ "loss": 0.934,
+ "step": 2902
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8551254277566125,
+ "learning_rate": 8.593116331727987e-06,
+ "loss": 0.7898,
+ "step": 2903
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8816328371922619,
+ "learning_rate": 8.586946507832088e-06,
+ "loss": 0.9228,
+ "step": 2904
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9348980190557928,
+ "learning_rate": 8.580777232749883e-06,
+ "loss": 0.8273,
+ "step": 2905
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9957423487884581,
+ "learning_rate": 8.574608508877448e-06,
+ "loss": 0.8281,
+ "step": 2906
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9969978056295261,
+ "learning_rate": 8.568440338610638e-06,
+ "loss": 0.976,
+ "step": 2907
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.876867638214715,
+ "learning_rate": 8.562272724345108e-06,
+ "loss": 0.9308,
+ "step": 2908
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9341130376137358,
+ "learning_rate": 8.556105668476287e-06,
+ "loss": 0.9055,
+ "step": 2909
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8886772995301155,
+ "learning_rate": 8.549939173399385e-06,
+ "loss": 0.9242,
+ "step": 2910
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.145987576882531,
+ "learning_rate": 8.5437732415094e-06,
+ "loss": 0.9278,
+ "step": 2911
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.001135534362951,
+ "learning_rate": 8.537607875201106e-06,
+ "loss": 0.9237,
+ "step": 2912
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.00581843353021,
+ "learning_rate": 8.531443076869058e-06,
+ "loss": 0.8819,
+ "step": 2913
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.917057557817771,
+ "learning_rate": 8.525278848907603e-06,
+ "loss": 0.9391,
+ "step": 2914
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8010704739390195,
+ "learning_rate": 8.51911519371085e-06,
+ "loss": 0.8336,
+ "step": 2915
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9543683443874903,
+ "learning_rate": 8.512952113672689e-06,
+ "loss": 0.9527,
+ "step": 2916
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9836839930871281,
+ "learning_rate": 8.506789611186794e-06,
+ "loss": 1.0222,
+ "step": 2917
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9246133369200561,
+ "learning_rate": 8.500627688646607e-06,
+ "loss": 0.8902,
+ "step": 2918
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9183078537742924,
+ "learning_rate": 8.494466348445345e-06,
+ "loss": 0.9479,
+ "step": 2919
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8530193745001196,
+ "learning_rate": 8.48830559297601e-06,
+ "loss": 0.9091,
+ "step": 2920
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9629520731309305,
+ "learning_rate": 8.48214542463136e-06,
+ "loss": 0.9364,
+ "step": 2921
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9150014733977437,
+ "learning_rate": 8.475985845803938e-06,
+ "loss": 0.9356,
+ "step": 2922
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8261063375868216,
+ "learning_rate": 8.469826858886054e-06,
+ "loss": 0.9337,
+ "step": 2923
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.1234486881091612,
+ "learning_rate": 8.463668466269785e-06,
+ "loss": 0.9349,
+ "step": 2924
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.928738141263137,
+ "learning_rate": 8.457510670346976e-06,
+ "loss": 0.9074,
+ "step": 2925
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9477202246359326,
+ "learning_rate": 8.451353473509254e-06,
+ "loss": 0.926,
+ "step": 2926
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8883431515706409,
+ "learning_rate": 8.445196878147997e-06,
+ "loss": 0.8463,
+ "step": 2927
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 1.0376361772455147,
+ "learning_rate": 8.439040886654354e-06,
+ "loss": 0.9228,
+ "step": 2928
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9247536779865697,
+ "learning_rate": 8.432885501419248e-06,
+ "loss": 0.9573,
+ "step": 2929
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9317466580032667,
+ "learning_rate": 8.426730724833354e-06,
+ "loss": 0.906,
+ "step": 2930
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.82375627690957,
+ "learning_rate": 8.420576559287112e-06,
+ "loss": 0.8826,
+ "step": 2931
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9526502240708186,
+ "learning_rate": 8.414423007170742e-06,
+ "loss": 0.9399,
+ "step": 2932
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8863593158901537,
+ "learning_rate": 8.408270070874201e-06,
+ "loss": 0.9036,
+ "step": 2933
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9542485239121139,
+ "learning_rate": 8.402117752787225e-06,
+ "loss": 0.8611,
+ "step": 2934
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.8041770339354015,
+ "learning_rate": 8.395966055299302e-06,
+ "loss": 0.9374,
+ "step": 2935
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.9375165105895691,
+ "learning_rate": 8.389814980799679e-06,
+ "loss": 0.9006,
+ "step": 2936
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8974970160694713,
+ "learning_rate": 8.38366453167736e-06,
+ "loss": 0.9241,
+ "step": 2937
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8633372528156041,
+ "learning_rate": 8.377514710321117e-06,
+ "loss": 0.8905,
+ "step": 2938
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.2666441679371878,
+ "learning_rate": 8.371365519119463e-06,
+ "loss": 0.8391,
+ "step": 2939
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.820787530828404,
+ "learning_rate": 8.365216960460675e-06,
+ "loss": 0.8962,
+ "step": 2940
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9517866276180234,
+ "learning_rate": 8.359069036732781e-06,
+ "loss": 0.8873,
+ "step": 2941
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.772823993517953,
+ "learning_rate": 8.352921750323562e-06,
+ "loss": 0.8076,
+ "step": 2942
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8592855535633733,
+ "learning_rate": 8.346775103620559e-06,
+ "loss": 0.8369,
+ "step": 2943
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.926525611699989,
+ "learning_rate": 8.340629099011057e-06,
+ "loss": 0.9258,
+ "step": 2944
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8708002316010192,
+ "learning_rate": 8.33448373888209e-06,
+ "loss": 0.91,
+ "step": 2945
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8906604500719437,
+ "learning_rate": 8.328339025620449e-06,
+ "loss": 0.9044,
+ "step": 2946
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8196381043263796,
+ "learning_rate": 8.322194961612668e-06,
+ "loss": 0.8912,
+ "step": 2947
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0369291216248417,
+ "learning_rate": 8.316051549245026e-06,
+ "loss": 0.8968,
+ "step": 2948
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9201713446981273,
+ "learning_rate": 8.309908790903562e-06,
+ "loss": 0.8893,
+ "step": 2949
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0316530698872768,
+ "learning_rate": 8.303766688974047e-06,
+ "loss": 0.8754,
+ "step": 2950
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9670015715635107,
+ "learning_rate": 8.297625245842006e-06,
+ "loss": 0.8962,
+ "step": 2951
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.127719573890693,
+ "learning_rate": 8.291484463892703e-06,
+ "loss": 0.9977,
+ "step": 2952
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8423958904618566,
+ "learning_rate": 8.285344345511147e-06,
+ "loss": 0.82,
+ "step": 2953
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9449759772993279,
+ "learning_rate": 8.279204893082083e-06,
+ "loss": 0.9218,
+ "step": 2954
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9476765061050989,
+ "learning_rate": 8.273066108990017e-06,
+ "loss": 0.9065,
+ "step": 2955
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8195802129234587,
+ "learning_rate": 8.266927995619175e-06,
+ "loss": 0.8263,
+ "step": 2956
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0156084747070426,
+ "learning_rate": 8.260790555353526e-06,
+ "loss": 0.9908,
+ "step": 2957
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9602270068068365,
+ "learning_rate": 8.254653790576787e-06,
+ "loss": 0.9454,
+ "step": 2958
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8994601338336368,
+ "learning_rate": 8.248517703672405e-06,
+ "loss": 0.8763,
+ "step": 2959
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.0717303600310182,
+ "learning_rate": 8.242382297023558e-06,
+ "loss": 0.8798,
+ "step": 2960
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9225476897467404,
+ "learning_rate": 8.23624757301318e-06,
+ "loss": 0.9841,
+ "step": 2961
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8722961671024559,
+ "learning_rate": 8.230113534023917e-06,
+ "loss": 0.9688,
+ "step": 2962
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8820786767795896,
+ "learning_rate": 8.223980182438167e-06,
+ "loss": 0.9081,
+ "step": 2963
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.915948991827674,
+ "learning_rate": 8.217847520638049e-06,
+ "loss": 0.9125,
+ "step": 2964
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7711659906220023,
+ "learning_rate": 8.211715551005414e-06,
+ "loss": 0.87,
+ "step": 2965
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7208004616960314,
+ "learning_rate": 8.205584275921854e-06,
+ "loss": 0.8562,
+ "step": 2966
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8564172490588439,
+ "learning_rate": 8.199453697768686e-06,
+ "loss": 0.9142,
+ "step": 2967
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8332655873707439,
+ "learning_rate": 8.193323818926955e-06,
+ "loss": 0.8856,
+ "step": 2968
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9321441739062402,
+ "learning_rate": 8.187194641777432e-06,
+ "loss": 0.9501,
+ "step": 2969
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8628057416575993,
+ "learning_rate": 8.181066168700622e-06,
+ "loss": 0.9378,
+ "step": 2970
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9743743273340152,
+ "learning_rate": 8.174938402076754e-06,
+ "loss": 0.8136,
+ "step": 2971
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9026157758169897,
+ "learning_rate": 8.168811344285776e-06,
+ "loss": 0.8641,
+ "step": 2972
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8727927232715033,
+ "learning_rate": 8.162684997707374e-06,
+ "loss": 0.8211,
+ "step": 2973
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9911478830586278,
+ "learning_rate": 8.156559364720947e-06,
+ "loss": 1.0118,
+ "step": 2974
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7843381099959617,
+ "learning_rate": 8.150434447705623e-06,
+ "loss": 0.8707,
+ "step": 2975
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8063504227265889,
+ "learning_rate": 8.144310249040246e-06,
+ "loss": 0.8908,
+ "step": 2976
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9773390991304581,
+ "learning_rate": 8.138186771103382e-06,
+ "loss": 0.8714,
+ "step": 2977
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7600985299663205,
+ "learning_rate": 8.132064016273325e-06,
+ "loss": 0.8824,
+ "step": 2978
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9792340697758263,
+ "learning_rate": 8.12594198692808e-06,
+ "loss": 0.9535,
+ "step": 2979
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9187429072935889,
+ "learning_rate": 8.119820685445373e-06,
+ "loss": 0.9276,
+ "step": 2980
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.9969402451636438,
+ "learning_rate": 8.113700114202647e-06,
+ "loss": 0.9076,
+ "step": 2981
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 1.082801295447143,
+ "learning_rate": 8.107580275577059e-06,
+ "loss": 0.8977,
+ "step": 2982
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.7502599206190357,
+ "learning_rate": 8.101461171945483e-06,
+ "loss": 0.8441,
+ "step": 2983
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.850821292645756,
+ "learning_rate": 8.095342805684516e-06,
+ "loss": 0.9256,
+ "step": 2984
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8836354861206998,
+ "learning_rate": 8.089225179170454e-06,
+ "loss": 0.8751,
+ "step": 2985
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8600873590187288,
+ "learning_rate": 8.083108294779313e-06,
+ "loss": 0.845,
+ "step": 2986
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.8430653652994249,
+ "learning_rate": 8.076992154886826e-06,
+ "loss": 0.8971,
+ "step": 2987
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.844586736484448,
+ "learning_rate": 8.070876761868426e-06,
+ "loss": 0.886,
+ "step": 2988
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8914725901338992,
+ "learning_rate": 8.064762118099258e-06,
+ "loss": 0.8982,
+ "step": 2989
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9809066551606824,
+ "learning_rate": 8.058648225954188e-06,
+ "loss": 0.9422,
+ "step": 2990
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.926556390850543,
+ "learning_rate": 8.052535087807774e-06,
+ "loss": 0.9251,
+ "step": 2991
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9913252703493083,
+ "learning_rate": 8.046422706034294e-06,
+ "loss": 0.901,
+ "step": 2992
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7895189537186987,
+ "learning_rate": 8.040311083007725e-06,
+ "loss": 0.8319,
+ "step": 2993
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9064662414334193,
+ "learning_rate": 8.034200221101746e-06,
+ "loss": 0.908,
+ "step": 2994
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9671581149740855,
+ "learning_rate": 8.028090122689747e-06,
+ "loss": 0.9788,
+ "step": 2995
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8589025217308112,
+ "learning_rate": 8.021980790144828e-06,
+ "loss": 0.9424,
+ "step": 2996
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8812776212789514,
+ "learning_rate": 8.015872225839776e-06,
+ "loss": 0.9343,
+ "step": 2997
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9210985637272886,
+ "learning_rate": 8.009764432147086e-06,
+ "loss": 0.8646,
+ "step": 2998
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8524624418946697,
+ "learning_rate": 8.003657411438961e-06,
+ "loss": 0.8807,
+ "step": 2999
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9322844937972518,
+ "learning_rate": 7.997551166087293e-06,
+ "loss": 0.9728,
+ "step": 3000
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8987802060488909,
+ "learning_rate": 7.991445698463672e-06,
+ "loss": 0.9034,
+ "step": 3001
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8033725920340877,
+ "learning_rate": 7.985341010939402e-06,
+ "loss": 0.8597,
+ "step": 3002
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9193113456445283,
+ "learning_rate": 7.979237105885467e-06,
+ "loss": 0.9123,
+ "step": 3003
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8034726161218648,
+ "learning_rate": 7.973133985672558e-06,
+ "loss": 0.8147,
+ "step": 3004
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8906403744671403,
+ "learning_rate": 7.967031652671051e-06,
+ "loss": 0.8896,
+ "step": 3005
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.835282740289833,
+ "learning_rate": 7.960930109251023e-06,
+ "loss": 0.8467,
+ "step": 3006
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.6972517600203672,
+ "learning_rate": 7.954829357782243e-06,
+ "loss": 0.8043,
+ "step": 3007
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8733693253852634,
+ "learning_rate": 7.948729400634178e-06,
+ "loss": 0.8672,
+ "step": 3008
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9411510298654441,
+ "learning_rate": 7.942630240175977e-06,
+ "loss": 0.8477,
+ "step": 3009
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9889369274243794,
+ "learning_rate": 7.936531878776484e-06,
+ "loss": 0.8682,
+ "step": 3010
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8348359655805895,
+ "learning_rate": 7.930434318804229e-06,
+ "loss": 0.9156,
+ "step": 3011
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.950131889080997,
+ "learning_rate": 7.924337562627435e-06,
+ "loss": 0.9302,
+ "step": 3012
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9669111851343594,
+ "learning_rate": 7.918241612614016e-06,
+ "loss": 0.9828,
+ "step": 3013
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7858071556014813,
+ "learning_rate": 7.91214647113157e-06,
+ "loss": 0.8712,
+ "step": 3014
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.921558614766626,
+ "learning_rate": 7.906052140547373e-06,
+ "loss": 0.8532,
+ "step": 3015
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8645095232352292,
+ "learning_rate": 7.899958623228398e-06,
+ "loss": 0.8658,
+ "step": 3016
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9669464202780016,
+ "learning_rate": 7.893865921541294e-06,
+ "loss": 0.9128,
+ "step": 3017
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9523586546091055,
+ "learning_rate": 7.887774037852395e-06,
+ "loss": 0.8839,
+ "step": 3018
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9974613025553319,
+ "learning_rate": 7.881682974527723e-06,
+ "loss": 0.9103,
+ "step": 3019
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8878953765185238,
+ "learning_rate": 7.875592733932972e-06,
+ "loss": 0.8983,
+ "step": 3020
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.7812720230097898,
+ "learning_rate": 7.869503318433529e-06,
+ "loss": 0.8616,
+ "step": 3021
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9254796511666435,
+ "learning_rate": 7.863414730394444e-06,
+ "loss": 0.9434,
+ "step": 3022
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.833929544183009,
+ "learning_rate": 7.857326972180455e-06,
+ "loss": 0.886,
+ "step": 3023
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9328700838673941,
+ "learning_rate": 7.85124004615598e-06,
+ "loss": 0.9408,
+ "step": 3024
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9465670044584954,
+ "learning_rate": 7.845153954685114e-06,
+ "loss": 0.9217,
+ "step": 3025
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8522265655994306,
+ "learning_rate": 7.839068700131623e-06,
+ "loss": 0.932,
+ "step": 3026
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8937138277620927,
+ "learning_rate": 7.832984284858946e-06,
+ "loss": 0.887,
+ "step": 3027
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9977556779432588,
+ "learning_rate": 7.826900711230204e-06,
+ "loss": 0.9264,
+ "step": 3028
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8606018880527176,
+ "learning_rate": 7.820817981608185e-06,
+ "loss": 0.9251,
+ "step": 3029
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8530278334154929,
+ "learning_rate": 7.814736098355348e-06,
+ "loss": 0.8695,
+ "step": 3030
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8567011058642134,
+ "learning_rate": 7.808655063833832e-06,
+ "loss": 0.9329,
+ "step": 3031
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9280510205178197,
+ "learning_rate": 7.802574880405438e-06,
+ "loss": 0.9373,
+ "step": 3032
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9848501247592024,
+ "learning_rate": 7.79649555043164e-06,
+ "loss": 0.9181,
+ "step": 3033
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 1.0297492133153854,
+ "learning_rate": 7.790417076273581e-06,
+ "loss": 0.8964,
+ "step": 3034
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8697680836383845,
+ "learning_rate": 7.784339460292065e-06,
+ "loss": 0.8573,
+ "step": 3035
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.842781609719924,
+ "learning_rate": 7.77826270484757e-06,
+ "loss": 0.9054,
+ "step": 3036
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.740688518386285,
+ "learning_rate": 7.772186812300244e-06,
+ "loss": 0.7684,
+ "step": 3037
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.8696552210515258,
+ "learning_rate": 7.766111785009888e-06,
+ "loss": 0.9298,
+ "step": 3038
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9294178462954537,
+ "learning_rate": 7.760037625335973e-06,
+ "loss": 0.8719,
+ "step": 3039
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.9869254713552199,
+ "learning_rate": 7.753964335637634e-06,
+ "loss": 0.9393,
+ "step": 3040
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8975835600692645,
+ "learning_rate": 7.747891918273668e-06,
+ "loss": 0.9443,
+ "step": 3041
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8518753300133183,
+ "learning_rate": 7.741820375602524e-06,
+ "loss": 0.8875,
+ "step": 3042
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8208267461969093,
+ "learning_rate": 7.735749709982329e-06,
+ "loss": 0.8864,
+ "step": 3043
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0689326764845735,
+ "learning_rate": 7.729679923770855e-06,
+ "loss": 0.8713,
+ "step": 3044
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.895283749784096,
+ "learning_rate": 7.723611019325538e-06,
+ "loss": 0.8723,
+ "step": 3045
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9888518115529736,
+ "learning_rate": 7.71754299900347e-06,
+ "loss": 0.9657,
+ "step": 3046
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9619888771560674,
+ "learning_rate": 7.7114758651614e-06,
+ "loss": 0.8995,
+ "step": 3047
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.867741561085671,
+ "learning_rate": 7.705409620155733e-06,
+ "loss": 0.9001,
+ "step": 3048
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0893561569683614,
+ "learning_rate": 7.699344266342529e-06,
+ "loss": 1.0243,
+ "step": 3049
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0069293978702367,
+ "learning_rate": 7.693279806077504e-06,
+ "loss": 0.9075,
+ "step": 3050
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9863133056396031,
+ "learning_rate": 7.68721624171602e-06,
+ "loss": 0.8428,
+ "step": 3051
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.7953276992499636,
+ "learning_rate": 7.681153575613098e-06,
+ "loss": 0.8109,
+ "step": 3052
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8845937432453769,
+ "learning_rate": 7.675091810123404e-06,
+ "loss": 0.9383,
+ "step": 3053
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.938155446305834,
+ "learning_rate": 7.669030947601265e-06,
+ "loss": 0.9052,
+ "step": 3054
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8182078771237827,
+ "learning_rate": 7.662970990400647e-06,
+ "loss": 0.8712,
+ "step": 3055
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8933854877025341,
+ "learning_rate": 7.656911940875163e-06,
+ "loss": 0.9474,
+ "step": 3056
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9449257667933663,
+ "learning_rate": 7.650853801378084e-06,
+ "loss": 0.8568,
+ "step": 3057
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0495579740142698,
+ "learning_rate": 7.644796574262322e-06,
+ "loss": 0.8806,
+ "step": 3058
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8508073914566813,
+ "learning_rate": 7.638740261880423e-06,
+ "loss": 0.8947,
+ "step": 3059
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9335694394677336,
+ "learning_rate": 7.632684866584606e-06,
+ "loss": 0.8983,
+ "step": 3060
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9733709525549523,
+ "learning_rate": 7.626630390726704e-06,
+ "loss": 0.9256,
+ "step": 3061
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.863455601871164,
+ "learning_rate": 7.620576836658212e-06,
+ "loss": 0.9206,
+ "step": 3062
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9586497646142664,
+ "learning_rate": 7.61452420673026e-06,
+ "loss": 0.8627,
+ "step": 3063
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8301599479065566,
+ "learning_rate": 7.608472503293615e-06,
+ "loss": 0.8164,
+ "step": 3064
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8892417177178197,
+ "learning_rate": 7.60242172869869e-06,
+ "loss": 0.9191,
+ "step": 3065
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8788846823907092,
+ "learning_rate": 7.596371885295542e-06,
+ "loss": 0.9064,
+ "step": 3066
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8370510726959535,
+ "learning_rate": 7.590322975433857e-06,
+ "loss": 0.8804,
+ "step": 3067
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9979788275045329,
+ "learning_rate": 7.584275001462961e-06,
+ "loss": 1.0111,
+ "step": 3068
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0235967334786231,
+ "learning_rate": 7.578227965731819e-06,
+ "loss": 0.8809,
+ "step": 3069
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.85254860792901,
+ "learning_rate": 7.572181870589028e-06,
+ "loss": 0.9018,
+ "step": 3070
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9517331058693642,
+ "learning_rate": 7.566136718382821e-06,
+ "loss": 0.8162,
+ "step": 3071
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9077953953883825,
+ "learning_rate": 7.560092511461069e-06,
+ "loss": 0.9436,
+ "step": 3072
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8866841954375853,
+ "learning_rate": 7.55404925217127e-06,
+ "loss": 0.9202,
+ "step": 3073
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.6935749539849467,
+ "learning_rate": 7.548006942860557e-06,
+ "loss": 0.8192,
+ "step": 3074
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0514487012201703,
+ "learning_rate": 7.541965585875695e-06,
+ "loss": 0.9739,
+ "step": 3075
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0378129702046215,
+ "learning_rate": 7.535925183563073e-06,
+ "loss": 0.8681,
+ "step": 3076
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9129414279586064,
+ "learning_rate": 7.529885738268714e-06,
+ "loss": 0.9303,
+ "step": 3077
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.0734826745723056,
+ "learning_rate": 7.523847252338274e-06,
+ "loss": 0.8529,
+ "step": 3078
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9364323229864077,
+ "learning_rate": 7.51780972811703e-06,
+ "loss": 0.9117,
+ "step": 3079
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9019671873323886,
+ "learning_rate": 7.511773167949885e-06,
+ "loss": 0.8917,
+ "step": 3080
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.058335285683823,
+ "learning_rate": 7.5057375741813685e-06,
+ "loss": 0.964,
+ "step": 3081
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8754304852426065,
+ "learning_rate": 7.499702949155634e-06,
+ "loss": 0.8679,
+ "step": 3082
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8626888810903242,
+ "learning_rate": 7.493669295216467e-06,
+ "loss": 0.8742,
+ "step": 3083
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9446940901693937,
+ "learning_rate": 7.487636614707265e-06,
+ "loss": 0.9437,
+ "step": 3084
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.913647491487289,
+ "learning_rate": 7.48160490997105e-06,
+ "loss": 0.9122,
+ "step": 3085
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9817417389844753,
+ "learning_rate": 7.475574183350471e-06,
+ "loss": 0.9347,
+ "step": 3086
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9144861253958118,
+ "learning_rate": 7.46954443718779e-06,
+ "loss": 0.9046,
+ "step": 3087
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9404858654589225,
+ "learning_rate": 7.463515673824888e-06,
+ "loss": 0.938,
+ "step": 3088
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.1091536549587875,
+ "learning_rate": 7.457487895603273e-06,
+ "loss": 0.8852,
+ "step": 3089
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.9086534849901398,
+ "learning_rate": 7.451461104864061e-06,
+ "loss": 0.9179,
+ "step": 3090
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.8755976989663699,
+ "learning_rate": 7.44543530394799e-06,
+ "loss": 0.8824,
+ "step": 3091
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 1.090908089694531,
+ "learning_rate": 7.439410495195411e-06,
+ "loss": 0.9011,
+ "step": 3092
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8955078572197323,
+ "learning_rate": 7.433386680946288e-06,
+ "loss": 0.9086,
+ "step": 3093
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8913704610752187,
+ "learning_rate": 7.427363863540202e-06,
+ "loss": 0.8652,
+ "step": 3094
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8465243680237361,
+ "learning_rate": 7.421342045316351e-06,
+ "loss": 0.9402,
+ "step": 3095
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0618151973993595,
+ "learning_rate": 7.415321228613534e-06,
+ "loss": 0.9194,
+ "step": 3096
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8799346592072748,
+ "learning_rate": 7.409301415770168e-06,
+ "loss": 0.7974,
+ "step": 3097
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.87647071998454,
+ "learning_rate": 7.403282609124281e-06,
+ "loss": 0.8938,
+ "step": 3098
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9524293226024881,
+ "learning_rate": 7.397264811013507e-06,
+ "loss": 0.9343,
+ "step": 3099
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9717560944156921,
+ "learning_rate": 7.391248023775084e-06,
+ "loss": 0.8794,
+ "step": 3100
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0427302635252145,
+ "learning_rate": 7.385232249745873e-06,
+ "loss": 0.9443,
+ "step": 3101
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.820156355881189,
+ "learning_rate": 7.379217491262325e-06,
+ "loss": 0.8806,
+ "step": 3102
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9267111425838447,
+ "learning_rate": 7.373203750660505e-06,
+ "loss": 0.9299,
+ "step": 3103
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9198919699398267,
+ "learning_rate": 7.36719103027608e-06,
+ "loss": 0.9293,
+ "step": 3104
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7851749355372527,
+ "learning_rate": 7.361179332444318e-06,
+ "loss": 0.8619,
+ "step": 3105
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.198117464191844,
+ "learning_rate": 7.355168659500094e-06,
+ "loss": 0.958,
+ "step": 3106
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8481077581272879,
+ "learning_rate": 7.3491590137778915e-06,
+ "loss": 0.8884,
+ "step": 3107
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9082379189614874,
+ "learning_rate": 7.343150397611782e-06,
+ "loss": 0.8709,
+ "step": 3108
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9828006656871224,
+ "learning_rate": 7.3371428133354435e-06,
+ "loss": 0.8861,
+ "step": 3109
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8277180308231091,
+ "learning_rate": 7.33113626328215e-06,
+ "loss": 0.8016,
+ "step": 3110
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9207310049046246,
+ "learning_rate": 7.325130749784781e-06,
+ "loss": 0.9476,
+ "step": 3111
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.038531133366016,
+ "learning_rate": 7.3191262751758005e-06,
+ "loss": 0.8901,
+ "step": 3112
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9745778005106581,
+ "learning_rate": 7.3131228417872905e-06,
+ "loss": 0.8654,
+ "step": 3113
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8525421710847984,
+ "learning_rate": 7.307120451950902e-06,
+ "loss": 0.8697,
+ "step": 3114
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8969369945071651,
+ "learning_rate": 7.301119107997905e-06,
+ "loss": 0.9302,
+ "step": 3115
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8704404473958959,
+ "learning_rate": 7.295118812259145e-06,
+ "loss": 0.8736,
+ "step": 3116
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8737931679431764,
+ "learning_rate": 7.289119567065068e-06,
+ "loss": 0.9358,
+ "step": 3117
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.1063117572079268,
+ "learning_rate": 7.2831213747457155e-06,
+ "loss": 0.9179,
+ "step": 3118
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8319200429675896,
+ "learning_rate": 7.2771242376307125e-06,
+ "loss": 0.8865,
+ "step": 3119
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9099261295844384,
+ "learning_rate": 7.271128158049283e-06,
+ "loss": 0.8986,
+ "step": 3120
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9763697004734214,
+ "learning_rate": 7.2651331383302326e-06,
+ "loss": 0.9185,
+ "step": 3121
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8296121035233821,
+ "learning_rate": 7.2591391808019555e-06,
+ "loss": 0.8959,
+ "step": 3122
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7389510031745331,
+ "learning_rate": 7.253146287792434e-06,
+ "loss": 0.8506,
+ "step": 3123
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.0449629118250001,
+ "learning_rate": 7.247154461629248e-06,
+ "loss": 0.8943,
+ "step": 3124
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8519819117041794,
+ "learning_rate": 7.241163704639547e-06,
+ "loss": 0.8991,
+ "step": 3125
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.821426699566402,
+ "learning_rate": 7.235174019150071e-06,
+ "loss": 0.8482,
+ "step": 3126
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8530305036432776,
+ "learning_rate": 7.229185407487149e-06,
+ "loss": 0.8998,
+ "step": 3127
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8929154538055173,
+ "learning_rate": 7.2231978719766884e-06,
+ "loss": 0.8968,
+ "step": 3128
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9565356056159762,
+ "learning_rate": 7.217211414944171e-06,
+ "loss": 0.9264,
+ "step": 3129
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8534274162064641,
+ "learning_rate": 7.2112260387146784e-06,
+ "loss": 0.8953,
+ "step": 3130
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8153820143829157,
+ "learning_rate": 7.2052417456128565e-06,
+ "loss": 0.8829,
+ "step": 3131
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8380144354016602,
+ "learning_rate": 7.199258537962936e-06,
+ "loss": 0.8948,
+ "step": 3132
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.798316857097433,
+ "learning_rate": 7.193276418088729e-06,
+ "loss": 0.8475,
+ "step": 3133
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8360752817717011,
+ "learning_rate": 7.187295388313618e-06,
+ "loss": 0.8671,
+ "step": 3134
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9924444575496794,
+ "learning_rate": 7.181315450960562e-06,
+ "loss": 0.9016,
+ "step": 3135
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9045718384715163,
+ "learning_rate": 7.175336608352113e-06,
+ "loss": 0.931,
+ "step": 3136
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.783770071965731,
+ "learning_rate": 7.169358862810374e-06,
+ "loss": 0.899,
+ "step": 3137
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.7438253485129284,
+ "learning_rate": 7.163382216657033e-06,
+ "loss": 0.7635,
+ "step": 3138
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.9292054173618102,
+ "learning_rate": 7.1574066722133565e-06,
+ "loss": 0.9126,
+ "step": 3139
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8290249820889951,
+ "learning_rate": 7.151432231800173e-06,
+ "loss": 0.8473,
+ "step": 3140
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8724940677184712,
+ "learning_rate": 7.145458897737882e-06,
+ "loss": 0.8825,
+ "step": 3141
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 1.319161629121295,
+ "learning_rate": 7.139486672346466e-06,
+ "loss": 0.9209,
+ "step": 3142
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8820725742097981,
+ "learning_rate": 7.133515557945463e-06,
+ "loss": 0.9676,
+ "step": 3143
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.8919944894918395,
+ "learning_rate": 7.12754555685399e-06,
+ "loss": 0.953,
+ "step": 3144
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.915830556250263,
+ "learning_rate": 7.121576671390722e-06,
+ "loss": 0.8791,
+ "step": 3145
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7867023519579889,
+ "learning_rate": 7.115608903873905e-06,
+ "loss": 0.8592,
+ "step": 3146
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7557530543214772,
+ "learning_rate": 7.109642256621353e-06,
+ "loss": 0.8424,
+ "step": 3147
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9538309972611474,
+ "learning_rate": 7.103676731950443e-06,
+ "loss": 0.9423,
+ "step": 3148
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9305136950405283,
+ "learning_rate": 7.0977123321781176e-06,
+ "loss": 0.9213,
+ "step": 3149
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.898409886169043,
+ "learning_rate": 7.091749059620881e-06,
+ "loss": 0.9482,
+ "step": 3150
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9266014619179291,
+ "learning_rate": 7.0857869165947945e-06,
+ "loss": 0.8275,
+ "step": 3151
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7812282045125521,
+ "learning_rate": 7.079825905415491e-06,
+ "loss": 0.878,
+ "step": 3152
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.867731346965853,
+ "learning_rate": 7.073866028398153e-06,
+ "loss": 0.9008,
+ "step": 3153
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.056823648953414,
+ "learning_rate": 7.067907287857535e-06,
+ "loss": 0.86,
+ "step": 3154
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8386302063426734,
+ "learning_rate": 7.061949686107938e-06,
+ "loss": 0.8657,
+ "step": 3155
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8346374978363387,
+ "learning_rate": 7.0559932254632315e-06,
+ "loss": 0.913,
+ "step": 3156
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.942382013735655,
+ "learning_rate": 7.0500379082368305e-06,
+ "loss": 0.8709,
+ "step": 3157
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.095435759704348,
+ "learning_rate": 7.044083736741711e-06,
+ "loss": 0.8939,
+ "step": 3158
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8697809001392831,
+ "learning_rate": 7.03813071329041e-06,
+ "loss": 0.9055,
+ "step": 3159
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9794621291830303,
+ "learning_rate": 7.032178840195009e-06,
+ "loss": 0.8622,
+ "step": 3160
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.930007138571353,
+ "learning_rate": 7.026228119767149e-06,
+ "loss": 0.9294,
+ "step": 3161
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8598086834220618,
+ "learning_rate": 7.020278554318023e-06,
+ "loss": 0.8498,
+ "step": 3162
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.226756798789789,
+ "learning_rate": 7.014330146158367e-06,
+ "loss": 0.9039,
+ "step": 3163
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.989943171604295,
+ "learning_rate": 7.008382897598477e-06,
+ "loss": 0.9167,
+ "step": 3164
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9308190832090322,
+ "learning_rate": 7.002436810948201e-06,
+ "loss": 0.8719,
+ "step": 3165
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9420845340418191,
+ "learning_rate": 6.996491888516927e-06,
+ "loss": 0.9497,
+ "step": 3166
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9185284293256382,
+ "learning_rate": 6.990548132613592e-06,
+ "loss": 0.9822,
+ "step": 3167
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7829390437932643,
+ "learning_rate": 6.984605545546686e-06,
+ "loss": 0.9004,
+ "step": 3168
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9761632074396405,
+ "learning_rate": 6.978664129624241e-06,
+ "loss": 0.9686,
+ "step": 3169
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9062920352008544,
+ "learning_rate": 6.972723887153828e-06,
+ "loss": 0.8849,
+ "step": 3170
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8616495007297676,
+ "learning_rate": 6.9667848204425785e-06,
+ "loss": 0.8719,
+ "step": 3171
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8964126320308036,
+ "learning_rate": 6.960846931797152e-06,
+ "loss": 0.8857,
+ "step": 3172
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9264946122652956,
+ "learning_rate": 6.9549102235237565e-06,
+ "loss": 0.9398,
+ "step": 3173
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9094363996417679,
+ "learning_rate": 6.948974697928144e-06,
+ "loss": 0.8851,
+ "step": 3174
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9513388081881399,
+ "learning_rate": 6.943040357315598e-06,
+ "loss": 0.9803,
+ "step": 3175
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9433967810474849,
+ "learning_rate": 6.9371072039909515e-06,
+ "loss": 0.8724,
+ "step": 3176
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8608695133016676,
+ "learning_rate": 6.931175240258576e-06,
+ "loss": 0.9292,
+ "step": 3177
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9530960651699819,
+ "learning_rate": 6.9252444684223765e-06,
+ "loss": 0.8737,
+ "step": 3178
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.0395046231050402,
+ "learning_rate": 6.919314890785793e-06,
+ "loss": 0.9723,
+ "step": 3179
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8458885636267541,
+ "learning_rate": 6.913386509651807e-06,
+ "loss": 0.9264,
+ "step": 3180
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8955067088058976,
+ "learning_rate": 6.907459327322934e-06,
+ "loss": 0.9081,
+ "step": 3181
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7963619009041655,
+ "learning_rate": 6.90153334610122e-06,
+ "loss": 0.7736,
+ "step": 3182
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8721799260952391,
+ "learning_rate": 6.895608568288255e-06,
+ "loss": 0.8856,
+ "step": 3183
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.152388337452677,
+ "learning_rate": 6.889684996185148e-06,
+ "loss": 0.9011,
+ "step": 3184
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.875052852556438,
+ "learning_rate": 6.88376263209255e-06,
+ "loss": 0.853,
+ "step": 3185
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8642041460444178,
+ "learning_rate": 6.877841478310639e-06,
+ "loss": 0.9209,
+ "step": 3186
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9067189008349694,
+ "learning_rate": 6.871921537139117e-06,
+ "loss": 0.8886,
+ "step": 3187
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8949740080122288,
+ "learning_rate": 6.866002810877224e-06,
+ "loss": 0.8575,
+ "step": 3188
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8072031212579807,
+ "learning_rate": 6.860085301823729e-06,
+ "loss": 0.9466,
+ "step": 3189
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8708895794962733,
+ "learning_rate": 6.854169012276923e-06,
+ "loss": 0.8599,
+ "step": 3190
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9153443616477932,
+ "learning_rate": 6.848253944534622e-06,
+ "loss": 0.9016,
+ "step": 3191
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.9449029470758108,
+ "learning_rate": 6.84234010089417e-06,
+ "loss": 0.7901,
+ "step": 3192
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 1.053527491468743,
+ "learning_rate": 6.836427483652436e-06,
+ "loss": 0.9721,
+ "step": 3193
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8664564982897314,
+ "learning_rate": 6.830516095105817e-06,
+ "loss": 0.9024,
+ "step": 3194
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.8810373719043834,
+ "learning_rate": 6.824605937550224e-06,
+ "loss": 0.9008,
+ "step": 3195
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.7886223100754801,
+ "learning_rate": 6.818697013281093e-06,
+ "loss": 0.846,
+ "step": 3196
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.812628989973114,
+ "learning_rate": 6.8127893245933864e-06,
+ "loss": 0.8481,
+ "step": 3197
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.866900863830601,
+ "learning_rate": 6.806882873781579e-06,
+ "loss": 0.8875,
+ "step": 3198
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8943349204273193,
+ "learning_rate": 6.800977663139666e-06,
+ "loss": 0.952,
+ "step": 3199
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8564433978090882,
+ "learning_rate": 6.795073694961171e-06,
+ "loss": 0.8304,
+ "step": 3200
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8315503097138466,
+ "learning_rate": 6.789170971539119e-06,
+ "loss": 0.8363,
+ "step": 3201
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8096126777115353,
+ "learning_rate": 6.783269495166066e-06,
+ "loss": 0.8559,
+ "step": 3202
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9747919050753229,
+ "learning_rate": 6.777369268134076e-06,
+ "loss": 0.9068,
+ "step": 3203
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8585355916807423,
+ "learning_rate": 6.771470292734723e-06,
+ "loss": 0.8832,
+ "step": 3204
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8890707984729356,
+ "learning_rate": 6.7655725712591055e-06,
+ "loss": 0.8589,
+ "step": 3205
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0661202486493144,
+ "learning_rate": 6.759676105997834e-06,
+ "loss": 0.9119,
+ "step": 3206
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9405660104630943,
+ "learning_rate": 6.753780899241027e-06,
+ "loss": 0.9044,
+ "step": 3207
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8367646811805161,
+ "learning_rate": 6.747886953278311e-06,
+ "loss": 0.9263,
+ "step": 3208
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9893393958324957,
+ "learning_rate": 6.741994270398826e-06,
+ "loss": 0.9209,
+ "step": 3209
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9350973298039797,
+ "learning_rate": 6.736102852891227e-06,
+ "loss": 0.8402,
+ "step": 3210
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.866409124933096,
+ "learning_rate": 6.730212703043666e-06,
+ "loss": 0.9116,
+ "step": 3211
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9178950934585832,
+ "learning_rate": 6.7243238231438176e-06,
+ "loss": 0.8705,
+ "step": 3212
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8149867710860199,
+ "learning_rate": 6.718436215478849e-06,
+ "loss": 0.8652,
+ "step": 3213
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0009053893488602,
+ "learning_rate": 6.712549882335442e-06,
+ "loss": 0.8752,
+ "step": 3214
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9409873711131781,
+ "learning_rate": 6.70666482599978e-06,
+ "loss": 0.9029,
+ "step": 3215
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9812311940038729,
+ "learning_rate": 6.7007810487575475e-06,
+ "loss": 0.8897,
+ "step": 3216
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8806357632510843,
+ "learning_rate": 6.694898552893941e-06,
+ "loss": 0.9084,
+ "step": 3217
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8468073305406082,
+ "learning_rate": 6.6890173406936485e-06,
+ "loss": 0.7731,
+ "step": 3218
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.1001199693783135,
+ "learning_rate": 6.683137414440872e-06,
+ "loss": 0.96,
+ "step": 3219
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8498287756216577,
+ "learning_rate": 6.677258776419304e-06,
+ "loss": 0.845,
+ "step": 3220
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8062988634178215,
+ "learning_rate": 6.671381428912138e-06,
+ "loss": 0.9022,
+ "step": 3221
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9057542872573875,
+ "learning_rate": 6.66550537420207e-06,
+ "loss": 0.9051,
+ "step": 3222
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8990148419123214,
+ "learning_rate": 6.659630614571287e-06,
+ "loss": 0.8986,
+ "step": 3223
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8680757495365716,
+ "learning_rate": 6.653757152301488e-06,
+ "loss": 0.906,
+ "step": 3224
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.7594808796323015,
+ "learning_rate": 6.647884989673849e-06,
+ "loss": 0.8297,
+ "step": 3225
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8225326600006523,
+ "learning_rate": 6.642014128969055e-06,
+ "loss": 0.8706,
+ "step": 3226
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8995796233995702,
+ "learning_rate": 6.63614457246728e-06,
+ "loss": 0.9397,
+ "step": 3227
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9165732231002232,
+ "learning_rate": 6.630276322448188e-06,
+ "loss": 0.8998,
+ "step": 3228
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.908972191513797,
+ "learning_rate": 6.624409381190946e-06,
+ "loss": 0.9211,
+ "step": 3229
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.001488427731204,
+ "learning_rate": 6.618543750974202e-06,
+ "loss": 0.8943,
+ "step": 3230
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9928219338627889,
+ "learning_rate": 6.6126794340761025e-06,
+ "loss": 0.8631,
+ "step": 3231
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8930406631901596,
+ "learning_rate": 6.606816432774279e-06,
+ "loss": 0.9568,
+ "step": 3232
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.113876608551708,
+ "learning_rate": 6.600954749345851e-06,
+ "loss": 0.9144,
+ "step": 3233
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.895346908663339,
+ "learning_rate": 6.595094386067428e-06,
+ "loss": 0.9374,
+ "step": 3234
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8311582516467093,
+ "learning_rate": 6.589235345215117e-06,
+ "loss": 0.8193,
+ "step": 3235
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9798043650661699,
+ "learning_rate": 6.583377629064494e-06,
+ "loss": 0.9819,
+ "step": 3236
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8707960089295761,
+ "learning_rate": 6.5775212398906295e-06,
+ "loss": 0.907,
+ "step": 3237
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.100050827008296,
+ "learning_rate": 6.571666179968079e-06,
+ "loss": 0.9208,
+ "step": 3238
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9073346495015958,
+ "learning_rate": 6.565812451570881e-06,
+ "loss": 0.9239,
+ "step": 3239
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9669736853429974,
+ "learning_rate": 6.5599600569725495e-06,
+ "loss": 0.9053,
+ "step": 3240
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9335190633902349,
+ "learning_rate": 6.554108998446096e-06,
+ "loss": 0.9217,
+ "step": 3241
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8321186344952952,
+ "learning_rate": 6.548259278263999e-06,
+ "loss": 0.8223,
+ "step": 3242
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8754883224712815,
+ "learning_rate": 6.542410898698226e-06,
+ "loss": 0.9157,
+ "step": 3243
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8349280795664766,
+ "learning_rate": 6.536563862020218e-06,
+ "loss": 0.8593,
+ "step": 3244
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 1.0830970781978273,
+ "learning_rate": 6.530718170500896e-06,
+ "loss": 0.9515,
+ "step": 3245
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.9038972483968136,
+ "learning_rate": 6.524873826410658e-06,
+ "loss": 0.8754,
+ "step": 3246
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8756406283704826,
+ "learning_rate": 6.519030832019383e-06,
+ "loss": 0.9035,
+ "step": 3247
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.8863111467130707,
+ "learning_rate": 6.513189189596422e-06,
+ "loss": 0.8736,
+ "step": 3248
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8878081775063752,
+ "learning_rate": 6.507348901410604e-06,
+ "loss": 0.8879,
+ "step": 3249
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9322607572611155,
+ "learning_rate": 6.501509969730224e-06,
+ "loss": 0.9829,
+ "step": 3250
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9229288939212942,
+ "learning_rate": 6.495672396823061e-06,
+ "loss": 0.8361,
+ "step": 3251
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7614400839920861,
+ "learning_rate": 6.489836184956353e-06,
+ "loss": 0.8946,
+ "step": 3252
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.962302176364048,
+ "learning_rate": 6.484001336396828e-06,
+ "loss": 0.8738,
+ "step": 3253
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9255266287091252,
+ "learning_rate": 6.478167853410668e-06,
+ "loss": 0.8776,
+ "step": 3254
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9594207264288369,
+ "learning_rate": 6.472335738263534e-06,
+ "loss": 0.8925,
+ "step": 3255
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8826277693067501,
+ "learning_rate": 6.466504993220548e-06,
+ "loss": 0.8854,
+ "step": 3256
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0043926962250158,
+ "learning_rate": 6.460675620546305e-06,
+ "loss": 0.9604,
+ "step": 3257
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8808295764027383,
+ "learning_rate": 6.454847622504867e-06,
+ "loss": 0.8862,
+ "step": 3258
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8615785343420977,
+ "learning_rate": 6.4490210013597635e-06,
+ "loss": 0.8996,
+ "step": 3259
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8923283004057868,
+ "learning_rate": 6.4431957593739845e-06,
+ "loss": 0.8885,
+ "step": 3260
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9501948584509429,
+ "learning_rate": 6.4373718988099896e-06,
+ "loss": 0.8947,
+ "step": 3261
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0034537547985931,
+ "learning_rate": 6.431549421929694e-06,
+ "loss": 0.9398,
+ "step": 3262
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7991235447346864,
+ "learning_rate": 6.4257283309944804e-06,
+ "loss": 0.7453,
+ "step": 3263
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0237356481445665,
+ "learning_rate": 6.419908628265203e-06,
+ "loss": 0.8621,
+ "step": 3264
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9270513668056444,
+ "learning_rate": 6.414090316002161e-06,
+ "loss": 0.9018,
+ "step": 3265
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8479982940713884,
+ "learning_rate": 6.4082733964651166e-06,
+ "loss": 0.7733,
+ "step": 3266
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9594350472570427,
+ "learning_rate": 6.4024578719133e-06,
+ "loss": 0.9283,
+ "step": 3267
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7872219892547018,
+ "learning_rate": 6.396643744605391e-06,
+ "loss": 0.8897,
+ "step": 3268
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.981772336992416,
+ "learning_rate": 6.390831016799527e-06,
+ "loss": 0.903,
+ "step": 3269
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9760894652545553,
+ "learning_rate": 6.385019690753311e-06,
+ "loss": 0.9394,
+ "step": 3270
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9163084362814138,
+ "learning_rate": 6.379209768723791e-06,
+ "loss": 0.9802,
+ "step": 3271
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9129707109450066,
+ "learning_rate": 6.373401252967475e-06,
+ "loss": 0.8756,
+ "step": 3272
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9346634296238862,
+ "learning_rate": 6.367594145740324e-06,
+ "loss": 0.8876,
+ "step": 3273
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8573209127722409,
+ "learning_rate": 6.361788449297748e-06,
+ "loss": 0.9411,
+ "step": 3274
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.1461716331777836,
+ "learning_rate": 6.355984165894613e-06,
+ "loss": 0.9323,
+ "step": 3275
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.124717892477342,
+ "learning_rate": 6.350181297785242e-06,
+ "loss": 0.9554,
+ "step": 3276
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8731406719377184,
+ "learning_rate": 6.344379847223398e-06,
+ "loss": 0.9253,
+ "step": 3277
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8681069935684773,
+ "learning_rate": 6.338579816462298e-06,
+ "loss": 0.86,
+ "step": 3278
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9529648625777252,
+ "learning_rate": 6.332781207754605e-06,
+ "loss": 0.968,
+ "step": 3279
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0169042732983236,
+ "learning_rate": 6.326984023352435e-06,
+ "loss": 0.9259,
+ "step": 3280
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9291263889801283,
+ "learning_rate": 6.321188265507342e-06,
+ "loss": 0.8896,
+ "step": 3281
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8652796633882472,
+ "learning_rate": 6.31539393647034e-06,
+ "loss": 0.9151,
+ "step": 3282
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0989508823272882,
+ "learning_rate": 6.309601038491874e-06,
+ "loss": 0.9179,
+ "step": 3283
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.80919985324805,
+ "learning_rate": 6.303809573821842e-06,
+ "loss": 0.8538,
+ "step": 3284
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.7817323101895023,
+ "learning_rate": 6.298019544709579e-06,
+ "loss": 0.8658,
+ "step": 3285
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.825467080869915,
+ "learning_rate": 6.292230953403866e-06,
+ "loss": 0.8759,
+ "step": 3286
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8808874075038633,
+ "learning_rate": 6.286443802152926e-06,
+ "loss": 0.8605,
+ "step": 3287
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8842098272447734,
+ "learning_rate": 6.280658093204422e-06,
+ "loss": 0.8883,
+ "step": 3288
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9321305545714378,
+ "learning_rate": 6.274873828805459e-06,
+ "loss": 0.9111,
+ "step": 3289
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.8335970611850612,
+ "learning_rate": 6.269091011202576e-06,
+ "loss": 0.8916,
+ "step": 3290
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0056988995508562,
+ "learning_rate": 6.263309642641751e-06,
+ "loss": 0.9146,
+ "step": 3291
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9366802953362328,
+ "learning_rate": 6.257529725368405e-06,
+ "loss": 0.871,
+ "step": 3292
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9835642456560029,
+ "learning_rate": 6.251751261627386e-06,
+ "loss": 0.9502,
+ "step": 3293
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9909810001533016,
+ "learning_rate": 6.245974253662988e-06,
+ "loss": 0.9711,
+ "step": 3294
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9772797854240282,
+ "learning_rate": 6.240198703718932e-06,
+ "loss": 0.9121,
+ "step": 3295
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9191430186515294,
+ "learning_rate": 6.234424614038375e-06,
+ "loss": 0.9109,
+ "step": 3296
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9442372435701061,
+ "learning_rate": 6.2286519868639095e-06,
+ "loss": 0.9528,
+ "step": 3297
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0828132026723065,
+ "learning_rate": 6.222880824437549e-06,
+ "loss": 0.9741,
+ "step": 3298
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 1.0425060423186634,
+ "learning_rate": 6.217111129000759e-06,
+ "loss": 0.9251,
+ "step": 3299
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.9939427724604535,
+ "learning_rate": 6.211342902794413e-06,
+ "loss": 0.9615,
+ "step": 3300
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0709194747261073,
+ "learning_rate": 6.205576148058828e-06,
+ "loss": 0.8744,
+ "step": 3301
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8692419531508614,
+ "learning_rate": 6.199810867033745e-06,
+ "loss": 0.9191,
+ "step": 3302
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9147638629214966,
+ "learning_rate": 6.19404706195833e-06,
+ "loss": 0.9312,
+ "step": 3303
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.336183878351168,
+ "learning_rate": 6.188284735071177e-06,
+ "loss": 0.9113,
+ "step": 3304
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.767747365788594,
+ "learning_rate": 6.182523888610316e-06,
+ "loss": 0.8828,
+ "step": 3305
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8013724589912433,
+ "learning_rate": 6.176764524813187e-06,
+ "loss": 0.8864,
+ "step": 3306
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8038368097561518,
+ "learning_rate": 6.171006645916662e-06,
+ "loss": 0.8496,
+ "step": 3307
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7372647028306176,
+ "learning_rate": 6.165250254157032e-06,
+ "loss": 0.8084,
+ "step": 3308
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.1111603718791627,
+ "learning_rate": 6.159495351770017e-06,
+ "loss": 0.9726,
+ "step": 3309
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9140283971012301,
+ "learning_rate": 6.153741940990749e-06,
+ "loss": 0.9411,
+ "step": 3310
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0068704482869333,
+ "learning_rate": 6.1479900240537956e-06,
+ "loss": 0.9066,
+ "step": 3311
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.158059516546419,
+ "learning_rate": 6.142239603193128e-06,
+ "loss": 0.9694,
+ "step": 3312
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7684388697508447,
+ "learning_rate": 6.136490680642146e-06,
+ "loss": 0.8641,
+ "step": 3313
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9502013498861337,
+ "learning_rate": 6.130743258633667e-06,
+ "loss": 0.9401,
+ "step": 3314
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9935841058924265,
+ "learning_rate": 6.124997339399916e-06,
+ "loss": 0.9308,
+ "step": 3315
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.887957567770159,
+ "learning_rate": 6.119252925172549e-06,
+ "loss": 0.8984,
+ "step": 3316
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0171068829241392,
+ "learning_rate": 6.113510018182628e-06,
+ "loss": 0.895,
+ "step": 3317
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9844102370676482,
+ "learning_rate": 6.107768620660633e-06,
+ "loss": 0.9476,
+ "step": 3318
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7994543878681762,
+ "learning_rate": 6.102028734836456e-06,
+ "loss": 0.8659,
+ "step": 3319
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9536635985725865,
+ "learning_rate": 6.0962903629394e-06,
+ "loss": 0.8841,
+ "step": 3320
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0260922613161438,
+ "learning_rate": 6.090553507198187e-06,
+ "loss": 0.9875,
+ "step": 3321
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9285081006115551,
+ "learning_rate": 6.0848181698409384e-06,
+ "loss": 0.9077,
+ "step": 3322
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8558549149558954,
+ "learning_rate": 6.079084353095202e-06,
+ "loss": 0.8606,
+ "step": 3323
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9273874590394057,
+ "learning_rate": 6.07335205918792e-06,
+ "loss": 0.9213,
+ "step": 3324
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8119799939792245,
+ "learning_rate": 6.067621290345455e-06,
+ "loss": 0.8365,
+ "step": 3325
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8286164780478104,
+ "learning_rate": 6.061892048793568e-06,
+ "loss": 0.8337,
+ "step": 3326
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9665081994804595,
+ "learning_rate": 6.056164336757426e-06,
+ "loss": 0.9553,
+ "step": 3327
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9398621025319048,
+ "learning_rate": 6.050438156461613e-06,
+ "loss": 0.9324,
+ "step": 3328
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9205647530985112,
+ "learning_rate": 6.044713510130108e-06,
+ "loss": 0.8776,
+ "step": 3329
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9597637278577276,
+ "learning_rate": 6.038990399986302e-06,
+ "loss": 0.9598,
+ "step": 3330
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0622050440423265,
+ "learning_rate": 6.03326882825298e-06,
+ "loss": 0.9359,
+ "step": 3331
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9546111945962177,
+ "learning_rate": 6.027548797152336e-06,
+ "loss": 0.884,
+ "step": 3332
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8147346221091533,
+ "learning_rate": 6.021830308905963e-06,
+ "loss": 0.8514,
+ "step": 3333
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9476591290817805,
+ "learning_rate": 6.016113365734861e-06,
+ "loss": 0.8823,
+ "step": 3334
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.7697781681624081,
+ "learning_rate": 6.0103979698594215e-06,
+ "loss": 0.8188,
+ "step": 3335
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8380004631866774,
+ "learning_rate": 6.004684123499436e-06,
+ "loss": 0.8763,
+ "step": 3336
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9847565034185128,
+ "learning_rate": 5.998971828874102e-06,
+ "loss": 0.9596,
+ "step": 3337
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9537746778246229,
+ "learning_rate": 5.993261088202005e-06,
+ "loss": 0.9939,
+ "step": 3338
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9336493198264489,
+ "learning_rate": 5.987551903701128e-06,
+ "loss": 0.8761,
+ "step": 3339
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9830557131025129,
+ "learning_rate": 5.9818442775888595e-06,
+ "loss": 0.9447,
+ "step": 3340
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9501202029008352,
+ "learning_rate": 5.97613821208197e-06,
+ "loss": 0.8117,
+ "step": 3341
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8121481938176214,
+ "learning_rate": 5.970433709396635e-06,
+ "loss": 0.8216,
+ "step": 3342
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8980032297592268,
+ "learning_rate": 5.964730771748415e-06,
+ "loss": 0.88,
+ "step": 3343
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9215562969639635,
+ "learning_rate": 5.959029401352262e-06,
+ "loss": 0.9375,
+ "step": 3344
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9530706788338669,
+ "learning_rate": 5.953329600422524e-06,
+ "loss": 0.9565,
+ "step": 3345
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9341692476239544,
+ "learning_rate": 5.947631371172943e-06,
+ "loss": 0.8829,
+ "step": 3346
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8433878370268778,
+ "learning_rate": 5.941934715816642e-06,
+ "loss": 0.8587,
+ "step": 3347
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 1.0107100707401744,
+ "learning_rate": 5.936239636566137e-06,
+ "loss": 0.9015,
+ "step": 3348
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9462242224813096,
+ "learning_rate": 5.930546135633327e-06,
+ "loss": 0.9422,
+ "step": 3349
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.8435469696030137,
+ "learning_rate": 5.924854215229509e-06,
+ "loss": 0.9209,
+ "step": 3350
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9257406462746642,
+ "learning_rate": 5.919163877565351e-06,
+ "loss": 0.9302,
+ "step": 3351
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.9063894710732844,
+ "learning_rate": 5.9134751248509236e-06,
+ "loss": 0.9544,
+ "step": 3352
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9105088917858691,
+ "learning_rate": 5.9077879592956675e-06,
+ "loss": 0.9326,
+ "step": 3353
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8595235276291397,
+ "learning_rate": 5.902102383108415e-06,
+ "loss": 0.9248,
+ "step": 3354
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8126304130933923,
+ "learning_rate": 5.896418398497377e-06,
+ "loss": 0.9073,
+ "step": 3355
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8138664165499582,
+ "learning_rate": 5.890736007670144e-06,
+ "loss": 0.7843,
+ "step": 3356
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0070584504088194,
+ "learning_rate": 5.885055212833696e-06,
+ "loss": 0.9664,
+ "step": 3357
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8544585209072542,
+ "learning_rate": 5.879376016194387e-06,
+ "loss": 0.9101,
+ "step": 3358
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9155485542383524,
+ "learning_rate": 5.873698419957952e-06,
+ "loss": 0.883,
+ "step": 3359
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9940606811836806,
+ "learning_rate": 5.8680224263295045e-06,
+ "loss": 0.9228,
+ "step": 3360
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9437517206434091,
+ "learning_rate": 5.862348037513533e-06,
+ "loss": 0.9266,
+ "step": 3361
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8609221173533104,
+ "learning_rate": 5.856675255713905e-06,
+ "loss": 0.838,
+ "step": 3362
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9434007416909821,
+ "learning_rate": 5.851004083133862e-06,
+ "loss": 0.9064,
+ "step": 3363
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0290108872454056,
+ "learning_rate": 5.8453345219760275e-06,
+ "loss": 0.9372,
+ "step": 3364
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8781260878318194,
+ "learning_rate": 5.839666574442389e-06,
+ "loss": 0.845,
+ "step": 3365
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7702037206071264,
+ "learning_rate": 5.834000242734317e-06,
+ "loss": 0.82,
+ "step": 3366
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9472885689463841,
+ "learning_rate": 5.828335529052541e-06,
+ "loss": 0.8872,
+ "step": 3367
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8874860454985124,
+ "learning_rate": 5.822672435597172e-06,
+ "loss": 0.8784,
+ "step": 3368
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8367233331672315,
+ "learning_rate": 5.817010964567702e-06,
+ "loss": 0.8681,
+ "step": 3369
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9883916160905318,
+ "learning_rate": 5.811351118162969e-06,
+ "loss": 0.8989,
+ "step": 3370
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8722316268623705,
+ "learning_rate": 5.805692898581196e-06,
+ "loss": 0.8807,
+ "step": 3371
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8775362998064364,
+ "learning_rate": 5.800036308019974e-06,
+ "loss": 0.953,
+ "step": 3372
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7946634593097527,
+ "learning_rate": 5.79438134867625e-06,
+ "loss": 0.8761,
+ "step": 3373
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8844695809108578,
+ "learning_rate": 5.788728022746348e-06,
+ "loss": 0.8683,
+ "step": 3374
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9326174041987463,
+ "learning_rate": 5.783076332425957e-06,
+ "loss": 0.9111,
+ "step": 3375
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7642844190506682,
+ "learning_rate": 5.777426279910125e-06,
+ "loss": 0.8927,
+ "step": 3376
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8053230945516049,
+ "learning_rate": 5.771777867393275e-06,
+ "loss": 0.8583,
+ "step": 3377
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.946573280704107,
+ "learning_rate": 5.766131097069174e-06,
+ "loss": 0.9214,
+ "step": 3378
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8626890508507832,
+ "learning_rate": 5.760485971130969e-06,
+ "loss": 0.8129,
+ "step": 3379
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8596996597665849,
+ "learning_rate": 5.7548424917711596e-06,
+ "loss": 0.8744,
+ "step": 3380
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8776164716085274,
+ "learning_rate": 5.749200661181611e-06,
+ "loss": 0.8434,
+ "step": 3381
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.005509266985471,
+ "learning_rate": 5.7435604815535475e-06,
+ "loss": 0.9409,
+ "step": 3382
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9743735277936009,
+ "learning_rate": 5.7379219550775415e-06,
+ "loss": 0.9028,
+ "step": 3383
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9151985407085292,
+ "learning_rate": 5.732285083943537e-06,
+ "loss": 0.9299,
+ "step": 3384
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8440116883810425,
+ "learning_rate": 5.726649870340833e-06,
+ "loss": 0.8652,
+ "step": 3385
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9900867577380902,
+ "learning_rate": 5.721016316458068e-06,
+ "loss": 0.9247,
+ "step": 3386
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9626858705845855,
+ "learning_rate": 5.715384424483268e-06,
+ "loss": 0.9017,
+ "step": 3387
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9798722326172483,
+ "learning_rate": 5.709754196603781e-06,
+ "loss": 0.9243,
+ "step": 3388
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.017737212119303,
+ "learning_rate": 5.704125635006329e-06,
+ "loss": 0.9333,
+ "step": 3389
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.969046163856431,
+ "learning_rate": 5.6984987418769825e-06,
+ "loss": 0.9003,
+ "step": 3390
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7089081712102512,
+ "learning_rate": 5.692873519401154e-06,
+ "loss": 0.7972,
+ "step": 3391
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0159028505390155,
+ "learning_rate": 5.6872499697636195e-06,
+ "loss": 0.9637,
+ "step": 3392
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9326793210567529,
+ "learning_rate": 5.681628095148502e-06,
+ "loss": 0.9484,
+ "step": 3393
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.8514463972750765,
+ "learning_rate": 5.6760078977392706e-06,
+ "loss": 0.8651,
+ "step": 3394
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.049771217877223,
+ "learning_rate": 5.67038937971875e-06,
+ "loss": 0.9004,
+ "step": 3395
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.7209213774076711,
+ "learning_rate": 5.664772543269101e-06,
+ "loss": 0.8291,
+ "step": 3396
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9663480024894006,
+ "learning_rate": 5.659157390571842e-06,
+ "loss": 0.8783,
+ "step": 3397
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.0748229237280895,
+ "learning_rate": 5.653543923807833e-06,
+ "loss": 0.9402,
+ "step": 3398
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.005878011868789,
+ "learning_rate": 5.6479321451572785e-06,
+ "loss": 0.9077,
+ "step": 3399
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9011630561254075,
+ "learning_rate": 5.642322056799732e-06,
+ "loss": 0.8952,
+ "step": 3400
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.1193408050557743,
+ "learning_rate": 5.636713660914087e-06,
+ "loss": 1.0096,
+ "step": 3401
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.101405618492286,
+ "learning_rate": 5.631106959678575e-06,
+ "loss": 0.9389,
+ "step": 3402
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.9475734112345786,
+ "learning_rate": 5.625501955270777e-06,
+ "loss": 0.8692,
+ "step": 3403
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 1.2023077285184303,
+ "learning_rate": 5.619898649867612e-06,
+ "loss": 0.9241,
+ "step": 3404
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8490728639623257,
+ "learning_rate": 5.614297045645339e-06,
+ "loss": 0.9202,
+ "step": 3405
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9737075166729794,
+ "learning_rate": 5.6086971447795625e-06,
+ "loss": 0.9002,
+ "step": 3406
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0102837495005053,
+ "learning_rate": 5.603098949445209e-06,
+ "loss": 0.8761,
+ "step": 3407
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.873161086030156,
+ "learning_rate": 5.597502461816557e-06,
+ "loss": 0.8266,
+ "step": 3408
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9661948113920387,
+ "learning_rate": 5.5919076840672215e-06,
+ "loss": 0.9593,
+ "step": 3409
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.952311111394339,
+ "learning_rate": 5.5863146183701454e-06,
+ "loss": 0.9037,
+ "step": 3410
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9993243482108803,
+ "learning_rate": 5.580723266897616e-06,
+ "loss": 0.9224,
+ "step": 3411
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8324360584686835,
+ "learning_rate": 5.575133631821243e-06,
+ "loss": 0.8121,
+ "step": 3412
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0682116747233388,
+ "learning_rate": 5.5695457153119806e-06,
+ "loss": 0.9308,
+ "step": 3413
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7986522384159873,
+ "learning_rate": 5.563959519540114e-06,
+ "loss": 0.8701,
+ "step": 3414
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8644591382554868,
+ "learning_rate": 5.558375046675244e-06,
+ "loss": 0.8844,
+ "step": 3415
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9705469050856875,
+ "learning_rate": 5.552792298886335e-06,
+ "loss": 0.9435,
+ "step": 3416
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8030372860824314,
+ "learning_rate": 5.547211278341646e-06,
+ "loss": 0.8828,
+ "step": 3417
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.014873471767318,
+ "learning_rate": 5.541631987208789e-06,
+ "loss": 0.9233,
+ "step": 3418
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9161763853357491,
+ "learning_rate": 5.536054427654698e-06,
+ "loss": 0.8159,
+ "step": 3419
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.856965638509794,
+ "learning_rate": 5.530478601845624e-06,
+ "loss": 0.8874,
+ "step": 3420
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0425395595947131,
+ "learning_rate": 5.52490451194716e-06,
+ "loss": 0.9189,
+ "step": 3421
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8880815421138911,
+ "learning_rate": 5.519332160124215e-06,
+ "loss": 0.8874,
+ "step": 3422
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8485731127474028,
+ "learning_rate": 5.513761548541032e-06,
+ "loss": 0.8559,
+ "step": 3423
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8615152400129891,
+ "learning_rate": 5.508192679361169e-06,
+ "loss": 0.9138,
+ "step": 3424
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9770857467762206,
+ "learning_rate": 5.502625554747508e-06,
+ "loss": 0.9296,
+ "step": 3425
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8721669442302464,
+ "learning_rate": 5.497060176862259e-06,
+ "loss": 0.8836,
+ "step": 3426
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8437100573198295,
+ "learning_rate": 5.491496547866948e-06,
+ "loss": 0.9058,
+ "step": 3427
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9201560209677323,
+ "learning_rate": 5.485934669922428e-06,
+ "loss": 0.9015,
+ "step": 3428
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7943515859818367,
+ "learning_rate": 5.480374545188866e-06,
+ "loss": 0.8488,
+ "step": 3429
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1848453656826603,
+ "learning_rate": 5.474816175825754e-06,
+ "loss": 0.9261,
+ "step": 3430
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.6916057405289044,
+ "learning_rate": 5.469259563991894e-06,
+ "loss": 0.7851,
+ "step": 3431
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7751840845134919,
+ "learning_rate": 5.46370471184541e-06,
+ "loss": 0.8706,
+ "step": 3432
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1048606617126446,
+ "learning_rate": 5.458151621543744e-06,
+ "loss": 0.8476,
+ "step": 3433
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1955957046056491,
+ "learning_rate": 5.452600295243653e-06,
+ "loss": 0.9248,
+ "step": 3434
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8214911260216353,
+ "learning_rate": 5.4470507351012116e-06,
+ "loss": 0.8425,
+ "step": 3435
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9478794539977947,
+ "learning_rate": 5.441502943271797e-06,
+ "loss": 0.9477,
+ "step": 3436
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9237745726439537,
+ "learning_rate": 5.4359569219101115e-06,
+ "loss": 0.9152,
+ "step": 3437
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9901683414325779,
+ "learning_rate": 5.430412673170167e-06,
+ "loss": 0.9568,
+ "step": 3438
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8629495060985444,
+ "learning_rate": 5.424870199205283e-06,
+ "loss": 0.923,
+ "step": 3439
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.901315763691827,
+ "learning_rate": 5.4193295021681e-06,
+ "loss": 0.8619,
+ "step": 3440
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8598993833740752,
+ "learning_rate": 5.413790584210551e-06,
+ "loss": 0.8478,
+ "step": 3441
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9830815371446981,
+ "learning_rate": 5.408253447483892e-06,
+ "loss": 0.9587,
+ "step": 3442
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9108769108808722,
+ "learning_rate": 5.402718094138688e-06,
+ "loss": 0.897,
+ "step": 3443
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.180401739791126,
+ "learning_rate": 5.397184526324792e-06,
+ "loss": 0.9519,
+ "step": 3444
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8650951591031756,
+ "learning_rate": 5.391652746191398e-06,
+ "loss": 0.9322,
+ "step": 3445
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7978040458964462,
+ "learning_rate": 5.38612275588697e-06,
+ "loss": 0.9363,
+ "step": 3446
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9857910038109453,
+ "learning_rate": 5.380594557559298e-06,
+ "loss": 0.9757,
+ "step": 3447
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.0469742863993499,
+ "learning_rate": 5.375068153355474e-06,
+ "loss": 0.8857,
+ "step": 3448
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8684083143914649,
+ "learning_rate": 5.369543545421883e-06,
+ "loss": 0.9735,
+ "step": 3449
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9619029330079326,
+ "learning_rate": 5.364020735904223e-06,
+ "loss": 0.9339,
+ "step": 3450
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.8452340682502787,
+ "learning_rate": 5.358499726947488e-06,
+ "loss": 0.8801,
+ "step": 3451
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9216605665459061,
+ "learning_rate": 5.352980520695974e-06,
+ "loss": 0.8933,
+ "step": 3452
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9573668387838171,
+ "learning_rate": 5.347463119293283e-06,
+ "loss": 0.9458,
+ "step": 3453
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 1.1123828008567809,
+ "learning_rate": 5.341947524882301e-06,
+ "loss": 1.0189,
+ "step": 3454
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.7799207423793345,
+ "learning_rate": 5.336433739605227e-06,
+ "loss": 0.8433,
+ "step": 3455
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.9978239359608753,
+ "learning_rate": 5.330921765603549e-06,
+ "loss": 0.9548,
+ "step": 3456
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8877842351599056,
+ "learning_rate": 5.325411605018056e-06,
+ "loss": 0.8651,
+ "step": 3457
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8498171853637153,
+ "learning_rate": 5.31990325998883e-06,
+ "loss": 0.8913,
+ "step": 3458
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8677557509648534,
+ "learning_rate": 5.314396732655253e-06,
+ "loss": 0.9245,
+ "step": 3459
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.7876558975530807,
+ "learning_rate": 5.308892025155989e-06,
+ "loss": 0.8575,
+ "step": 3460
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8988856371919549,
+ "learning_rate": 5.303389139629007e-06,
+ "loss": 0.9101,
+ "step": 3461
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.7702792353501093,
+ "learning_rate": 5.297888078211564e-06,
+ "loss": 0.7773,
+ "step": 3462
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0425983489613044,
+ "learning_rate": 5.2923888430402085e-06,
+ "loss": 0.9101,
+ "step": 3463
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9702026580483017,
+ "learning_rate": 5.286891436250785e-06,
+ "loss": 0.8841,
+ "step": 3464
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.938621341124513,
+ "learning_rate": 5.281395859978414e-06,
+ "loss": 0.9387,
+ "step": 3465
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.917526090070358,
+ "learning_rate": 5.2759021163575184e-06,
+ "loss": 0.8938,
+ "step": 3466
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0450012948924594,
+ "learning_rate": 5.27041020752181e-06,
+ "loss": 0.9181,
+ "step": 3467
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0748790354613298,
+ "learning_rate": 5.26492013560427e-06,
+ "loss": 0.8597,
+ "step": 3468
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9187536500879364,
+ "learning_rate": 5.259431902737195e-06,
+ "loss": 0.9202,
+ "step": 3469
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0014879474178906,
+ "learning_rate": 5.2539455110521385e-06,
+ "loss": 0.9597,
+ "step": 3470
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.882044465734256,
+ "learning_rate": 5.248460962679958e-06,
+ "loss": 0.911,
+ "step": 3471
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9004670266382829,
+ "learning_rate": 5.24297825975079e-06,
+ "loss": 0.8858,
+ "step": 3472
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8306911476871505,
+ "learning_rate": 5.237497404394044e-06,
+ "loss": 0.8999,
+ "step": 3473
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8913704251594962,
+ "learning_rate": 5.232018398738436e-06,
+ "loss": 0.8846,
+ "step": 3474
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9629493517986126,
+ "learning_rate": 5.226541244911936e-06,
+ "loss": 0.9177,
+ "step": 3475
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9322762849895656,
+ "learning_rate": 5.221065945041811e-06,
+ "loss": 0.8872,
+ "step": 3476
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9157068821660157,
+ "learning_rate": 5.215592501254609e-06,
+ "loss": 0.9044,
+ "step": 3477
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9299916317898786,
+ "learning_rate": 5.210120915676147e-06,
+ "loss": 0.9175,
+ "step": 3478
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9492664707809794,
+ "learning_rate": 5.2046511904315265e-06,
+ "loss": 0.8981,
+ "step": 3479
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.752313027702845,
+ "learning_rate": 5.199183327645128e-06,
+ "loss": 0.8523,
+ "step": 3480
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9462743046685494,
+ "learning_rate": 5.193717329440604e-06,
+ "loss": 0.8856,
+ "step": 3481
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8622878800414172,
+ "learning_rate": 5.188253197940889e-06,
+ "loss": 0.8037,
+ "step": 3482
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9723519003250821,
+ "learning_rate": 5.182790935268185e-06,
+ "loss": 0.9007,
+ "step": 3483
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.1132024580277993,
+ "learning_rate": 5.177330543543971e-06,
+ "loss": 0.886,
+ "step": 3484
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.218071464406349,
+ "learning_rate": 5.171872024889004e-06,
+ "loss": 0.9158,
+ "step": 3485
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.841544800055019,
+ "learning_rate": 5.166415381423306e-06,
+ "loss": 0.9115,
+ "step": 3486
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0313558388714028,
+ "learning_rate": 5.160960615266179e-06,
+ "loss": 0.9216,
+ "step": 3487
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8598936069898658,
+ "learning_rate": 5.155507728536191e-06,
+ "loss": 0.8526,
+ "step": 3488
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8501831376800932,
+ "learning_rate": 5.150056723351173e-06,
+ "loss": 0.8443,
+ "step": 3489
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8261694114785857,
+ "learning_rate": 5.14460760182824e-06,
+ "loss": 0.8292,
+ "step": 3490
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9169323447691452,
+ "learning_rate": 5.139160366083765e-06,
+ "loss": 0.8935,
+ "step": 3491
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8924186076730839,
+ "learning_rate": 5.133715018233393e-06,
+ "loss": 0.8515,
+ "step": 3492
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8536900006938354,
+ "learning_rate": 5.128271560392037e-06,
+ "loss": 0.875,
+ "step": 3493
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.78616365378165,
+ "learning_rate": 5.122829994673866e-06,
+ "loss": 0.8538,
+ "step": 3494
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9796149128469018,
+ "learning_rate": 5.117390323192326e-06,
+ "loss": 0.9023,
+ "step": 3495
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9537109643837329,
+ "learning_rate": 5.111952548060126e-06,
+ "loss": 0.8677,
+ "step": 3496
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0113114525340483,
+ "learning_rate": 5.106516671389224e-06,
+ "loss": 0.9101,
+ "step": 3497
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8818485387427726,
+ "learning_rate": 5.101082695290866e-06,
+ "loss": 0.8817,
+ "step": 3498
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8490731104838284,
+ "learning_rate": 5.0956506218755344e-06,
+ "loss": 0.8968,
+ "step": 3499
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9062471249705372,
+ "learning_rate": 5.09022045325299e-06,
+ "loss": 0.9487,
+ "step": 3500
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8966273313807227,
+ "learning_rate": 5.0847921915322486e-06,
+ "loss": 0.91,
+ "step": 3501
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0164216103994779,
+ "learning_rate": 5.07936583882158e-06,
+ "loss": 0.8968,
+ "step": 3502
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.969816784008669,
+ "learning_rate": 5.073941397228518e-06,
+ "loss": 0.9295,
+ "step": 3503
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9186888582933629,
+ "learning_rate": 5.068518868859854e-06,
+ "loss": 0.961,
+ "step": 3504
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.9719789691069631,
+ "learning_rate": 5.063098255821637e-06,
+ "loss": 0.9147,
+ "step": 3505
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.8443109595221848,
+ "learning_rate": 5.0576795602191734e-06,
+ "loss": 0.8007,
+ "step": 3506
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 1.0285499892269294,
+ "learning_rate": 5.052262784157014e-06,
+ "loss": 0.9377,
+ "step": 3507
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.848800503785638,
+ "learning_rate": 5.046847929738971e-06,
+ "loss": 0.846,
+ "step": 3508
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9289613009230611,
+ "learning_rate": 5.041434999068127e-06,
+ "loss": 0.8885,
+ "step": 3509
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9742100804060939,
+ "learning_rate": 5.036023994246787e-06,
+ "loss": 0.8607,
+ "step": 3510
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9279770257742669,
+ "learning_rate": 5.030614917376532e-06,
+ "loss": 0.8565,
+ "step": 3511
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.948576769450609,
+ "learning_rate": 5.025207770558176e-06,
+ "loss": 0.9564,
+ "step": 3512
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8738299741755271,
+ "learning_rate": 5.0198025558917985e-06,
+ "loss": 0.8759,
+ "step": 3513
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0576587162071487,
+ "learning_rate": 5.014399275476721e-06,
+ "loss": 0.9377,
+ "step": 3514
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7807765494674233,
+ "learning_rate": 5.008997931411517e-06,
+ "loss": 0.8174,
+ "step": 3515
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0736030632697564,
+ "learning_rate": 5.003598525794002e-06,
+ "loss": 0.9407,
+ "step": 3516
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9528980831443683,
+ "learning_rate": 4.998201060721253e-06,
+ "loss": 0.9393,
+ "step": 3517
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8724255799327322,
+ "learning_rate": 4.992805538289571e-06,
+ "loss": 0.8755,
+ "step": 3518
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8320551639912551,
+ "learning_rate": 4.987411960594521e-06,
+ "loss": 0.8651,
+ "step": 3519
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9031541070879855,
+ "learning_rate": 4.982020329730904e-06,
+ "loss": 0.9217,
+ "step": 3520
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9416704158419092,
+ "learning_rate": 4.976630647792771e-06,
+ "loss": 0.8387,
+ "step": 3521
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9104537436743043,
+ "learning_rate": 4.971242916873412e-06,
+ "loss": 0.8829,
+ "step": 3522
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0406748404431048,
+ "learning_rate": 4.965857139065354e-06,
+ "loss": 0.8229,
+ "step": 3523
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.1918054177782331,
+ "learning_rate": 4.9604733164603755e-06,
+ "loss": 1.05,
+ "step": 3524
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0500021260919787,
+ "learning_rate": 4.955091451149495e-06,
+ "loss": 0.9417,
+ "step": 3525
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9798488709150643,
+ "learning_rate": 4.9497115452229535e-06,
+ "loss": 0.9418,
+ "step": 3526
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8378640528558038,
+ "learning_rate": 4.9443336007702614e-06,
+ "loss": 0.839,
+ "step": 3527
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9169392255615451,
+ "learning_rate": 4.938957619880138e-06,
+ "loss": 0.9173,
+ "step": 3528
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9630003863193658,
+ "learning_rate": 4.9335836046405575e-06,
+ "loss": 0.9257,
+ "step": 3529
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0750064623369548,
+ "learning_rate": 4.928211557138728e-06,
+ "loss": 0.9082,
+ "step": 3530
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.2048749619137136,
+ "learning_rate": 4.922841479461083e-06,
+ "loss": 0.9164,
+ "step": 3531
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0203452135898181,
+ "learning_rate": 4.917473373693305e-06,
+ "loss": 0.848,
+ "step": 3532
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0157634466515504,
+ "learning_rate": 4.9121072419203016e-06,
+ "loss": 0.9171,
+ "step": 3533
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8420438484394206,
+ "learning_rate": 4.906743086226218e-06,
+ "loss": 0.9127,
+ "step": 3534
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8714977975687543,
+ "learning_rate": 4.901380908694434e-06,
+ "loss": 0.8889,
+ "step": 3535
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9561030187654711,
+ "learning_rate": 4.8960207114075495e-06,
+ "loss": 0.9149,
+ "step": 3536
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8655696062325442,
+ "learning_rate": 4.890662496447407e-06,
+ "loss": 0.8512,
+ "step": 3537
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.1995418153027713,
+ "learning_rate": 4.8853062658950765e-06,
+ "loss": 0.9337,
+ "step": 3538
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8345348055716388,
+ "learning_rate": 4.879952021830856e-06,
+ "loss": 0.8593,
+ "step": 3539
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9469728302461272,
+ "learning_rate": 4.874599766334276e-06,
+ "loss": 0.9105,
+ "step": 3540
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7108020023733196,
+ "learning_rate": 4.8692495014840825e-06,
+ "loss": 0.8494,
+ "step": 3541
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9570948535842642,
+ "learning_rate": 4.863901229358261e-06,
+ "loss": 0.888,
+ "step": 3542
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9003542739663155,
+ "learning_rate": 4.858554952034019e-06,
+ "loss": 0.9285,
+ "step": 3543
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9129049815418941,
+ "learning_rate": 4.853210671587789e-06,
+ "loss": 0.8085,
+ "step": 3544
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.9280821059460705,
+ "learning_rate": 4.847868390095227e-06,
+ "loss": 0.9347,
+ "step": 3545
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0447361437896092,
+ "learning_rate": 4.842528109631218e-06,
+ "loss": 0.9781,
+ "step": 3546
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.007431358107507,
+ "learning_rate": 4.837189832269858e-06,
+ "loss": 0.9104,
+ "step": 3547
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.761600653498451,
+ "learning_rate": 4.8318535600844775e-06,
+ "loss": 0.848,
+ "step": 3548
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.86557119556584,
+ "learning_rate": 4.8265192951476206e-06,
+ "loss": 0.9265,
+ "step": 3549
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.017409110173373,
+ "learning_rate": 4.8211870395310556e-06,
+ "loss": 0.8872,
+ "step": 3550
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8784477424717826,
+ "learning_rate": 4.815856795305772e-06,
+ "loss": 0.9062,
+ "step": 3551
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.984678411366913,
+ "learning_rate": 4.81052856454197e-06,
+ "loss": 0.8783,
+ "step": 3552
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8168804136984219,
+ "learning_rate": 4.805202349309074e-06,
+ "loss": 0.8347,
+ "step": 3553
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.230707134890969,
+ "learning_rate": 4.7998781516757295e-06,
+ "loss": 0.9391,
+ "step": 3554
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.927832533010434,
+ "learning_rate": 4.794555973709783e-06,
+ "loss": 0.8698,
+ "step": 3555
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8985962191097416,
+ "learning_rate": 4.789235817478322e-06,
+ "loss": 0.8865,
+ "step": 3556
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.7628225331553036,
+ "learning_rate": 4.783917685047621e-06,
+ "loss": 0.8535,
+ "step": 3557
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 1.0530196419991154,
+ "learning_rate": 4.778601578483187e-06,
+ "loss": 0.9481,
+ "step": 3558
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8505827536068966,
+ "learning_rate": 4.773287499849737e-06,
+ "loss": 0.7899,
+ "step": 3559
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.8592761492230445,
+ "learning_rate": 4.767975451211191e-06,
+ "loss": 0.8766,
+ "step": 3560
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8549943462053834,
+ "learning_rate": 4.762665434630692e-06,
+ "loss": 0.889,
+ "step": 3561
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8582391234373967,
+ "learning_rate": 4.757357452170588e-06,
+ "loss": 0.8579,
+ "step": 3562
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9547029286790802,
+ "learning_rate": 4.752051505892438e-06,
+ "loss": 0.9169,
+ "step": 3563
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.863217768449546,
+ "learning_rate": 4.746747597857014e-06,
+ "loss": 0.8767,
+ "step": 3564
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9146603782370458,
+ "learning_rate": 4.741445730124287e-06,
+ "loss": 0.9497,
+ "step": 3565
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9196974301843103,
+ "learning_rate": 4.736145904753445e-06,
+ "loss": 0.8861,
+ "step": 3566
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9557480114421328,
+ "learning_rate": 4.730848123802877e-06,
+ "loss": 0.8835,
+ "step": 3567
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9104180211533642,
+ "learning_rate": 4.725552389330183e-06,
+ "loss": 0.9101,
+ "step": 3568
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8632313053918585,
+ "learning_rate": 4.720258703392161e-06,
+ "loss": 0.9004,
+ "step": 3569
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9260839455836017,
+ "learning_rate": 4.714967068044826e-06,
+ "loss": 0.9357,
+ "step": 3570
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9090808377784159,
+ "learning_rate": 4.7096774853433765e-06,
+ "loss": 0.8252,
+ "step": 3571
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9913037418742416,
+ "learning_rate": 4.704389957342237e-06,
+ "loss": 0.9041,
+ "step": 3572
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.851799669945169,
+ "learning_rate": 4.699104486095008e-06,
+ "loss": 0.9213,
+ "step": 3573
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.919488826319392,
+ "learning_rate": 4.69382107365452e-06,
+ "loss": 0.9516,
+ "step": 3574
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8447172980269063,
+ "learning_rate": 4.6885397220727855e-06,
+ "loss": 0.8193,
+ "step": 3575
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9834920335283767,
+ "learning_rate": 4.683260433401016e-06,
+ "loss": 0.9533,
+ "step": 3576
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.013062596277445,
+ "learning_rate": 4.677983209689631e-06,
+ "loss": 0.8396,
+ "step": 3577
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.973626006187746,
+ "learning_rate": 4.6727080529882394e-06,
+ "loss": 0.854,
+ "step": 3578
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9454478664953159,
+ "learning_rate": 4.667434965345654e-06,
+ "loss": 0.9091,
+ "step": 3579
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.919546248047086,
+ "learning_rate": 4.6621639488098856e-06,
+ "loss": 0.9519,
+ "step": 3580
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9886482423401748,
+ "learning_rate": 4.656895005428127e-06,
+ "loss": 0.8573,
+ "step": 3581
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8844947281806744,
+ "learning_rate": 4.651628137246781e-06,
+ "loss": 0.8831,
+ "step": 3582
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9195399134259411,
+ "learning_rate": 4.6463633463114395e-06,
+ "loss": 0.9275,
+ "step": 3583
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8832282550046601,
+ "learning_rate": 4.641100634666877e-06,
+ "loss": 0.8965,
+ "step": 3584
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7755744881842765,
+ "learning_rate": 4.635840004357086e-06,
+ "loss": 0.7934,
+ "step": 3585
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9326589562514537,
+ "learning_rate": 4.630581457425222e-06,
+ "loss": 0.858,
+ "step": 3586
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9542507937829572,
+ "learning_rate": 4.625324995913648e-06,
+ "loss": 0.9047,
+ "step": 3587
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8937640400868299,
+ "learning_rate": 4.620070621863917e-06,
+ "loss": 0.8765,
+ "step": 3588
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9699888873370087,
+ "learning_rate": 4.614818337316759e-06,
+ "loss": 0.8805,
+ "step": 3589
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9230127756400321,
+ "learning_rate": 4.609568144312107e-06,
+ "loss": 0.819,
+ "step": 3590
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8737618354540571,
+ "learning_rate": 4.6043200448890724e-06,
+ "loss": 0.88,
+ "step": 3591
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8538828166058943,
+ "learning_rate": 4.599074041085958e-06,
+ "loss": 0.8519,
+ "step": 3592
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.3215944655457885,
+ "learning_rate": 4.593830134940256e-06,
+ "loss": 0.875,
+ "step": 3593
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.0077055159975417,
+ "learning_rate": 4.588588328488629e-06,
+ "loss": 0.9125,
+ "step": 3594
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.873401485821927,
+ "learning_rate": 4.5833486237669414e-06,
+ "loss": 0.9075,
+ "step": 3595
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8818663016823189,
+ "learning_rate": 4.578111022810231e-06,
+ "loss": 0.8976,
+ "step": 3596
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9326040787549489,
+ "learning_rate": 4.5728755276527225e-06,
+ "loss": 0.9326,
+ "step": 3597
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.6718054616921444,
+ "learning_rate": 4.567642140327823e-06,
+ "loss": 0.7996,
+ "step": 3598
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8957026193299594,
+ "learning_rate": 4.562410862868123e-06,
+ "loss": 0.9123,
+ "step": 3599
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7320437879618403,
+ "learning_rate": 4.557181697305383e-06,
+ "loss": 0.8548,
+ "step": 3600
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8683517744441273,
+ "learning_rate": 4.551954645670557e-06,
+ "loss": 0.8725,
+ "step": 3601
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8847525424074256,
+ "learning_rate": 4.546729709993762e-06,
+ "loss": 0.9144,
+ "step": 3602
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.909666832325789,
+ "learning_rate": 4.541506892304314e-06,
+ "loss": 0.8982,
+ "step": 3603
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9802842671920442,
+ "learning_rate": 4.536286194630694e-06,
+ "loss": 0.8473,
+ "step": 3604
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.7915695493847694,
+ "learning_rate": 4.531067619000553e-06,
+ "loss": 0.8109,
+ "step": 3605
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8858358371523316,
+ "learning_rate": 4.525851167440731e-06,
+ "loss": 0.9083,
+ "step": 3606
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 1.0016265029103604,
+ "learning_rate": 4.52063684197724e-06,
+ "loss": 0.9003,
+ "step": 3607
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8981930715099556,
+ "learning_rate": 4.515424644635254e-06,
+ "loss": 0.9083,
+ "step": 3608
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.8255039931415044,
+ "learning_rate": 4.510214577439146e-06,
+ "loss": 0.8856,
+ "step": 3609
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.855645467956242,
+ "learning_rate": 4.5050066424124324e-06,
+ "loss": 0.8586,
+ "step": 3610
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9436723158455018,
+ "learning_rate": 4.49980084157782e-06,
+ "loss": 0.8329,
+ "step": 3611
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.9755445794722121,
+ "learning_rate": 4.494597176957186e-06,
+ "loss": 0.9376,
+ "step": 3612
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9273158389191944,
+ "learning_rate": 4.489395650571562e-06,
+ "loss": 0.9061,
+ "step": 3613
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9215188409295219,
+ "learning_rate": 4.4841962644411765e-06,
+ "loss": 0.8865,
+ "step": 3614
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.967506317454882,
+ "learning_rate": 4.4789990205854e-06,
+ "loss": 0.8873,
+ "step": 3615
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8604527308695761,
+ "learning_rate": 4.473803921022784e-06,
+ "loss": 0.9015,
+ "step": 3616
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8256845675856916,
+ "learning_rate": 4.468610967771051e-06,
+ "loss": 0.871,
+ "step": 3617
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9000936146825755,
+ "learning_rate": 4.4634201628470766e-06,
+ "loss": 0.9217,
+ "step": 3618
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8592376043744393,
+ "learning_rate": 4.458231508266912e-06,
+ "loss": 0.8417,
+ "step": 3619
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9685441995930987,
+ "learning_rate": 4.453045006045773e-06,
+ "loss": 0.9404,
+ "step": 3620
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8454383433942781,
+ "learning_rate": 4.447860658198035e-06,
+ "loss": 0.8963,
+ "step": 3621
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9249400638460467,
+ "learning_rate": 4.442678466737245e-06,
+ "loss": 0.8715,
+ "step": 3622
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9643101473914554,
+ "learning_rate": 4.4374984336760975e-06,
+ "loss": 0.8994,
+ "step": 3623
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8843503186780189,
+ "learning_rate": 4.432320561026461e-06,
+ "loss": 0.8907,
+ "step": 3624
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9200710208123296,
+ "learning_rate": 4.427144850799363e-06,
+ "loss": 0.9304,
+ "step": 3625
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9552543674744298,
+ "learning_rate": 4.421971305004989e-06,
+ "loss": 0.9535,
+ "step": 3626
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.968030250245091,
+ "learning_rate": 4.416799925652684e-06,
+ "loss": 0.8622,
+ "step": 3627
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8650031437396379,
+ "learning_rate": 4.411630714750956e-06,
+ "loss": 0.8618,
+ "step": 3628
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8778782072026667,
+ "learning_rate": 4.4064636743074605e-06,
+ "loss": 0.8525,
+ "step": 3629
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.1072428127959597,
+ "learning_rate": 4.40129880632902e-06,
+ "loss": 0.9185,
+ "step": 3630
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.909317764651399,
+ "learning_rate": 4.396136112821608e-06,
+ "loss": 0.8654,
+ "step": 3631
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8521648959061848,
+ "learning_rate": 4.390975595790358e-06,
+ "loss": 0.8821,
+ "step": 3632
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9395599613042558,
+ "learning_rate": 4.385817257239556e-06,
+ "loss": 0.8634,
+ "step": 3633
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9519217321516676,
+ "learning_rate": 4.380661099172636e-06,
+ "loss": 0.8705,
+ "step": 3634
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.968085791215669,
+ "learning_rate": 4.375507123592194e-06,
+ "loss": 0.8764,
+ "step": 3635
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9412718300744525,
+ "learning_rate": 4.370355332499977e-06,
+ "loss": 0.9213,
+ "step": 3636
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9137980428032713,
+ "learning_rate": 4.365205727896872e-06,
+ "loss": 0.9273,
+ "step": 3637
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.6519028128529283,
+ "learning_rate": 4.36005831178294e-06,
+ "loss": 0.7965,
+ "step": 3638
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8202311466952675,
+ "learning_rate": 4.354913086157367e-06,
+ "loss": 0.8718,
+ "step": 3639
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.0289697032115919,
+ "learning_rate": 4.349770053018502e-06,
+ "loss": 0.8648,
+ "step": 3640
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9888059882518984,
+ "learning_rate": 4.344629214363845e-06,
+ "loss": 0.9252,
+ "step": 3641
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.062500981065704,
+ "learning_rate": 4.339490572190031e-06,
+ "loss": 0.8811,
+ "step": 3642
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8271425347190752,
+ "learning_rate": 4.334354128492851e-06,
+ "loss": 0.8298,
+ "step": 3643
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.1525897331279629,
+ "learning_rate": 4.329219885267244e-06,
+ "loss": 0.8341,
+ "step": 3644
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8754748020528021,
+ "learning_rate": 4.324087844507289e-06,
+ "loss": 0.811,
+ "step": 3645
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8956038071027671,
+ "learning_rate": 4.318958008206214e-06,
+ "loss": 0.9099,
+ "step": 3646
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9105125073309388,
+ "learning_rate": 4.313830378356384e-06,
+ "loss": 0.9078,
+ "step": 3647
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9846714006143011,
+ "learning_rate": 4.3087049569493136e-06,
+ "loss": 0.9495,
+ "step": 3648
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8898304315799884,
+ "learning_rate": 4.303581745975656e-06,
+ "loss": 0.8255,
+ "step": 3649
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9846922272472171,
+ "learning_rate": 4.2984607474252084e-06,
+ "loss": 0.8476,
+ "step": 3650
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8737467243862848,
+ "learning_rate": 4.293341963286912e-06,
+ "loss": 0.8575,
+ "step": 3651
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8551569355687991,
+ "learning_rate": 4.288225395548835e-06,
+ "loss": 0.9171,
+ "step": 3652
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 1.0060747251716518,
+ "learning_rate": 4.283111046198198e-06,
+ "loss": 0.8679,
+ "step": 3653
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8621218642512847,
+ "learning_rate": 4.277998917221354e-06,
+ "loss": 0.9173,
+ "step": 3654
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9715160176918434,
+ "learning_rate": 4.272889010603798e-06,
+ "loss": 0.8337,
+ "step": 3655
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8765010419824666,
+ "learning_rate": 4.267781328330155e-06,
+ "loss": 0.8877,
+ "step": 3656
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.7833574855213274,
+ "learning_rate": 4.262675872384197e-06,
+ "loss": 0.8347,
+ "step": 3657
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8611646854513554,
+ "learning_rate": 4.257572644748813e-06,
+ "loss": 0.8863,
+ "step": 3658
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.8946638088467714,
+ "learning_rate": 4.252471647406045e-06,
+ "loss": 0.8666,
+ "step": 3659
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9009900500063068,
+ "learning_rate": 4.2473728823370605e-06,
+ "loss": 0.867,
+ "step": 3660
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9518306446370538,
+ "learning_rate": 4.242276351522161e-06,
+ "loss": 0.8915,
+ "step": 3661
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.9812559709055975,
+ "learning_rate": 4.237182056940784e-06,
+ "loss": 0.9443,
+ "step": 3662
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.850129243043673,
+ "learning_rate": 4.232090000571488e-06,
+ "loss": 0.9138,
+ "step": 3663
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.7929788463165887,
+ "learning_rate": 4.2270001843919714e-06,
+ "loss": 0.8653,
+ "step": 3664
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8811424669031181,
+ "learning_rate": 4.221912610379065e-06,
+ "loss": 0.8096,
+ "step": 3665
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8896115887659922,
+ "learning_rate": 4.216827280508712e-06,
+ "loss": 0.8645,
+ "step": 3666
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9238072936348655,
+ "learning_rate": 4.211744196756011e-06,
+ "loss": 0.9175,
+ "step": 3667
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8575211788698613,
+ "learning_rate": 4.206663361095164e-06,
+ "loss": 0.9168,
+ "step": 3668
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9961048630487186,
+ "learning_rate": 4.201584775499509e-06,
+ "loss": 0.894,
+ "step": 3669
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9364828786132331,
+ "learning_rate": 4.196508441941516e-06,
+ "loss": 0.9498,
+ "step": 3670
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8557525875880314,
+ "learning_rate": 4.191434362392768e-06,
+ "loss": 0.8834,
+ "step": 3671
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.7465234278774255,
+ "learning_rate": 4.186362538823981e-06,
+ "loss": 0.7581,
+ "step": 3672
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0441284821899917,
+ "learning_rate": 4.181292973204992e-06,
+ "loss": 0.8156,
+ "step": 3673
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8541623284036627,
+ "learning_rate": 4.1762256675047655e-06,
+ "loss": 0.8623,
+ "step": 3674
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9314759176837049,
+ "learning_rate": 4.171160623691384e-06,
+ "loss": 0.8624,
+ "step": 3675
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.7738345748275365,
+ "learning_rate": 4.166097843732048e-06,
+ "loss": 0.8071,
+ "step": 3676
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0502808710711233,
+ "learning_rate": 4.161037329593085e-06,
+ "loss": 0.9185,
+ "step": 3677
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9624173301340887,
+ "learning_rate": 4.155979083239942e-06,
+ "loss": 0.9636,
+ "step": 3678
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0671638564561505,
+ "learning_rate": 4.1509231066371815e-06,
+ "loss": 0.8894,
+ "step": 3679
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8506539202106217,
+ "learning_rate": 4.1458694017484915e-06,
+ "loss": 0.8199,
+ "step": 3680
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8824345879063262,
+ "learning_rate": 4.140817970536664e-06,
+ "loss": 0.8406,
+ "step": 3681
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8670283831534198,
+ "learning_rate": 4.135768814963622e-06,
+ "loss": 0.8598,
+ "step": 3682
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9740382190337725,
+ "learning_rate": 4.130721936990399e-06,
+ "loss": 0.8864,
+ "step": 3683
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.038045285775576,
+ "learning_rate": 4.1256773385771444e-06,
+ "loss": 0.8607,
+ "step": 3684
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8796107308648102,
+ "learning_rate": 4.120635021683122e-06,
+ "loss": 0.8795,
+ "step": 3685
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8135501191770208,
+ "learning_rate": 4.115594988266711e-06,
+ "loss": 0.8811,
+ "step": 3686
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9422800006768904,
+ "learning_rate": 4.1105572402853976e-06,
+ "loss": 0.9114,
+ "step": 3687
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0563904231148622,
+ "learning_rate": 4.1055217796957895e-06,
+ "loss": 0.9047,
+ "step": 3688
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9743667412318251,
+ "learning_rate": 4.100488608453599e-06,
+ "loss": 0.8691,
+ "step": 3689
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.912896943805015,
+ "learning_rate": 4.095457728513652e-06,
+ "loss": 0.8444,
+ "step": 3690
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9309317896956503,
+ "learning_rate": 4.09042914182989e-06,
+ "loss": 0.8514,
+ "step": 3691
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8899955444001727,
+ "learning_rate": 4.08540285035535e-06,
+ "loss": 0.8736,
+ "step": 3692
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.1507668624737333,
+ "learning_rate": 4.0803788560421885e-06,
+ "loss": 0.8964,
+ "step": 3693
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9678083861215927,
+ "learning_rate": 4.075357160841671e-06,
+ "loss": 0.9272,
+ "step": 3694
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9188195767360579,
+ "learning_rate": 4.070337766704155e-06,
+ "loss": 0.8804,
+ "step": 3695
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.869403803685755,
+ "learning_rate": 4.065320675579132e-06,
+ "loss": 0.8146,
+ "step": 3696
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9021898867146443,
+ "learning_rate": 4.0603058894151685e-06,
+ "loss": 0.8502,
+ "step": 3697
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0581369795813984,
+ "learning_rate": 4.055293410159954e-06,
+ "loss": 0.8897,
+ "step": 3698
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.970168825713092,
+ "learning_rate": 4.050283239760282e-06,
+ "loss": 0.8907,
+ "step": 3699
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.868714997732219,
+ "learning_rate": 4.045275380162038e-06,
+ "loss": 0.8378,
+ "step": 3700
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.105015776049482,
+ "learning_rate": 4.04026983331022e-06,
+ "loss": 0.9041,
+ "step": 3701
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9166576639363467,
+ "learning_rate": 4.035266601148924e-06,
+ "loss": 0.8837,
+ "step": 3702
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.0362019327067018,
+ "learning_rate": 4.03026568562135e-06,
+ "loss": 0.947,
+ "step": 3703
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9224286147142222,
+ "learning_rate": 4.025267088669797e-06,
+ "loss": 0.8797,
+ "step": 3704
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9790592463351313,
+ "learning_rate": 4.020270812235656e-06,
+ "loss": 0.8821,
+ "step": 3705
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8967074029017275,
+ "learning_rate": 4.015276858259427e-06,
+ "loss": 0.8708,
+ "step": 3706
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8484150230651468,
+ "learning_rate": 4.010285228680705e-06,
+ "loss": 0.8294,
+ "step": 3707
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8906581283251357,
+ "learning_rate": 4.005295925438181e-06,
+ "loss": 0.8891,
+ "step": 3708
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8936506974123093,
+ "learning_rate": 4.000308950469646e-06,
+ "loss": 0.913,
+ "step": 3709
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.976798839104124,
+ "learning_rate": 3.995324305711976e-06,
+ "loss": 0.8496,
+ "step": 3710
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8313037620095529,
+ "learning_rate": 3.990341993101154e-06,
+ "loss": 0.8452,
+ "step": 3711
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8763795340947595,
+ "learning_rate": 3.985362014572256e-06,
+ "loss": 0.9,
+ "step": 3712
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.8855051717952711,
+ "learning_rate": 3.9803843720594385e-06,
+ "loss": 0.9288,
+ "step": 3713
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.911714889970517,
+ "learning_rate": 3.97540906749597e-06,
+ "loss": 0.8818,
+ "step": 3714
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.9642610130649074,
+ "learning_rate": 3.970436102814203e-06,
+ "loss": 0.927,
+ "step": 3715
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 1.088904292924162,
+ "learning_rate": 3.965465479945569e-06,
+ "loss": 0.8459,
+ "step": 3716
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9484766597574151,
+ "learning_rate": 3.9604972008206085e-06,
+ "loss": 0.9588,
+ "step": 3717
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.319476922702796,
+ "learning_rate": 3.955531267368942e-06,
+ "loss": 0.8339,
+ "step": 3718
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9094710397946618,
+ "learning_rate": 3.950567681519279e-06,
+ "loss": 0.8279,
+ "step": 3719
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0670769304223973,
+ "learning_rate": 3.945606445199427e-06,
+ "loss": 0.962,
+ "step": 3720
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.902022579528049,
+ "learning_rate": 3.940647560336262e-06,
+ "loss": 0.8559,
+ "step": 3721
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0879261141181404,
+ "learning_rate": 3.935691028855763e-06,
+ "loss": 0.8914,
+ "step": 3722
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9768311946293221,
+ "learning_rate": 3.930736852682993e-06,
+ "loss": 0.8868,
+ "step": 3723
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.995014398280607,
+ "learning_rate": 3.9257850337420856e-06,
+ "loss": 0.8997,
+ "step": 3724
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.086378818739939,
+ "learning_rate": 3.920835573956285e-06,
+ "loss": 1.0138,
+ "step": 3725
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9072120853209293,
+ "learning_rate": 3.915888475247894e-06,
+ "loss": 0.838,
+ "step": 3726
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8944818816763047,
+ "learning_rate": 3.910943739538313e-06,
+ "loss": 0.843,
+ "step": 3727
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.887414896353494,
+ "learning_rate": 3.906001368748023e-06,
+ "loss": 0.8405,
+ "step": 3728
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9071563125303521,
+ "learning_rate": 3.901061364796574e-06,
+ "loss": 0.8688,
+ "step": 3729
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9468193201052529,
+ "learning_rate": 3.8961237296026155e-06,
+ "loss": 0.8674,
+ "step": 3730
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9489145365417119,
+ "learning_rate": 3.891188465083865e-06,
+ "loss": 0.89,
+ "step": 3731
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8841238683831928,
+ "learning_rate": 3.886255573157121e-06,
+ "loss": 0.8566,
+ "step": 3732
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.849586270418284,
+ "learning_rate": 3.88132505573827e-06,
+ "loss": 0.9073,
+ "step": 3733
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8556714443775648,
+ "learning_rate": 3.876396914742258e-06,
+ "loss": 0.9178,
+ "step": 3734
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0071523406950287,
+ "learning_rate": 3.871471152083121e-06,
+ "loss": 0.871,
+ "step": 3735
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8090934902247617,
+ "learning_rate": 3.866547769673968e-06,
+ "loss": 0.8786,
+ "step": 3736
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.7782592851221304,
+ "learning_rate": 3.861626769426988e-06,
+ "loss": 0.8415,
+ "step": 3737
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9180548623713223,
+ "learning_rate": 3.8567081532534374e-06,
+ "loss": 0.8944,
+ "step": 3738
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9356932893834743,
+ "learning_rate": 3.851791923063655e-06,
+ "loss": 0.8429,
+ "step": 3739
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9364320056026952,
+ "learning_rate": 3.846878080767039e-06,
+ "loss": 0.861,
+ "step": 3740
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9460475861296517,
+ "learning_rate": 3.841966628272079e-06,
+ "loss": 0.9219,
+ "step": 3741
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9059453261456984,
+ "learning_rate": 3.837057567486314e-06,
+ "loss": 0.8831,
+ "step": 3742
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0830084578853572,
+ "learning_rate": 3.832150900316377e-06,
+ "loss": 0.9647,
+ "step": 3743
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9417066203729135,
+ "learning_rate": 3.827246628667962e-06,
+ "loss": 0.9004,
+ "step": 3744
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8675209367711789,
+ "learning_rate": 3.822344754445826e-06,
+ "loss": 0.8551,
+ "step": 3745
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8383371044739671,
+ "learning_rate": 3.817445279553801e-06,
+ "loss": 0.8697,
+ "step": 3746
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.896975626808194,
+ "learning_rate": 3.8125482058947905e-06,
+ "loss": 0.8411,
+ "step": 3747
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9631984873338313,
+ "learning_rate": 3.8076535353707523e-06,
+ "loss": 0.8276,
+ "step": 3748
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9013302310877005,
+ "learning_rate": 3.8027612698827344e-06,
+ "loss": 0.9086,
+ "step": 3749
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8654966212586118,
+ "learning_rate": 3.7978714113308246e-06,
+ "loss": 0.8791,
+ "step": 3750
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9333767603847324,
+ "learning_rate": 3.7929839616141917e-06,
+ "loss": 0.8529,
+ "step": 3751
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9179758734883217,
+ "learning_rate": 3.788098922631067e-06,
+ "loss": 0.9202,
+ "step": 3752
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9229409872103111,
+ "learning_rate": 3.7832162962787355e-06,
+ "loss": 0.8882,
+ "step": 3753
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0139239200914962,
+ "learning_rate": 3.7783360844535653e-06,
+ "loss": 0.8768,
+ "step": 3754
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8305625168363362,
+ "learning_rate": 3.773458289050963e-06,
+ "loss": 0.785,
+ "step": 3755
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9940432238969626,
+ "learning_rate": 3.768582911965414e-06,
+ "loss": 0.898,
+ "step": 3756
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8831650295924641,
+ "learning_rate": 3.763709955090461e-06,
+ "loss": 0.8713,
+ "step": 3757
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0273323162697248,
+ "learning_rate": 3.7588394203186963e-06,
+ "loss": 0.93,
+ "step": 3758
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8402393066588723,
+ "learning_rate": 3.753971309541784e-06,
+ "loss": 0.9176,
+ "step": 3759
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9020695278691016,
+ "learning_rate": 3.7491056246504433e-06,
+ "loss": 0.9211,
+ "step": 3760
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9009607918800239,
+ "learning_rate": 3.7442423675344474e-06,
+ "loss": 0.9042,
+ "step": 3761
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8580907122523047,
+ "learning_rate": 3.739381540082635e-06,
+ "loss": 0.8688,
+ "step": 3762
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0207892189622805,
+ "learning_rate": 3.7345231441828876e-06,
+ "loss": 0.9277,
+ "step": 3763
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9646886351451739,
+ "learning_rate": 3.729667181722154e-06,
+ "loss": 0.895,
+ "step": 3764
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.844103034268996,
+ "learning_rate": 3.7248136545864345e-06,
+ "loss": 0.8389,
+ "step": 3765
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.8795968771573153,
+ "learning_rate": 3.719962564660783e-06,
+ "loss": 0.9113,
+ "step": 3766
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 1.0311629852878772,
+ "learning_rate": 3.7151139138293056e-06,
+ "loss": 0.9685,
+ "step": 3767
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.9008176806044091,
+ "learning_rate": 3.7102677039751667e-06,
+ "loss": 0.8292,
+ "step": 3768
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9876712228116549,
+ "learning_rate": 3.705423936980572e-06,
+ "loss": 0.8982,
+ "step": 3769
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8662282545257485,
+ "learning_rate": 3.700582614726791e-06,
+ "loss": 0.9215,
+ "step": 3770
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.877429447426921,
+ "learning_rate": 3.6957437390941274e-06,
+ "loss": 0.871,
+ "step": 3771
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9509863016409777,
+ "learning_rate": 3.6909073119619555e-06,
+ "loss": 0.8889,
+ "step": 3772
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9126104542246645,
+ "learning_rate": 3.6860733352086866e-06,
+ "loss": 0.8137,
+ "step": 3773
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9830157855203383,
+ "learning_rate": 3.6812418107117765e-06,
+ "loss": 0.8587,
+ "step": 3774
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8835491755842453,
+ "learning_rate": 3.6764127403477347e-06,
+ "loss": 0.8573,
+ "step": 3775
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8986686975313434,
+ "learning_rate": 3.6715861259921226e-06,
+ "loss": 0.8854,
+ "step": 3776
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.925232639208042,
+ "learning_rate": 3.6667619695195287e-06,
+ "loss": 0.887,
+ "step": 3777
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8889916711790783,
+ "learning_rate": 3.6619402728036157e-06,
+ "loss": 0.8327,
+ "step": 3778
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8844815945929162,
+ "learning_rate": 3.657121037717064e-06,
+ "loss": 0.9186,
+ "step": 3779
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9206190350339126,
+ "learning_rate": 3.652304266131612e-06,
+ "loss": 0.8743,
+ "step": 3780
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9228023702640018,
+ "learning_rate": 3.6474899599180426e-06,
+ "loss": 0.8922,
+ "step": 3781
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.0192228075406065,
+ "learning_rate": 3.642678120946168e-06,
+ "loss": 0.9031,
+ "step": 3782
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9149993805947546,
+ "learning_rate": 3.6378687510848576e-06,
+ "loss": 0.8891,
+ "step": 3783
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8161914582243244,
+ "learning_rate": 3.6330618522020124e-06,
+ "loss": 0.9129,
+ "step": 3784
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8442882576276589,
+ "learning_rate": 3.6282574261645776e-06,
+ "loss": 0.8699,
+ "step": 3785
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.937560353472258,
+ "learning_rate": 3.62345547483854e-06,
+ "loss": 0.8278,
+ "step": 3786
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9279691052881753,
+ "learning_rate": 3.618656000088916e-06,
+ "loss": 0.9297,
+ "step": 3787
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8434983729293868,
+ "learning_rate": 3.6138590037797695e-06,
+ "loss": 0.828,
+ "step": 3788
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8572199091834554,
+ "learning_rate": 3.6090644877741986e-06,
+ "loss": 0.8668,
+ "step": 3789
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8270755574423517,
+ "learning_rate": 3.6042724539343378e-06,
+ "loss": 0.8988,
+ "step": 3790
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.924326821040318,
+ "learning_rate": 3.599482904121361e-06,
+ "loss": 0.8947,
+ "step": 3791
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9984656300391526,
+ "learning_rate": 3.594695840195468e-06,
+ "loss": 0.8627,
+ "step": 3792
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9244615897474393,
+ "learning_rate": 3.5899112640159017e-06,
+ "loss": 0.9139,
+ "step": 3793
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8220097609234314,
+ "learning_rate": 3.585129177440938e-06,
+ "loss": 0.9084,
+ "step": 3794
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9956606576720209,
+ "learning_rate": 3.580349582327882e-06,
+ "loss": 0.9089,
+ "step": 3795
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9076291400880901,
+ "learning_rate": 3.575572480533076e-06,
+ "loss": 0.8554,
+ "step": 3796
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8625138392020574,
+ "learning_rate": 3.570797873911892e-06,
+ "loss": 0.8523,
+ "step": 3797
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.053483439962704,
+ "learning_rate": 3.566025764318728e-06,
+ "loss": 0.8772,
+ "step": 3798
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9282207856467962,
+ "learning_rate": 3.5612561536070213e-06,
+ "loss": 0.881,
+ "step": 3799
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9230812349575398,
+ "learning_rate": 3.5564890436292243e-06,
+ "loss": 0.9451,
+ "step": 3800
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9694542363185262,
+ "learning_rate": 3.5517244362368363e-06,
+ "loss": 0.9329,
+ "step": 3801
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9840594327042361,
+ "learning_rate": 3.5469623332803795e-06,
+ "loss": 0.9131,
+ "step": 3802
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8896833948392223,
+ "learning_rate": 3.5422027366093893e-06,
+ "loss": 0.9036,
+ "step": 3803
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9454777156720028,
+ "learning_rate": 3.5374456480724427e-06,
+ "loss": 0.9143,
+ "step": 3804
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8921333290905952,
+ "learning_rate": 3.532691069517142e-06,
+ "loss": 0.857,
+ "step": 3805
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.7135657626973516,
+ "learning_rate": 3.5279390027901004e-06,
+ "loss": 0.83,
+ "step": 3806
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.1027756347210746,
+ "learning_rate": 3.5231894497369802e-06,
+ "loss": 0.871,
+ "step": 3807
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8528195294796473,
+ "learning_rate": 3.5184424122024406e-06,
+ "loss": 0.8525,
+ "step": 3808
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9336178795936354,
+ "learning_rate": 3.5136978920301822e-06,
+ "loss": 0.8834,
+ "step": 3809
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8806797277091982,
+ "learning_rate": 3.508955891062924e-06,
+ "loss": 0.8245,
+ "step": 3810
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8683615528003018,
+ "learning_rate": 3.5042164111423983e-06,
+ "loss": 0.8264,
+ "step": 3811
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9440372185903397,
+ "learning_rate": 3.4994794541093667e-06,
+ "loss": 0.9524,
+ "step": 3812
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.1584916221881574,
+ "learning_rate": 3.4947450218036106e-06,
+ "loss": 0.8967,
+ "step": 3813
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8858977602295007,
+ "learning_rate": 3.4900131160639283e-06,
+ "loss": 0.8261,
+ "step": 3814
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9012923918685176,
+ "learning_rate": 3.485283738728139e-06,
+ "loss": 0.8689,
+ "step": 3815
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9050659875092949,
+ "learning_rate": 3.4805568916330747e-06,
+ "loss": 0.8779,
+ "step": 3816
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.9245864252851095,
+ "learning_rate": 3.4758325766145896e-06,
+ "loss": 0.9252,
+ "step": 3817
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8432005259251041,
+ "learning_rate": 3.471110795507554e-06,
+ "loss": 0.877,
+ "step": 3818
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.8981035108024167,
+ "learning_rate": 3.4663915501458523e-06,
+ "loss": 0.8993,
+ "step": 3819
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 1.0600438461788986,
+ "learning_rate": 3.4616748423623893e-06,
+ "loss": 0.8758,
+ "step": 3820
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0540241762688183,
+ "learning_rate": 3.4569606739890737e-06,
+ "loss": 0.9104,
+ "step": 3821
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9115841602144225,
+ "learning_rate": 3.452249046856836e-06,
+ "loss": 0.9174,
+ "step": 3822
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0016106992660168,
+ "learning_rate": 3.4475399627956197e-06,
+ "loss": 0.882,
+ "step": 3823
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.1025168971628794,
+ "learning_rate": 3.4428334236343774e-06,
+ "loss": 0.8971,
+ "step": 3824
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0077975820945424,
+ "learning_rate": 3.438129431201075e-06,
+ "loss": 0.9041,
+ "step": 3825
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9622346034155563,
+ "learning_rate": 3.433427987322693e-06,
+ "loss": 0.8852,
+ "step": 3826
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9901312244363298,
+ "learning_rate": 3.4287290938252103e-06,
+ "loss": 0.9628,
+ "step": 3827
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8865845180571811,
+ "learning_rate": 3.424032752533627e-06,
+ "loss": 0.8573,
+ "step": 3828
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9971330704212689,
+ "learning_rate": 3.4193389652719478e-06,
+ "loss": 0.8595,
+ "step": 3829
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0212239287622955,
+ "learning_rate": 3.4146477338631856e-06,
+ "loss": 0.8652,
+ "step": 3830
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0018839210365473,
+ "learning_rate": 3.4099590601293632e-06,
+ "loss": 0.8818,
+ "step": 3831
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9634015411273907,
+ "learning_rate": 3.4052729458915024e-06,
+ "loss": 0.9257,
+ "step": 3832
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9051038008041504,
+ "learning_rate": 3.4005893929696377e-06,
+ "loss": 0.864,
+ "step": 3833
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9773506088648896,
+ "learning_rate": 3.3959084031828114e-06,
+ "loss": 0.8858,
+ "step": 3834
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0271759026403005,
+ "learning_rate": 3.3912299783490567e-06,
+ "loss": 0.9403,
+ "step": 3835
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9675445160786589,
+ "learning_rate": 3.3865541202854314e-06,
+ "loss": 0.8929,
+ "step": 3836
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9584455722139147,
+ "learning_rate": 3.3818808308079753e-06,
+ "loss": 0.8911,
+ "step": 3837
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9138810258332464,
+ "learning_rate": 3.3772101117317437e-06,
+ "loss": 0.9271,
+ "step": 3838
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.021130646460032,
+ "learning_rate": 3.372541964870795e-06,
+ "loss": 0.9575,
+ "step": 3839
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8581462216709524,
+ "learning_rate": 3.367876392038174e-06,
+ "loss": 0.9059,
+ "step": 3840
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.2142157453153244,
+ "learning_rate": 3.363213395045941e-06,
+ "loss": 0.9043,
+ "step": 3841
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.904892437735682,
+ "learning_rate": 3.3585529757051504e-06,
+ "loss": 0.8587,
+ "step": 3842
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.6982794348245435,
+ "learning_rate": 3.353895135825854e-06,
+ "loss": 0.8143,
+ "step": 3843
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.901569538506316,
+ "learning_rate": 3.3492398772171074e-06,
+ "loss": 0.9083,
+ "step": 3844
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0673496105059448,
+ "learning_rate": 3.344587201686952e-06,
+ "loss": 0.9181,
+ "step": 3845
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8687097006222544,
+ "learning_rate": 3.3399371110424372e-06,
+ "loss": 0.9455,
+ "step": 3846
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9077193266238223,
+ "learning_rate": 3.3352896070896057e-06,
+ "loss": 0.9256,
+ "step": 3847
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8722578281757957,
+ "learning_rate": 3.330644691633492e-06,
+ "loss": 0.9152,
+ "step": 3848
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8528602763484596,
+ "learning_rate": 3.3260023664781326e-06,
+ "loss": 0.9078,
+ "step": 3849
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8345205411570323,
+ "learning_rate": 3.321362633426547e-06,
+ "loss": 0.8108,
+ "step": 3850
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9559890862890268,
+ "learning_rate": 3.316725494280757e-06,
+ "loss": 0.9015,
+ "step": 3851
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9670124868351486,
+ "learning_rate": 3.3120909508417754e-06,
+ "loss": 0.8538,
+ "step": 3852
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0259181840795324,
+ "learning_rate": 3.307459004909599e-06,
+ "loss": 0.9078,
+ "step": 3853
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8985185254919762,
+ "learning_rate": 3.3028296582832285e-06,
+ "loss": 0.912,
+ "step": 3854
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8807026809368774,
+ "learning_rate": 3.2982029127606517e-06,
+ "loss": 0.8238,
+ "step": 3855
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9674412742423509,
+ "learning_rate": 3.2935787701388346e-06,
+ "loss": 0.8398,
+ "step": 3856
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8801883586144479,
+ "learning_rate": 3.2889572322137454e-06,
+ "loss": 0.8291,
+ "step": 3857
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8129415192200613,
+ "learning_rate": 3.2843383007803364e-06,
+ "loss": 0.8318,
+ "step": 3858
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0665443183875298,
+ "learning_rate": 3.279721977632546e-06,
+ "loss": 0.8963,
+ "step": 3859
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0169379053982934,
+ "learning_rate": 3.275108264563306e-06,
+ "loss": 0.8996,
+ "step": 3860
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9653918063759743,
+ "learning_rate": 3.270497163364521e-06,
+ "loss": 0.9263,
+ "step": 3861
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0804833222073364,
+ "learning_rate": 3.2658886758270947e-06,
+ "loss": 1.01,
+ "step": 3862
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.7825848052851337,
+ "learning_rate": 3.2612828037409116e-06,
+ "loss": 0.8095,
+ "step": 3863
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8740520318111609,
+ "learning_rate": 3.256679548894831e-06,
+ "loss": 0.8568,
+ "step": 3864
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9248697002242029,
+ "learning_rate": 3.252078913076718e-06,
+ "loss": 0.8778,
+ "step": 3865
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.9166993605718344,
+ "learning_rate": 3.247480898073395e-06,
+ "loss": 0.9255,
+ "step": 3866
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.942994888452231,
+ "learning_rate": 3.242885505670681e-06,
+ "loss": 0.8851,
+ "step": 3867
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8579958740991174,
+ "learning_rate": 3.238292737653379e-06,
+ "loss": 0.7956,
+ "step": 3868
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0671331629788738,
+ "learning_rate": 3.233702595805258e-06,
+ "loss": 0.8741,
+ "step": 3869
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.0403988775450346,
+ "learning_rate": 3.229115081909082e-06,
+ "loss": 0.9403,
+ "step": 3870
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.8396367235125283,
+ "learning_rate": 3.224530197746587e-06,
+ "loss": 0.8832,
+ "step": 3871
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 1.941308923849184,
+ "learning_rate": 3.2199479450984892e-06,
+ "loss": 0.9302,
+ "step": 3872
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7109457997345888,
+ "learning_rate": 3.2153683257444856e-06,
+ "loss": 0.8001,
+ "step": 3873
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9168204463099244,
+ "learning_rate": 3.210791341463243e-06,
+ "loss": 0.8222,
+ "step": 3874
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.1756587432014478,
+ "learning_rate": 3.206216994032411e-06,
+ "loss": 0.8802,
+ "step": 3875
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9711068750938427,
+ "learning_rate": 3.2016452852286127e-06,
+ "loss": 0.9294,
+ "step": 3876
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8328199420707374,
+ "learning_rate": 3.1970762168274495e-06,
+ "loss": 0.8489,
+ "step": 3877
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.970375815132571,
+ "learning_rate": 3.1925097906034962e-06,
+ "loss": 0.8944,
+ "step": 3878
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8361310216799411,
+ "learning_rate": 3.187946008330295e-06,
+ "loss": 0.8675,
+ "step": 3879
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8372116888064216,
+ "learning_rate": 3.1833848717803674e-06,
+ "loss": 0.8857,
+ "step": 3880
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9518388836739485,
+ "learning_rate": 3.178826382725212e-06,
+ "loss": 0.8927,
+ "step": 3881
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7763825392554807,
+ "learning_rate": 3.1742705429352827e-06,
+ "loss": 0.8024,
+ "step": 3882
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9627172510352615,
+ "learning_rate": 3.1697173541800254e-06,
+ "loss": 0.9271,
+ "step": 3883
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0784742072275648,
+ "learning_rate": 3.165166818227845e-06,
+ "loss": 0.9468,
+ "step": 3884
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9122477128885562,
+ "learning_rate": 3.1606189368461117e-06,
+ "loss": 0.8504,
+ "step": 3885
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0019713180779037,
+ "learning_rate": 3.156073711801172e-06,
+ "loss": 0.9236,
+ "step": 3886
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0756036994552267,
+ "learning_rate": 3.151531144858344e-06,
+ "loss": 0.9483,
+ "step": 3887
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9158802438241802,
+ "learning_rate": 3.146991237781899e-06,
+ "loss": 0.843,
+ "step": 3888
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.144773394801962,
+ "learning_rate": 3.142453992335096e-06,
+ "loss": 0.8776,
+ "step": 3889
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9731819597349555,
+ "learning_rate": 3.137919410280139e-06,
+ "loss": 0.8764,
+ "step": 3890
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8748329635929616,
+ "learning_rate": 3.1333874933782114e-06,
+ "loss": 0.902,
+ "step": 3891
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9314269659571606,
+ "learning_rate": 3.128858243389461e-06,
+ "loss": 0.909,
+ "step": 3892
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8906883757821964,
+ "learning_rate": 3.124331662072987e-06,
+ "loss": 0.9506,
+ "step": 3893
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8106075732357755,
+ "learning_rate": 3.119807751186872e-06,
+ "loss": 0.8466,
+ "step": 3894
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7576273502711641,
+ "learning_rate": 3.1152865124881436e-06,
+ "loss": 0.8018,
+ "step": 3895
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8414156539000981,
+ "learning_rate": 3.110767947732801e-06,
+ "loss": 0.8405,
+ "step": 3896
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9285031095149952,
+ "learning_rate": 3.106252058675806e-06,
+ "loss": 0.9048,
+ "step": 3897
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.052669313469202,
+ "learning_rate": 3.101738847071072e-06,
+ "loss": 0.8893,
+ "step": 3898
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.854607871535346,
+ "learning_rate": 3.097228314671481e-06,
+ "loss": 0.9223,
+ "step": 3899
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9140545124332589,
+ "learning_rate": 3.092720463228872e-06,
+ "loss": 0.9431,
+ "step": 3900
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9498296331245554,
+ "learning_rate": 3.0882152944940423e-06,
+ "loss": 0.8876,
+ "step": 3901
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8881688900569868,
+ "learning_rate": 3.0837128102167514e-06,
+ "loss": 0.9169,
+ "step": 3902
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9882043928296107,
+ "learning_rate": 3.079213012145705e-06,
+ "loss": 0.8815,
+ "step": 3903
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8758226010035889,
+ "learning_rate": 3.0747159020285766e-06,
+ "loss": 0.8993,
+ "step": 3904
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8950531987539941,
+ "learning_rate": 3.0702214816119925e-06,
+ "loss": 0.8729,
+ "step": 3905
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9594547393814911,
+ "learning_rate": 3.065729752641532e-06,
+ "loss": 0.8942,
+ "step": 3906
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0051751598929568,
+ "learning_rate": 3.0612407168617352e-06,
+ "loss": 0.9542,
+ "step": 3907
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8311213445322736,
+ "learning_rate": 3.0567543760160866e-06,
+ "loss": 0.8194,
+ "step": 3908
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0236288594295317,
+ "learning_rate": 3.05227073184703e-06,
+ "loss": 0.8839,
+ "step": 3909
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8903513302834326,
+ "learning_rate": 3.047789786095967e-06,
+ "loss": 0.8664,
+ "step": 3910
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0508218390928494,
+ "learning_rate": 3.0433115405032334e-06,
+ "loss": 0.9473,
+ "step": 3911
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.7852377591241896,
+ "learning_rate": 3.0388359968081395e-06,
+ "loss": 0.8157,
+ "step": 3912
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.3317701777457944,
+ "learning_rate": 3.034363156748933e-06,
+ "loss": 0.9152,
+ "step": 3913
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.97264052334293,
+ "learning_rate": 3.0298930220628086e-06,
+ "loss": 0.8268,
+ "step": 3914
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8431080760763496,
+ "learning_rate": 3.025425594485919e-06,
+ "loss": 0.8334,
+ "step": 3915
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9659543078257832,
+ "learning_rate": 3.0209608757533626e-06,
+ "loss": 0.9043,
+ "step": 3916
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8277431354918725,
+ "learning_rate": 3.0164988675991768e-06,
+ "loss": 0.9059,
+ "step": 3917
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9681457535692266,
+ "learning_rate": 3.0120395717563655e-06,
+ "loss": 0.8347,
+ "step": 3918
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0696919917523706,
+ "learning_rate": 3.00758298995686e-06,
+ "loss": 0.9063,
+ "step": 3919
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.8832360160073505,
+ "learning_rate": 3.0031291239315473e-06,
+ "loss": 0.8955,
+ "step": 3920
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9513983119172031,
+ "learning_rate": 2.9986779754102613e-06,
+ "loss": 0.8222,
+ "step": 3921
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 1.0298065915415877,
+ "learning_rate": 2.9942295461217698e-06,
+ "loss": 0.9406,
+ "step": 3922
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9344331046185815,
+ "learning_rate": 2.9897838377937947e-06,
+ "loss": 0.8591,
+ "step": 3923
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.9697692593743283,
+ "learning_rate": 2.985340852152999e-06,
+ "loss": 0.9087,
+ "step": 3924
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8768939678506315,
+ "learning_rate": 2.9809005909249866e-06,
+ "loss": 0.9502,
+ "step": 3925
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9118163255293353,
+ "learning_rate": 2.9764630558343064e-06,
+ "loss": 0.9045,
+ "step": 3926
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7673649652691692,
+ "learning_rate": 2.9720282486044407e-06,
+ "loss": 0.8274,
+ "step": 3927
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9151544803470711,
+ "learning_rate": 2.9675961709578194e-06,
+ "loss": 0.9434,
+ "step": 3928
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.921591784931005,
+ "learning_rate": 2.9631668246158105e-06,
+ "loss": 0.8549,
+ "step": 3929
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0492749845033982,
+ "learning_rate": 2.958740211298722e-06,
+ "loss": 0.9197,
+ "step": 3930
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9882213471656914,
+ "learning_rate": 2.9543163327258016e-06,
+ "loss": 0.912,
+ "step": 3931
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8140070065617956,
+ "learning_rate": 2.949895190615227e-06,
+ "loss": 0.8183,
+ "step": 3932
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9866899907692384,
+ "learning_rate": 2.9454767866841225e-06,
+ "loss": 0.8712,
+ "step": 3933
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9893039822513295,
+ "learning_rate": 2.941061122648545e-06,
+ "loss": 0.9179,
+ "step": 3934
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9258487979575096,
+ "learning_rate": 2.9366482002234874e-06,
+ "loss": 0.9134,
+ "step": 3935
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9759343029344542,
+ "learning_rate": 2.932238021122877e-06,
+ "loss": 0.8791,
+ "step": 3936
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9033871906886145,
+ "learning_rate": 2.9278305870595814e-06,
+ "loss": 0.8056,
+ "step": 3937
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0131907727090592,
+ "learning_rate": 2.92342589974539e-06,
+ "loss": 0.9067,
+ "step": 3938
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8632993518217443,
+ "learning_rate": 2.919023960891039e-06,
+ "loss": 0.8436,
+ "step": 3939
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9046033199438586,
+ "learning_rate": 2.9146247722061806e-06,
+ "loss": 0.9821,
+ "step": 3940
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.887536273847992,
+ "learning_rate": 2.910228335399419e-06,
+ "loss": 0.8661,
+ "step": 3941
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9516740042440787,
+ "learning_rate": 2.90583465217828e-06,
+ "loss": 0.843,
+ "step": 3942
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7621048480641931,
+ "learning_rate": 2.9014437242492133e-06,
+ "loss": 0.8682,
+ "step": 3943
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9378964700898026,
+ "learning_rate": 2.897055553317607e-06,
+ "loss": 0.9173,
+ "step": 3944
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9375154849104759,
+ "learning_rate": 2.89267014108778e-06,
+ "loss": 0.9205,
+ "step": 3945
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9078507796143372,
+ "learning_rate": 2.8882874892629654e-06,
+ "loss": 0.8917,
+ "step": 3946
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7924118884903499,
+ "learning_rate": 2.883907599545348e-06,
+ "loss": 0.8755,
+ "step": 3947
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9845009472008538,
+ "learning_rate": 2.8795304736360184e-06,
+ "loss": 0.9097,
+ "step": 3948
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9330035909593539,
+ "learning_rate": 2.8751561132350025e-06,
+ "loss": 0.8708,
+ "step": 3949
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7318427202512602,
+ "learning_rate": 2.8707845200412567e-06,
+ "loss": 0.8349,
+ "step": 3950
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9082116900829754,
+ "learning_rate": 2.866415695752649e-06,
+ "loss": 0.9196,
+ "step": 3951
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9088991120971996,
+ "learning_rate": 2.862049642065986e-06,
+ "loss": 0.9053,
+ "step": 3952
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8610842604456892,
+ "learning_rate": 2.857686360676991e-06,
+ "loss": 0.8836,
+ "step": 3953
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9041538069995724,
+ "learning_rate": 2.853325853280312e-06,
+ "loss": 0.8356,
+ "step": 3954
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8539247020789565,
+ "learning_rate": 2.8489681215695242e-06,
+ "loss": 0.9016,
+ "step": 3955
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.909722474364938,
+ "learning_rate": 2.8446131672371136e-06,
+ "loss": 0.9056,
+ "step": 3956
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9037994274043482,
+ "learning_rate": 2.840260991974497e-06,
+ "loss": 0.8243,
+ "step": 3957
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8707120164808277,
+ "learning_rate": 2.83591159747201e-06,
+ "loss": 0.8917,
+ "step": 3958
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9462037466305331,
+ "learning_rate": 2.8315649854189066e-06,
+ "loss": 0.9,
+ "step": 3959
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9059820462435377,
+ "learning_rate": 2.8272211575033635e-06,
+ "loss": 0.8936,
+ "step": 3960
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9792666135086842,
+ "learning_rate": 2.8228801154124687e-06,
+ "loss": 0.9378,
+ "step": 3961
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8921530343106873,
+ "learning_rate": 2.8185418608322344e-06,
+ "loss": 0.8279,
+ "step": 3962
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9150192931369385,
+ "learning_rate": 2.814206395447593e-06,
+ "loss": 0.9013,
+ "step": 3963
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8238345876329904,
+ "learning_rate": 2.80987372094238e-06,
+ "loss": 0.8487,
+ "step": 3964
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9002758978851323,
+ "learning_rate": 2.805543838999364e-06,
+ "loss": 0.8564,
+ "step": 3965
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.7934565646830434,
+ "learning_rate": 2.801216751300223e-06,
+ "loss": 0.8001,
+ "step": 3966
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9701515328523692,
+ "learning_rate": 2.796892459525541e-06,
+ "loss": 0.9406,
+ "step": 3967
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.999615143967618,
+ "learning_rate": 2.7925709653548295e-06,
+ "loss": 0.8838,
+ "step": 3968
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9995070391192742,
+ "learning_rate": 2.788252270466497e-06,
+ "loss": 0.9814,
+ "step": 3969
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9313000390350881,
+ "learning_rate": 2.783936376537886e-06,
+ "loss": 0.8649,
+ "step": 3970
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9073792564934218,
+ "learning_rate": 2.7796232852452378e-06,
+ "loss": 0.9364,
+ "step": 3971
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.9311758925066174,
+ "learning_rate": 2.775312998263703e-06,
+ "loss": 0.8874,
+ "step": 3972
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8825963778289101,
+ "learning_rate": 2.771005517267349e-06,
+ "loss": 0.8729,
+ "step": 3973
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.0156201367078397,
+ "learning_rate": 2.7667008439291552e-06,
+ "loss": 0.9193,
+ "step": 3974
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.8974079974149981,
+ "learning_rate": 2.762398979920998e-06,
+ "loss": 0.9169,
+ "step": 3975
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 1.1233402822593637,
+ "learning_rate": 2.7580999269136854e-06,
+ "loss": 0.8839,
+ "step": 3976
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9969838223507892,
+ "learning_rate": 2.7538036865769093e-06,
+ "loss": 0.873,
+ "step": 3977
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.936130371584019,
+ "learning_rate": 2.749510260579282e-06,
+ "loss": 0.9124,
+ "step": 3978
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9999080935634322,
+ "learning_rate": 2.7452196505883265e-06,
+ "loss": 0.8821,
+ "step": 3979
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.7232369934331282,
+ "learning_rate": 2.7409318582704594e-06,
+ "loss": 0.7474,
+ "step": 3980
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.867562927995819,
+ "learning_rate": 2.736646885291011e-06,
+ "loss": 0.9341,
+ "step": 3981
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8499419441123263,
+ "learning_rate": 2.7323647333142176e-06,
+ "loss": 0.9247,
+ "step": 3982
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8675976700902036,
+ "learning_rate": 2.728085404003217e-06,
+ "loss": 0.8664,
+ "step": 3983
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9082003014018147,
+ "learning_rate": 2.7238088990200538e-06,
+ "loss": 0.8688,
+ "step": 3984
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0136988017010031,
+ "learning_rate": 2.7195352200256675e-06,
+ "loss": 0.9222,
+ "step": 3985
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9517713920169355,
+ "learning_rate": 2.7152643686799095e-06,
+ "loss": 0.9413,
+ "step": 3986
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9765890427796976,
+ "learning_rate": 2.710996346641528e-06,
+ "loss": 0.982,
+ "step": 3987
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9450695049660316,
+ "learning_rate": 2.706731155568175e-06,
+ "loss": 0.8556,
+ "step": 3988
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8783435829796219,
+ "learning_rate": 2.7024687971164032e-06,
+ "loss": 0.8494,
+ "step": 3989
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9080928358669355,
+ "learning_rate": 2.698209272941659e-06,
+ "loss": 0.8968,
+ "step": 3990
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8795663074369697,
+ "learning_rate": 2.693952584698294e-06,
+ "loss": 0.9132,
+ "step": 3991
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8858328071594844,
+ "learning_rate": 2.689698734039561e-06,
+ "loss": 0.8763,
+ "step": 3992
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8634775510871611,
+ "learning_rate": 2.685447722617597e-06,
+ "loss": 0.8472,
+ "step": 3993
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8552191671796697,
+ "learning_rate": 2.6811995520834543e-06,
+ "loss": 0.9068,
+ "step": 3994
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9119674262779448,
+ "learning_rate": 2.676954224087075e-06,
+ "loss": 0.8004,
+ "step": 3995
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9129993004929062,
+ "learning_rate": 2.6727117402772886e-06,
+ "loss": 0.9193,
+ "step": 3996
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0262267579702624,
+ "learning_rate": 2.668472102301829e-06,
+ "loss": 0.9168,
+ "step": 3997
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8402002882161677,
+ "learning_rate": 2.664235311807327e-06,
+ "loss": 0.8912,
+ "step": 3998
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9883382087246059,
+ "learning_rate": 2.6600013704392946e-06,
+ "loss": 0.8919,
+ "step": 3999
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.1079905687977718,
+ "learning_rate": 2.655770279842157e-06,
+ "loss": 0.8696,
+ "step": 4000
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.860415843538877,
+ "learning_rate": 2.651542041659211e-06,
+ "loss": 0.9333,
+ "step": 4001
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8435081142389877,
+ "learning_rate": 2.6473166575326603e-06,
+ "loss": 0.9256,
+ "step": 4002
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0396659674376807,
+ "learning_rate": 2.6430941291035984e-06,
+ "loss": 0.9139,
+ "step": 4003
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8764834914707512,
+ "learning_rate": 2.6388744580119975e-06,
+ "loss": 0.872,
+ "step": 4004
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8885146098838413,
+ "learning_rate": 2.6346576458967397e-06,
+ "loss": 0.8757,
+ "step": 4005
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0940675197953051,
+ "learning_rate": 2.630443694395579e-06,
+ "loss": 0.9263,
+ "step": 4006
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8552372241634835,
+ "learning_rate": 2.6262326051451683e-06,
+ "loss": 0.8334,
+ "step": 4007
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9322542428007772,
+ "learning_rate": 2.6220243797810483e-06,
+ "loss": 0.9023,
+ "step": 4008
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9451728835430094,
+ "learning_rate": 2.6178190199376394e-06,
+ "loss": 0.9039,
+ "step": 4009
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.942235268274314,
+ "learning_rate": 2.6136165272482596e-06,
+ "loss": 0.9178,
+ "step": 4010
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0203195968712637,
+ "learning_rate": 2.6094169033451066e-06,
+ "loss": 0.8793,
+ "step": 4011
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8950489385865193,
+ "learning_rate": 2.605220149859267e-06,
+ "loss": 0.8705,
+ "step": 4012
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9667015002148893,
+ "learning_rate": 2.6010262684207134e-06,
+ "loss": 0.8429,
+ "step": 4013
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8242413686636327,
+ "learning_rate": 2.596835260658297e-06,
+ "loss": 0.8166,
+ "step": 4014
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8657516382124385,
+ "learning_rate": 2.5926471281997577e-06,
+ "loss": 0.8587,
+ "step": 4015
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9275974136193426,
+ "learning_rate": 2.588461872671719e-06,
+ "loss": 0.9189,
+ "step": 4016
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8849700860961025,
+ "learning_rate": 2.5842794956996863e-06,
+ "loss": 0.9164,
+ "step": 4017
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8904611707155811,
+ "learning_rate": 2.580099998908049e-06,
+ "loss": 0.9107,
+ "step": 4018
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9383535438474248,
+ "learning_rate": 2.575923383920069e-06,
+ "loss": 0.8524,
+ "step": 4019
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.824236618205373,
+ "learning_rate": 2.5717496523578998e-06,
+ "loss": 0.8418,
+ "step": 4020
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8960245871106074,
+ "learning_rate": 2.5675788058425723e-06,
+ "loss": 0.7927,
+ "step": 4021
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9997637668059359,
+ "learning_rate": 2.563410845993988e-06,
+ "loss": 0.9317,
+ "step": 4022
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0945901385876757,
+ "learning_rate": 2.5592457744309405e-06,
+ "loss": 0.8989,
+ "step": 4023
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.7829907181592052,
+ "learning_rate": 2.5550835927710982e-06,
+ "loss": 0.8767,
+ "step": 4024
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.8033207897195992,
+ "learning_rate": 2.5509243026309983e-06,
+ "loss": 0.8061,
+ "step": 4025
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.0069900773031213,
+ "learning_rate": 2.546767905626063e-06,
+ "loss": 0.9182,
+ "step": 4026
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 1.1001463255802362,
+ "learning_rate": 2.5426144033705937e-06,
+ "loss": 0.9219,
+ "step": 4027
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.9630224280845321,
+ "learning_rate": 2.5384637974777513e-06,
+ "loss": 0.9327,
+ "step": 4028
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0035980552605197,
+ "learning_rate": 2.5343160895595977e-06,
+ "loss": 0.7907,
+ "step": 4029
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0742246335041878,
+ "learning_rate": 2.530171281227044e-06,
+ "loss": 0.9693,
+ "step": 4030
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.2155405982873126,
+ "learning_rate": 2.52602937408989e-06,
+ "loss": 0.8633,
+ "step": 4031
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0103436831615604,
+ "learning_rate": 2.5218903697568075e-06,
+ "loss": 0.8354,
+ "step": 4032
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9607433216427947,
+ "learning_rate": 2.517754269835332e-06,
+ "loss": 0.8591,
+ "step": 4033
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.967769594887381,
+ "learning_rate": 2.5136210759318814e-06,
+ "loss": 0.8976,
+ "step": 4034
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9875104240098554,
+ "learning_rate": 2.5094907896517383e-06,
+ "loss": 0.8792,
+ "step": 4035
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.001837531331814,
+ "learning_rate": 2.505363412599059e-06,
+ "loss": 0.9074,
+ "step": 4036
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9563728281489,
+ "learning_rate": 2.5012389463768737e-06,
+ "loss": 0.882,
+ "step": 4037
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.052483921655931,
+ "learning_rate": 2.4971173925870694e-06,
+ "loss": 0.899,
+ "step": 4038
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8939458997742014,
+ "learning_rate": 2.4929987528304144e-06,
+ "loss": 0.9054,
+ "step": 4039
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9388636875761888,
+ "learning_rate": 2.4888830287065414e-06,
+ "loss": 0.873,
+ "step": 4040
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9735791321651764,
+ "learning_rate": 2.4847702218139493e-06,
+ "loss": 0.8823,
+ "step": 4041
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0298245970949436,
+ "learning_rate": 2.480660333750007e-06,
+ "loss": 0.861,
+ "step": 4042
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9908190333010166,
+ "learning_rate": 2.476553366110944e-06,
+ "loss": 0.8903,
+ "step": 4043
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9123243136347812,
+ "learning_rate": 2.4724493204918598e-06,
+ "loss": 0.9315,
+ "step": 4044
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9745453873730154,
+ "learning_rate": 2.4683481984867207e-06,
+ "loss": 0.8608,
+ "step": 4045
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9006398511842804,
+ "learning_rate": 2.4642500016883532e-06,
+ "loss": 0.8974,
+ "step": 4046
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9162971825383597,
+ "learning_rate": 2.4601547316884544e-06,
+ "loss": 0.7963,
+ "step": 4047
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7954773502759751,
+ "learning_rate": 2.4560623900775728e-06,
+ "loss": 0.8196,
+ "step": 4048
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9200893131269684,
+ "learning_rate": 2.45197297844513e-06,
+ "loss": 0.8808,
+ "step": 4049
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.963848590067518,
+ "learning_rate": 2.447886498379409e-06,
+ "loss": 0.9146,
+ "step": 4050
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8370481026817104,
+ "learning_rate": 2.4438029514675444e-06,
+ "loss": 0.8167,
+ "step": 4051
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9124154083158099,
+ "learning_rate": 2.439722339295545e-06,
+ "loss": 0.8688,
+ "step": 4052
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9596788206716473,
+ "learning_rate": 2.4356446634482756e-06,
+ "loss": 0.9,
+ "step": 4053
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9026104302825495,
+ "learning_rate": 2.4315699255094516e-06,
+ "loss": 0.8507,
+ "step": 4054
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8799880059621884,
+ "learning_rate": 2.427498127061658e-06,
+ "loss": 0.8473,
+ "step": 4055
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.055007090316962,
+ "learning_rate": 2.423429269686336e-06,
+ "loss": 0.8429,
+ "step": 4056
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0000623859562858,
+ "learning_rate": 2.4193633549637765e-06,
+ "loss": 0.8758,
+ "step": 4057
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.1037124690313722,
+ "learning_rate": 2.4153003844731425e-06,
+ "loss": 0.8649,
+ "step": 4058
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0369699282478542,
+ "learning_rate": 2.411240359792438e-06,
+ "loss": 0.8646,
+ "step": 4059
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9569318585945045,
+ "learning_rate": 2.407183282498534e-06,
+ "loss": 0.8687,
+ "step": 4060
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8754398527489688,
+ "learning_rate": 2.403129154167153e-06,
+ "loss": 0.8689,
+ "step": 4061
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.887308770315621,
+ "learning_rate": 2.3990779763728666e-06,
+ "loss": 0.8295,
+ "step": 4062
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9346038990203916,
+ "learning_rate": 2.3950297506891084e-06,
+ "loss": 0.8883,
+ "step": 4063
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9206691830265393,
+ "learning_rate": 2.390984478688164e-06,
+ "loss": 0.9462,
+ "step": 4064
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9715102836408156,
+ "learning_rate": 2.386942161941169e-06,
+ "loss": 0.9543,
+ "step": 4065
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7966573981430686,
+ "learning_rate": 2.3829028020181154e-06,
+ "loss": 0.8603,
+ "step": 4066
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8329182349694442,
+ "learning_rate": 2.3788664004878405e-06,
+ "loss": 0.8719,
+ "step": 4067
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.81818784771716,
+ "learning_rate": 2.374832958918035e-06,
+ "loss": 0.8929,
+ "step": 4068
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9050910748280558,
+ "learning_rate": 2.3708024788752448e-06,
+ "loss": 0.8851,
+ "step": 4069
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0899187351907824,
+ "learning_rate": 2.3667749619248614e-06,
+ "loss": 0.8801,
+ "step": 4070
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9991684980316599,
+ "learning_rate": 2.3627504096311273e-06,
+ "loss": 0.8863,
+ "step": 4071
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.7524456863232081,
+ "learning_rate": 2.3587288235571258e-06,
+ "loss": 0.8853,
+ "step": 4072
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8491884838780307,
+ "learning_rate": 2.354710205264801e-06,
+ "loss": 0.7814,
+ "step": 4073
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9268409171191525,
+ "learning_rate": 2.350694556314934e-06,
+ "loss": 0.8927,
+ "step": 4074
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9290743263792193,
+ "learning_rate": 2.3466818782671597e-06,
+ "loss": 0.9389,
+ "step": 4075
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9259989615515022,
+ "learning_rate": 2.3426721726799573e-06,
+ "loss": 0.8902,
+ "step": 4076
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.6910196071179008,
+ "learning_rate": 2.3386654411106446e-06,
+ "loss": 0.8074,
+ "step": 4077
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.9340996247053994,
+ "learning_rate": 2.3346616851153935e-06,
+ "loss": 0.8979,
+ "step": 4078
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 1.0653573731934745,
+ "learning_rate": 2.330660906249218e-06,
+ "loss": 0.9188,
+ "step": 4079
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.8937439750598773,
+ "learning_rate": 2.3266631060659685e-06,
+ "loss": 0.9171,
+ "step": 4080
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8707541188020759,
+ "learning_rate": 2.32266828611835e-06,
+ "loss": 0.8642,
+ "step": 4081
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9385840024984932,
+ "learning_rate": 2.318676447957907e-06,
+ "loss": 0.8472,
+ "step": 4082
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7598230892124757,
+ "learning_rate": 2.3146875931350165e-06,
+ "loss": 0.8482,
+ "step": 4083
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8959896109499784,
+ "learning_rate": 2.310701723198908e-06,
+ "loss": 0.901,
+ "step": 4084
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8885092414865883,
+ "learning_rate": 2.3067188396976482e-06,
+ "loss": 0.8991,
+ "step": 4085
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9028384744600961,
+ "learning_rate": 2.3027389441781368e-06,
+ "loss": 0.8974,
+ "step": 4086
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8879483662772303,
+ "learning_rate": 2.2987620381861288e-06,
+ "loss": 0.9344,
+ "step": 4087
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9041839529362387,
+ "learning_rate": 2.2947881232662007e-06,
+ "loss": 0.8936,
+ "step": 4088
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9028787232059573,
+ "learning_rate": 2.290817200961779e-06,
+ "loss": 0.8831,
+ "step": 4089
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.926786829646905,
+ "learning_rate": 2.286849272815126e-06,
+ "loss": 0.9009,
+ "step": 4090
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1179100068671728,
+ "learning_rate": 2.2828843403673338e-06,
+ "loss": 0.9245,
+ "step": 4091
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8656564298300758,
+ "learning_rate": 2.2789224051583403e-06,
+ "loss": 0.8905,
+ "step": 4092
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9607165923938596,
+ "learning_rate": 2.274963468726914e-06,
+ "loss": 0.8875,
+ "step": 4093
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9634473240315448,
+ "learning_rate": 2.2710075326106618e-06,
+ "loss": 0.9263,
+ "step": 4094
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.014904182740305,
+ "learning_rate": 2.2670545983460245e-06,
+ "loss": 0.8968,
+ "step": 4095
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9080426502478219,
+ "learning_rate": 2.263104667468272e-06,
+ "loss": 0.8786,
+ "step": 4096
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9237970035424105,
+ "learning_rate": 2.259157741511515e-06,
+ "loss": 0.835,
+ "step": 4097
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8695469303036835,
+ "learning_rate": 2.2552138220086927e-06,
+ "loss": 0.8819,
+ "step": 4098
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9232875172692364,
+ "learning_rate": 2.2512729104915787e-06,
+ "loss": 0.9221,
+ "step": 4099
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9126458734436097,
+ "learning_rate": 2.2473350084907806e-06,
+ "loss": 0.9396,
+ "step": 4100
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8850388026242367,
+ "learning_rate": 2.243400117535729e-06,
+ "loss": 0.9147,
+ "step": 4101
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0384132801287065,
+ "learning_rate": 2.2394682391546928e-06,
+ "loss": 0.9521,
+ "step": 4102
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9729842423856129,
+ "learning_rate": 2.2355393748747702e-06,
+ "loss": 0.8762,
+ "step": 4103
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9946317186957229,
+ "learning_rate": 2.2316135262218787e-06,
+ "loss": 0.9835,
+ "step": 4104
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8642999456933055,
+ "learning_rate": 2.227690694720784e-06,
+ "loss": 0.8872,
+ "step": 4105
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8566492152229089,
+ "learning_rate": 2.223770881895061e-06,
+ "loss": 0.8275,
+ "step": 4106
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9836308489734794,
+ "learning_rate": 2.2198540892671215e-06,
+ "loss": 0.8499,
+ "step": 4107
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8928198690489183,
+ "learning_rate": 2.215940318358206e-06,
+ "loss": 0.9258,
+ "step": 4108
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.097043195844042,
+ "learning_rate": 2.2120295706883698e-06,
+ "loss": 0.9376,
+ "step": 4109
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8653501898374938,
+ "learning_rate": 2.2081218477765097e-06,
+ "loss": 0.8886,
+ "step": 4110
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8394851362498404,
+ "learning_rate": 2.204217151140342e-06,
+ "loss": 0.883,
+ "step": 4111
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9337083108295533,
+ "learning_rate": 2.200315482296398e-06,
+ "loss": 0.9003,
+ "step": 4112
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1482500165327554,
+ "learning_rate": 2.1964168427600462e-06,
+ "loss": 0.8394,
+ "step": 4113
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9123848050228393,
+ "learning_rate": 2.1925212340454737e-06,
+ "loss": 0.9529,
+ "step": 4114
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9879145476720529,
+ "learning_rate": 2.1886286576656834e-06,
+ "loss": 0.959,
+ "step": 4115
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8606231062834674,
+ "learning_rate": 2.184739115132517e-06,
+ "loss": 0.911,
+ "step": 4116
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8886655729982125,
+ "learning_rate": 2.1808526079566215e-06,
+ "loss": 0.8061,
+ "step": 4117
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.926451670918023,
+ "learning_rate": 2.1769691376474722e-06,
+ "loss": 0.9191,
+ "step": 4118
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9565823558080365,
+ "learning_rate": 2.1730887057133678e-06,
+ "loss": 0.8662,
+ "step": 4119
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7389054326253872,
+ "learning_rate": 2.1692113136614177e-06,
+ "loss": 0.8202,
+ "step": 4120
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1228020185487793,
+ "learning_rate": 2.1653369629975595e-06,
+ "loss": 0.8583,
+ "step": 4121
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8963055951822942,
+ "learning_rate": 2.1614656552265457e-06,
+ "loss": 0.9219,
+ "step": 4122
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9207401832410509,
+ "learning_rate": 2.157597391851949e-06,
+ "loss": 0.9327,
+ "step": 4123
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8111609539925131,
+ "learning_rate": 2.1537321743761587e-06,
+ "loss": 0.8323,
+ "step": 4124
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.040173267158971,
+ "learning_rate": 2.1498700043003773e-06,
+ "loss": 0.8708,
+ "step": 4125
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0454868556219965,
+ "learning_rate": 2.1460108831246295e-06,
+ "loss": 0.9322,
+ "step": 4126
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.1145646862077467,
+ "learning_rate": 2.142154812347753e-06,
+ "loss": 0.9242,
+ "step": 4127
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.7542894098810323,
+ "learning_rate": 2.1383017934674012e-06,
+ "loss": 0.8264,
+ "step": 4128
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 1.0853165928601176,
+ "learning_rate": 2.1344518279800454e-06,
+ "loss": 0.8932,
+ "step": 4129
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8619712325796545,
+ "learning_rate": 2.130604917380962e-06,
+ "loss": 0.897,
+ "step": 4130
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.8526274123517904,
+ "learning_rate": 2.12676106316425e-06,
+ "loss": 0.854,
+ "step": 4131
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.9174692413105993,
+ "learning_rate": 2.1229202668228197e-06,
+ "loss": 0.8768,
+ "step": 4132
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.97909810000739,
+ "learning_rate": 2.1190825298483855e-06,
+ "loss": 0.8886,
+ "step": 4133
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9333357359673061,
+ "learning_rate": 2.115247853731488e-06,
+ "loss": 0.8792,
+ "step": 4134
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8494576226832132,
+ "learning_rate": 2.11141623996147e-06,
+ "loss": 0.8449,
+ "step": 4135
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8537837931344772,
+ "learning_rate": 2.107587690026481e-06,
+ "loss": 0.8122,
+ "step": 4136
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9388101837707732,
+ "learning_rate": 2.103762205413493e-06,
+ "loss": 0.8795,
+ "step": 4137
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8949476894539724,
+ "learning_rate": 2.0999397876082726e-06,
+ "loss": 0.8947,
+ "step": 4138
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8823012117828377,
+ "learning_rate": 2.096120438095404e-06,
+ "loss": 0.8823,
+ "step": 4139
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8593543554123816,
+ "learning_rate": 2.092304158358286e-06,
+ "loss": 0.8173,
+ "step": 4140
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0204157630345805,
+ "learning_rate": 2.0884909498791106e-06,
+ "loss": 0.8671,
+ "step": 4141
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8854304487164367,
+ "learning_rate": 2.0846808141388852e-06,
+ "loss": 0.8411,
+ "step": 4142
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9389146274622053,
+ "learning_rate": 2.080873752617426e-06,
+ "loss": 0.9183,
+ "step": 4143
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.908298147521934,
+ "learning_rate": 2.0770697667933436e-06,
+ "loss": 0.8465,
+ "step": 4144
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9507813187926712,
+ "learning_rate": 2.073268858144074e-06,
+ "loss": 0.8763,
+ "step": 4145
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9261199670051468,
+ "learning_rate": 2.0694710281458372e-06,
+ "loss": 0.8529,
+ "step": 4146
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8087655514954948,
+ "learning_rate": 2.0656762782736693e-06,
+ "loss": 0.8418,
+ "step": 4147
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9368211053881321,
+ "learning_rate": 2.061884610001411e-06,
+ "loss": 0.8984,
+ "step": 4148
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8912197616398623,
+ "learning_rate": 2.0580960248016966e-06,
+ "loss": 0.8444,
+ "step": 4149
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8122310280623217,
+ "learning_rate": 2.0543105241459713e-06,
+ "loss": 0.8401,
+ "step": 4150
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8079381273920042,
+ "learning_rate": 2.0505281095044804e-06,
+ "loss": 0.8429,
+ "step": 4151
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9062019494180443,
+ "learning_rate": 2.0467487823462696e-06,
+ "loss": 0.9086,
+ "step": 4152
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9883686201862377,
+ "learning_rate": 2.042972544139189e-06,
+ "loss": 0.9257,
+ "step": 4153
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8550115895013028,
+ "learning_rate": 2.039199396349881e-06,
+ "loss": 0.94,
+ "step": 4154
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9516941578752195,
+ "learning_rate": 2.0354293404437963e-06,
+ "loss": 0.8815,
+ "step": 4155
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9962738337135307,
+ "learning_rate": 2.0316623778851784e-06,
+ "loss": 0.9483,
+ "step": 4156
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.7772316116470102,
+ "learning_rate": 2.0278985101370753e-06,
+ "loss": 0.7887,
+ "step": 4157
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0407315491959217,
+ "learning_rate": 2.024137738661329e-06,
+ "loss": 0.8767,
+ "step": 4158
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8603796905613208,
+ "learning_rate": 2.020380064918579e-06,
+ "loss": 0.7653,
+ "step": 4159
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0884817702885652,
+ "learning_rate": 2.01662549036826e-06,
+ "loss": 0.885,
+ "step": 4160
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0035394010366627,
+ "learning_rate": 2.0128740164686134e-06,
+ "loss": 0.8887,
+ "step": 4161
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9935432481978964,
+ "learning_rate": 2.009125644676656e-06,
+ "loss": 0.9244,
+ "step": 4162
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.070597955130549,
+ "learning_rate": 2.0053803764482226e-06,
+ "loss": 1.0022,
+ "step": 4163
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9887648205586059,
+ "learning_rate": 2.001638213237932e-06,
+ "loss": 0.9367,
+ "step": 4164
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8919775718097533,
+ "learning_rate": 1.997899156499191e-06,
+ "loss": 0.9015,
+ "step": 4165
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9436937895966855,
+ "learning_rate": 1.994163207684212e-06,
+ "loss": 0.9032,
+ "step": 4166
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0769371787866853,
+ "learning_rate": 1.9904303682439896e-06,
+ "loss": 0.8859,
+ "step": 4167
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9240989582938395,
+ "learning_rate": 1.986700639628316e-06,
+ "loss": 0.8362,
+ "step": 4168
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.989569753210682,
+ "learning_rate": 1.9829740232857807e-06,
+ "loss": 0.9232,
+ "step": 4169
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8151679258449964,
+ "learning_rate": 1.9792505206637523e-06,
+ "loss": 0.8911,
+ "step": 4170
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9416777841936684,
+ "learning_rate": 1.9755301332083997e-06,
+ "loss": 0.9009,
+ "step": 4171
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8610798178870233,
+ "learning_rate": 1.9718128623646792e-06,
+ "loss": 0.8805,
+ "step": 4172
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.9108907842189863,
+ "learning_rate": 1.9680987095763315e-06,
+ "loss": 0.8947,
+ "step": 4173
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8849803096210145,
+ "learning_rate": 1.964387676285894e-06,
+ "loss": 0.8781,
+ "step": 4174
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8302805443398776,
+ "learning_rate": 1.9606797639346874e-06,
+ "loss": 0.8453,
+ "step": 4175
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.7957440027524559,
+ "learning_rate": 1.9569749739628243e-06,
+ "loss": 0.826,
+ "step": 4176
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.943797668429873,
+ "learning_rate": 1.9532733078092034e-06,
+ "loss": 0.8818,
+ "step": 4177
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8500557631151054,
+ "learning_rate": 1.9495747669115062e-06,
+ "loss": 0.8259,
+ "step": 4178
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8967577054252616,
+ "learning_rate": 1.9458793527062035e-06,
+ "loss": 0.8825,
+ "step": 4179
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.2295227916186988,
+ "learning_rate": 1.9421870666285523e-06,
+ "loss": 0.8864,
+ "step": 4180
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8969752125296094,
+ "learning_rate": 1.9384979101125944e-06,
+ "loss": 0.892,
+ "step": 4181
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.866511603452024,
+ "learning_rate": 1.934811884591159e-06,
+ "loss": 0.8546,
+ "step": 4182
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 1.0143956233364166,
+ "learning_rate": 1.93112899149585e-06,
+ "loss": 0.9279,
+ "step": 4183
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.8971611525998221,
+ "learning_rate": 1.9274492322570616e-06,
+ "loss": 0.8872,
+ "step": 4184
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8421146961160477,
+ "learning_rate": 1.923772608303972e-06,
+ "loss": 0.85,
+ "step": 4185
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9453142979896407,
+ "learning_rate": 1.9200991210645394e-06,
+ "loss": 0.9192,
+ "step": 4186
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0462688231882833,
+ "learning_rate": 1.916428771965506e-06,
+ "loss": 0.9497,
+ "step": 4187
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8628255841189849,
+ "learning_rate": 1.912761562432388e-06,
+ "loss": 0.8165,
+ "step": 4188
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7352639278433662,
+ "learning_rate": 1.9090974938894902e-06,
+ "loss": 0.799,
+ "step": 4189
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.07980904107261,
+ "learning_rate": 1.9054365677598963e-06,
+ "loss": 0.9411,
+ "step": 4190
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9064350449233921,
+ "learning_rate": 1.9017787854654613e-06,
+ "loss": 0.8661,
+ "step": 4191
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9686829188986636,
+ "learning_rate": 1.898124148426832e-06,
+ "loss": 0.9502,
+ "step": 4192
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0384564388980664,
+ "learning_rate": 1.8944726580634287e-06,
+ "loss": 0.9211,
+ "step": 4193
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7584584646982807,
+ "learning_rate": 1.8908243157934424e-06,
+ "loss": 0.7989,
+ "step": 4194
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.834550012029811,
+ "learning_rate": 1.8871791230338499e-06,
+ "loss": 0.8809,
+ "step": 4195
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0410863356256124,
+ "learning_rate": 1.883537081200404e-06,
+ "loss": 0.8692,
+ "step": 4196
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9521083798907651,
+ "learning_rate": 1.8798981917076254e-06,
+ "loss": 0.916,
+ "step": 4197
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0754796019560438,
+ "learning_rate": 1.876262455968826e-06,
+ "loss": 0.9022,
+ "step": 4198
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0363029182823422,
+ "learning_rate": 1.872629875396076e-06,
+ "loss": 0.8648,
+ "step": 4199
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.864903522918182,
+ "learning_rate": 1.8690004514002314e-06,
+ "loss": 0.847,
+ "step": 4200
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9460555407020449,
+ "learning_rate": 1.8653741853909201e-06,
+ "loss": 0.9304,
+ "step": 4201
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8459959079469654,
+ "learning_rate": 1.861751078776538e-06,
+ "loss": 0.8592,
+ "step": 4202
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7811839567522102,
+ "learning_rate": 1.8581311329642592e-06,
+ "loss": 0.836,
+ "step": 4203
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.011020768515595,
+ "learning_rate": 1.8545143493600293e-06,
+ "loss": 0.9431,
+ "step": 4204
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9643408748874488,
+ "learning_rate": 1.8509007293685666e-06,
+ "loss": 0.8591,
+ "step": 4205
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9576397938951362,
+ "learning_rate": 1.8472902743933608e-06,
+ "loss": 0.8651,
+ "step": 4206
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9167226135139332,
+ "learning_rate": 1.8436829858366655e-06,
+ "loss": 0.8573,
+ "step": 4207
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8704974275409894,
+ "learning_rate": 1.8400788650995137e-06,
+ "loss": 0.8649,
+ "step": 4208
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8820246946473046,
+ "learning_rate": 1.8364779135817045e-06,
+ "loss": 0.8062,
+ "step": 4209
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8346480363282573,
+ "learning_rate": 1.8328801326818045e-06,
+ "loss": 0.8575,
+ "step": 4210
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8120428420288254,
+ "learning_rate": 1.829285523797155e-06,
+ "loss": 0.8611,
+ "step": 4211
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0025796855274374,
+ "learning_rate": 1.8256940883238538e-06,
+ "loss": 0.9148,
+ "step": 4212
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8364500447554787,
+ "learning_rate": 1.822105827656776e-06,
+ "loss": 0.8318,
+ "step": 4213
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9864320303697556,
+ "learning_rate": 1.8185207431895613e-06,
+ "loss": 0.8939,
+ "step": 4214
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8851820165130008,
+ "learning_rate": 1.8149388363146148e-06,
+ "loss": 0.9133,
+ "step": 4215
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9235879806978636,
+ "learning_rate": 1.8113601084231091e-06,
+ "loss": 0.8847,
+ "step": 4216
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9317026569999612,
+ "learning_rate": 1.8077845609049782e-06,
+ "loss": 0.8405,
+ "step": 4217
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8877757388003821,
+ "learning_rate": 1.8042121951489254e-06,
+ "loss": 0.8611,
+ "step": 4218
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0125013366148803,
+ "learning_rate": 1.800643012542418e-06,
+ "loss": 0.8934,
+ "step": 4219
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.77328029528668,
+ "learning_rate": 1.7970770144716777e-06,
+ "loss": 0.8451,
+ "step": 4220
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9026477480194623,
+ "learning_rate": 1.7935142023217056e-06,
+ "loss": 0.9102,
+ "step": 4221
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9540904370482145,
+ "learning_rate": 1.7899545774762573e-06,
+ "loss": 0.8549,
+ "step": 4222
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9659909913805189,
+ "learning_rate": 1.7863981413178433e-06,
+ "loss": 0.8758,
+ "step": 4223
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8974931301760378,
+ "learning_rate": 1.7828448952277456e-06,
+ "loss": 0.8486,
+ "step": 4224
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9038804982423293,
+ "learning_rate": 1.7792948405860079e-06,
+ "loss": 0.9082,
+ "step": 4225
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8518238489561752,
+ "learning_rate": 1.7757479787714217e-06,
+ "loss": 0.8289,
+ "step": 4226
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9732642755656459,
+ "learning_rate": 1.7722043111615572e-06,
+ "loss": 0.8674,
+ "step": 4227
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9981586354999044,
+ "learning_rate": 1.768663839132727e-06,
+ "loss": 0.8676,
+ "step": 4228
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.9767566547508983,
+ "learning_rate": 1.7651265640600113e-06,
+ "loss": 0.8986,
+ "step": 4229
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.7944108016619675,
+ "learning_rate": 1.7615924873172506e-06,
+ "loss": 0.8403,
+ "step": 4230
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8543094994212218,
+ "learning_rate": 1.7580616102770353e-06,
+ "loss": 0.8804,
+ "step": 4231
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8680298111959078,
+ "learning_rate": 1.754533934310717e-06,
+ "loss": 0.8702,
+ "step": 4232
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.798555398853347,
+ "learning_rate": 1.7510094607884075e-06,
+ "loss": 0.8088,
+ "step": 4233
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.0168098896767641,
+ "learning_rate": 1.7474881910789698e-06,
+ "loss": 0.8939,
+ "step": 4234
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 1.010765749661305,
+ "learning_rate": 1.7439701265500274e-06,
+ "loss": 0.8634,
+ "step": 4235
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.8669125584507791,
+ "learning_rate": 1.740455268567951e-06,
+ "loss": 0.8659,
+ "step": 4236
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0328717545418735,
+ "learning_rate": 1.7369436184978738e-06,
+ "loss": 0.8859,
+ "step": 4237
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8446788596592253,
+ "learning_rate": 1.7334351777036807e-06,
+ "loss": 0.8365,
+ "step": 4238
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0790026405786928,
+ "learning_rate": 1.729929947548008e-06,
+ "loss": 0.9184,
+ "step": 4239
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8880365172860483,
+ "learning_rate": 1.7264279293922503e-06,
+ "loss": 0.8756,
+ "step": 4240
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.852480900993756,
+ "learning_rate": 1.7229291245965463e-06,
+ "loss": 0.8552,
+ "step": 4241
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8995156844257409,
+ "learning_rate": 1.7194335345197933e-06,
+ "loss": 0.8795,
+ "step": 4242
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.033019126091575,
+ "learning_rate": 1.7159411605196407e-06,
+ "loss": 0.9104,
+ "step": 4243
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9042868541549544,
+ "learning_rate": 1.7124520039524805e-06,
+ "loss": 0.8678,
+ "step": 4244
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9728929156802403,
+ "learning_rate": 1.7089660661734685e-06,
+ "loss": 0.9187,
+ "step": 4245
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9176002094908589,
+ "learning_rate": 1.7054833485364962e-06,
+ "loss": 0.9312,
+ "step": 4246
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9092868064217556,
+ "learning_rate": 1.702003852394214e-06,
+ "loss": 0.8577,
+ "step": 4247
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8676912630242394,
+ "learning_rate": 1.6985275790980205e-06,
+ "loss": 0.8912,
+ "step": 4248
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8621206859482691,
+ "learning_rate": 1.6950545299980526e-06,
+ "loss": 0.9031,
+ "step": 4249
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9537558641892446,
+ "learning_rate": 1.691584706443209e-06,
+ "loss": 0.9074,
+ "step": 4250
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8069863327611584,
+ "learning_rate": 1.6881181097811305e-06,
+ "loss": 0.8193,
+ "step": 4251
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8971134429881099,
+ "learning_rate": 1.6846547413581981e-06,
+ "loss": 0.8558,
+ "step": 4252
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.010128964422933,
+ "learning_rate": 1.6811946025195459e-06,
+ "loss": 0.8793,
+ "step": 4253
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.826250798207253,
+ "learning_rate": 1.6777376946090552e-06,
+ "loss": 0.8046,
+ "step": 4254
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9388189750495171,
+ "learning_rate": 1.674284018969342e-06,
+ "loss": 0.912,
+ "step": 4255
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8952634639495284,
+ "learning_rate": 1.6708335769417827e-06,
+ "loss": 0.9091,
+ "step": 4256
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9927273287526445,
+ "learning_rate": 1.667386369866484e-06,
+ "loss": 0.8048,
+ "step": 4257
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.959179914383579,
+ "learning_rate": 1.6639423990823011e-06,
+ "loss": 0.8558,
+ "step": 4258
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9412212006934919,
+ "learning_rate": 1.660501665926838e-06,
+ "loss": 0.8604,
+ "step": 4259
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0107033510164385,
+ "learning_rate": 1.6570641717364277e-06,
+ "loss": 0.9321,
+ "step": 4260
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9162877665794047,
+ "learning_rate": 1.653629917846159e-06,
+ "loss": 0.845,
+ "step": 4261
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9356101234694048,
+ "learning_rate": 1.6501989055898537e-06,
+ "loss": 0.9274,
+ "step": 4262
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.7825221035897323,
+ "learning_rate": 1.6467711363000794e-06,
+ "loss": 0.8273,
+ "step": 4263
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.915839237746525,
+ "learning_rate": 1.6433466113081442e-06,
+ "loss": 0.9426,
+ "step": 4264
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9492463773319484,
+ "learning_rate": 1.6399253319440888e-06,
+ "loss": 0.9638,
+ "step": 4265
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.01435861381373,
+ "learning_rate": 1.6365072995367004e-06,
+ "loss": 0.8703,
+ "step": 4266
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.032310735939065,
+ "learning_rate": 1.6330925154135057e-06,
+ "loss": 0.9089,
+ "step": 4267
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9474316903677986,
+ "learning_rate": 1.6296809809007652e-06,
+ "loss": 0.899,
+ "step": 4268
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9122522786407117,
+ "learning_rate": 1.6262726973234844e-06,
+ "loss": 0.8979,
+ "step": 4269
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9293180651291256,
+ "learning_rate": 1.6228676660053932e-06,
+ "loss": 0.8791,
+ "step": 4270
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.7856105609802073,
+ "learning_rate": 1.6194658882689718e-06,
+ "loss": 0.8457,
+ "step": 4271
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8979354808600734,
+ "learning_rate": 1.6160673654354331e-06,
+ "loss": 0.9186,
+ "step": 4272
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0921143405482687,
+ "learning_rate": 1.6126720988247168e-06,
+ "loss": 0.8975,
+ "step": 4273
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9670671740207992,
+ "learning_rate": 1.609280089755515e-06,
+ "loss": 0.8516,
+ "step": 4274
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9351974673019499,
+ "learning_rate": 1.605891339545237e-06,
+ "loss": 0.9331,
+ "step": 4275
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9718101490560755,
+ "learning_rate": 1.6025058495100388e-06,
+ "loss": 0.8632,
+ "step": 4276
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0254335153782985,
+ "learning_rate": 1.5991236209648052e-06,
+ "loss": 0.8578,
+ "step": 4277
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.023925867885646,
+ "learning_rate": 1.5957446552231526e-06,
+ "loss": 0.8793,
+ "step": 4278
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9311023098856142,
+ "learning_rate": 1.5923689535974307e-06,
+ "loss": 0.8639,
+ "step": 4279
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.9273209097209918,
+ "learning_rate": 1.588996517398731e-06,
+ "loss": 0.9106,
+ "step": 4280
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8100356359717925,
+ "learning_rate": 1.5856273479368611e-06,
+ "loss": 0.8314,
+ "step": 4281
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.08493525966539,
+ "learning_rate": 1.582261446520371e-06,
+ "loss": 0.8687,
+ "step": 4282
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 1.0588228353494755,
+ "learning_rate": 1.5788988144565397e-06,
+ "loss": 0.9386,
+ "step": 4283
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8846910688163809,
+ "learning_rate": 1.575539453051369e-06,
+ "loss": 0.8948,
+ "step": 4284
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8823865932843107,
+ "learning_rate": 1.572183363609603e-06,
+ "loss": 0.834,
+ "step": 4285
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.916367312075249,
+ "learning_rate": 1.568830547434703e-06,
+ "loss": 0.7975,
+ "step": 4286
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8689353941661974,
+ "learning_rate": 1.5654810058288661e-06,
+ "loss": 0.902,
+ "step": 4287
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.8394528428749988,
+ "learning_rate": 1.5621347400930176e-06,
+ "loss": 0.8281,
+ "step": 4288
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8775838554267056,
+ "learning_rate": 1.5587917515268048e-06,
+ "loss": 0.867,
+ "step": 4289
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.964160629339188,
+ "learning_rate": 1.5554520414286067e-06,
+ "loss": 0.9083,
+ "step": 4290
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9985951864949679,
+ "learning_rate": 1.5521156110955293e-06,
+ "loss": 0.8989,
+ "step": 4291
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.0504657147965988,
+ "learning_rate": 1.5487824618234049e-06,
+ "loss": 0.8712,
+ "step": 4292
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8078373213446511,
+ "learning_rate": 1.54545259490679e-06,
+ "loss": 0.8519,
+ "step": 4293
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.937211660161243,
+ "learning_rate": 1.5421260116389636e-06,
+ "loss": 0.9001,
+ "step": 4294
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9732871656878034,
+ "learning_rate": 1.5388027133119343e-06,
+ "loss": 0.8315,
+ "step": 4295
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8633160539608518,
+ "learning_rate": 1.535482701216433e-06,
+ "loss": 0.9105,
+ "step": 4296
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8641619017556765,
+ "learning_rate": 1.5321659766419129e-06,
+ "loss": 0.8381,
+ "step": 4297
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8865491462244038,
+ "learning_rate": 1.5288525408765564e-06,
+ "loss": 0.8793,
+ "step": 4298
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9223542637078772,
+ "learning_rate": 1.5255423952072567e-06,
+ "loss": 0.8887,
+ "step": 4299
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.849204339706377,
+ "learning_rate": 1.52223554091964e-06,
+ "loss": 0.8348,
+ "step": 4300
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8827438209095655,
+ "learning_rate": 1.5189319792980517e-06,
+ "loss": 0.8546,
+ "step": 4301
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8658099000101591,
+ "learning_rate": 1.5156317116255515e-06,
+ "loss": 0.8301,
+ "step": 4302
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9465128463748287,
+ "learning_rate": 1.5123347391839305e-06,
+ "loss": 0.9351,
+ "step": 4303
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.162625936895587,
+ "learning_rate": 1.5090410632536968e-06,
+ "loss": 0.904,
+ "step": 4304
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9492950425963446,
+ "learning_rate": 1.5057506851140701e-06,
+ "loss": 0.8723,
+ "step": 4305
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8847624309484248,
+ "learning_rate": 1.5024636060429998e-06,
+ "loss": 0.8647,
+ "step": 4306
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.0154618576957262,
+ "learning_rate": 1.4991798273171465e-06,
+ "loss": 0.9599,
+ "step": 4307
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8993919045724885,
+ "learning_rate": 1.49589935021189e-06,
+ "loss": 0.8754,
+ "step": 4308
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8120404721237,
+ "learning_rate": 1.4926221760013393e-06,
+ "loss": 0.8671,
+ "step": 4309
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9686759027044225,
+ "learning_rate": 1.4893483059583014e-06,
+ "loss": 0.9182,
+ "step": 4310
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8825178910172966,
+ "learning_rate": 1.4860777413543138e-06,
+ "loss": 0.9025,
+ "step": 4311
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8492752577428371,
+ "learning_rate": 1.4828104834596268e-06,
+ "loss": 0.8736,
+ "step": 4312
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9995808467990425,
+ "learning_rate": 1.4795465335432036e-06,
+ "loss": 0.824,
+ "step": 4313
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9215206901437705,
+ "learning_rate": 1.4762858928727241e-06,
+ "loss": 0.902,
+ "step": 4314
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9387090032416386,
+ "learning_rate": 1.4730285627145858e-06,
+ "loss": 0.8241,
+ "step": 4315
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8184468693723096,
+ "learning_rate": 1.4697745443338984e-06,
+ "loss": 0.8341,
+ "step": 4316
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8969274254049279,
+ "learning_rate": 1.4665238389944859e-06,
+ "loss": 0.8625,
+ "step": 4317
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8976121630810855,
+ "learning_rate": 1.46327644795888e-06,
+ "loss": 0.883,
+ "step": 4318
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 1.026625129207781,
+ "learning_rate": 1.4600323724883337e-06,
+ "loss": 0.922,
+ "step": 4319
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9489282750554974,
+ "learning_rate": 1.4567916138428072e-06,
+ "loss": 0.8446,
+ "step": 4320
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8444995307678088,
+ "learning_rate": 1.4535541732809755e-06,
+ "loss": 0.8586,
+ "step": 4321
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8727797663941129,
+ "learning_rate": 1.4503200520602245e-06,
+ "loss": 0.8977,
+ "step": 4322
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9610062570706849,
+ "learning_rate": 1.4470892514366442e-06,
+ "loss": 0.8862,
+ "step": 4323
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9010078578242248,
+ "learning_rate": 1.443861772665044e-06,
+ "loss": 0.9004,
+ "step": 4324
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9458466443889767,
+ "learning_rate": 1.4406376169989389e-06,
+ "loss": 0.8412,
+ "step": 4325
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9975906724948811,
+ "learning_rate": 1.4374167856905542e-06,
+ "loss": 0.936,
+ "step": 4326
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8817184421188767,
+ "learning_rate": 1.4341992799908255e-06,
+ "loss": 0.9086,
+ "step": 4327
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9092642430322561,
+ "learning_rate": 1.4309851011493903e-06,
+ "loss": 0.8793,
+ "step": 4328
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8750464387687253,
+ "learning_rate": 1.427774250414601e-06,
+ "loss": 0.8816,
+ "step": 4329
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9331332280697446,
+ "learning_rate": 1.4245667290335175e-06,
+ "loss": 0.9228,
+ "step": 4330
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8619414036259035,
+ "learning_rate": 1.421362538251897e-06,
+ "loss": 0.8856,
+ "step": 4331
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8126223098413,
+ "learning_rate": 1.4181616793142173e-06,
+ "loss": 0.8912,
+ "step": 4332
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8076391294951282,
+ "learning_rate": 1.414964153463655e-06,
+ "loss": 0.8616,
+ "step": 4333
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9215220035914787,
+ "learning_rate": 1.4117699619420878e-06,
+ "loss": 0.7838,
+ "step": 4334
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9121929534750399,
+ "learning_rate": 1.4085791059901077e-06,
+ "loss": 0.9036,
+ "step": 4335
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8708612949484579,
+ "learning_rate": 1.4053915868470013e-06,
+ "loss": 0.8908,
+ "step": 4336
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8819912375555637,
+ "learning_rate": 1.402207405750765e-06,
+ "loss": 0.8541,
+ "step": 4337
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.7941368072678817,
+ "learning_rate": 1.399026563938105e-06,
+ "loss": 0.855,
+ "step": 4338
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.9231545152873868,
+ "learning_rate": 1.3958490626444154e-06,
+ "loss": 0.8964,
+ "step": 4339
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.8888049189829651,
+ "learning_rate": 1.3926749031038055e-06,
+ "loss": 0.9064,
+ "step": 4340
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9477947025863747,
+ "learning_rate": 1.3895040865490817e-06,
+ "loss": 0.8516,
+ "step": 4341
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9144954215299261,
+ "learning_rate": 1.3863366142117506e-06,
+ "loss": 0.8939,
+ "step": 4342
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9933436776611138,
+ "learning_rate": 1.383172487322023e-06,
+ "loss": 0.8657,
+ "step": 4343
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9069322246148085,
+ "learning_rate": 1.3800117071088104e-06,
+ "loss": 0.8992,
+ "step": 4344
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9546694266275502,
+ "learning_rate": 1.3768542747997215e-06,
+ "loss": 0.8876,
+ "step": 4345
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.1519189255583402,
+ "learning_rate": 1.3737001916210713e-06,
+ "loss": 0.987,
+ "step": 4346
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8450222424007697,
+ "learning_rate": 1.370549458797863e-06,
+ "loss": 0.8736,
+ "step": 4347
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.027797851084786,
+ "learning_rate": 1.3674020775538078e-06,
+ "loss": 0.9744,
+ "step": 4348
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9596396303364736,
+ "learning_rate": 1.3642580491113122e-06,
+ "loss": 0.8457,
+ "step": 4349
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8489943977959429,
+ "learning_rate": 1.3611173746914797e-06,
+ "loss": 0.839,
+ "step": 4350
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9177898684165542,
+ "learning_rate": 1.3579800555141165e-06,
+ "loss": 0.8443,
+ "step": 4351
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0084342244773004,
+ "learning_rate": 1.3548460927977158e-06,
+ "loss": 0.8285,
+ "step": 4352
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9712532252970955,
+ "learning_rate": 1.351715487759474e-06,
+ "loss": 0.9103,
+ "step": 4353
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0264245984946392,
+ "learning_rate": 1.3485882416152819e-06,
+ "loss": 0.8653,
+ "step": 4354
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9309697174629245,
+ "learning_rate": 1.3454643555797276e-06,
+ "loss": 0.8802,
+ "step": 4355
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0621696950991983,
+ "learning_rate": 1.3423438308660929e-06,
+ "loss": 0.9771,
+ "step": 4356
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9076268387887377,
+ "learning_rate": 1.3392266686863508e-06,
+ "loss": 0.8588,
+ "step": 4357
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9943609267593755,
+ "learning_rate": 1.3361128702511716e-06,
+ "loss": 0.9281,
+ "step": 4358
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0745052503952597,
+ "learning_rate": 1.3330024367699224e-06,
+ "loss": 0.9177,
+ "step": 4359
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9178533986339459,
+ "learning_rate": 1.3298953694506522e-06,
+ "loss": 0.8428,
+ "step": 4360
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0221921399852922,
+ "learning_rate": 1.3267916695001172e-06,
+ "loss": 0.8804,
+ "step": 4361
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8451997649832802,
+ "learning_rate": 1.3236913381237592e-06,
+ "loss": 0.8618,
+ "step": 4362
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9532388337988882,
+ "learning_rate": 1.3205943765257057e-06,
+ "loss": 0.8892,
+ "step": 4363
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8623572184502248,
+ "learning_rate": 1.317500785908783e-06,
+ "loss": 0.8846,
+ "step": 4364
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9096356998930603,
+ "learning_rate": 1.31441056747451e-06,
+ "loss": 0.8485,
+ "step": 4365
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9319788853039657,
+ "learning_rate": 1.3113237224230836e-06,
+ "loss": 0.9086,
+ "step": 4366
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8546538595116254,
+ "learning_rate": 1.3082402519534076e-06,
+ "loss": 0.8384,
+ "step": 4367
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9420415566779518,
+ "learning_rate": 1.3051601572630611e-06,
+ "loss": 0.892,
+ "step": 4368
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9229043665417269,
+ "learning_rate": 1.3020834395483195e-06,
+ "loss": 0.9592,
+ "step": 4369
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0142996996904374,
+ "learning_rate": 1.2990101000041445e-06,
+ "loss": 0.9196,
+ "step": 4370
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9893710753174146,
+ "learning_rate": 1.2959401398241844e-06,
+ "loss": 0.9642,
+ "step": 4371
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8935486040273064,
+ "learning_rate": 1.2928735602007768e-06,
+ "loss": 0.9024,
+ "step": 4372
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8980300632527639,
+ "learning_rate": 1.2898103623249458e-06,
+ "loss": 0.931,
+ "step": 4373
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9400340634044065,
+ "learning_rate": 1.2867505473864029e-06,
+ "loss": 0.8775,
+ "step": 4374
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8852922367628429,
+ "learning_rate": 1.283694116573546e-06,
+ "loss": 0.8526,
+ "step": 4375
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9308637212715061,
+ "learning_rate": 1.2806410710734552e-06,
+ "loss": 0.893,
+ "step": 4376
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.054242291129353,
+ "learning_rate": 1.2775914120718992e-06,
+ "loss": 0.9159,
+ "step": 4377
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9734805693778715,
+ "learning_rate": 1.2745451407533294e-06,
+ "loss": 0.923,
+ "step": 4378
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9250030033040264,
+ "learning_rate": 1.2715022583008851e-06,
+ "loss": 0.8875,
+ "step": 4379
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8912635587673091,
+ "learning_rate": 1.2684627658963865e-06,
+ "loss": 0.8646,
+ "step": 4380
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.789099016737345,
+ "learning_rate": 1.265426664720334e-06,
+ "loss": 0.8652,
+ "step": 4381
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9141014334851884,
+ "learning_rate": 1.2623939559519161e-06,
+ "loss": 0.883,
+ "step": 4382
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.1470673379107672,
+ "learning_rate": 1.2593646407690051e-06,
+ "loss": 0.8575,
+ "step": 4383
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.7437164663470661,
+ "learning_rate": 1.2563387203481447e-06,
+ "loss": 0.8078,
+ "step": 4384
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8812285120839243,
+ "learning_rate": 1.2533161958645755e-06,
+ "loss": 0.874,
+ "step": 4385
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9886474623505253,
+ "learning_rate": 1.2502970684922067e-06,
+ "loss": 0.91,
+ "step": 4386
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8510526707996923,
+ "learning_rate": 1.2472813394036344e-06,
+ "loss": 0.8029,
+ "step": 4387
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9220893707095607,
+ "learning_rate": 1.2442690097701327e-06,
+ "loss": 0.8489,
+ "step": 4388
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.8019016015834931,
+ "learning_rate": 1.2412600807616526e-06,
+ "loss": 0.7543,
+ "step": 4389
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 1.0926757629078143,
+ "learning_rate": 1.2382545535468316e-06,
+ "loss": 0.9594,
+ "step": 4390
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.953036855555765,
+ "learning_rate": 1.2352524292929823e-06,
+ "loss": 0.9097,
+ "step": 4391
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.9419931200211884,
+ "learning_rate": 1.2322537091660912e-06,
+ "loss": 0.896,
+ "step": 4392
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0783066417312557,
+ "learning_rate": 1.22925839433083e-06,
+ "loss": 0.8952,
+ "step": 4393
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9010516525713899,
+ "learning_rate": 1.2262664859505434e-06,
+ "loss": 0.8173,
+ "step": 4394
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0404129292108966,
+ "learning_rate": 1.2232779851872511e-06,
+ "loss": 0.9002,
+ "step": 4395
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9959939362032887,
+ "learning_rate": 1.2202928932016588e-06,
+ "loss": 0.8607,
+ "step": 4396
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0519236391406372,
+ "learning_rate": 1.217311211153137e-06,
+ "loss": 0.9187,
+ "step": 4397
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8847643265914652,
+ "learning_rate": 1.2143329401997372e-06,
+ "loss": 0.8927,
+ "step": 4398
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.011836449899806,
+ "learning_rate": 1.2113580814981884e-06,
+ "loss": 0.9013,
+ "step": 4399
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9457684850126732,
+ "learning_rate": 1.2083866362038865e-06,
+ "loss": 0.8474,
+ "step": 4400
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8794146370800098,
+ "learning_rate": 1.2054186054709105e-06,
+ "loss": 0.9051,
+ "step": 4401
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9994042976622503,
+ "learning_rate": 1.2024539904520072e-06,
+ "loss": 0.8828,
+ "step": 4402
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9326353211162844,
+ "learning_rate": 1.1994927922985999e-06,
+ "loss": 0.8799,
+ "step": 4403
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8746336354630223,
+ "learning_rate": 1.1965350121607866e-06,
+ "loss": 0.8828,
+ "step": 4404
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9927828679864845,
+ "learning_rate": 1.1935806511873306e-06,
+ "loss": 0.8525,
+ "step": 4405
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0753569955639017,
+ "learning_rate": 1.1906297105256725e-06,
+ "loss": 0.9051,
+ "step": 4406
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.089473780777804,
+ "learning_rate": 1.187682191321925e-06,
+ "loss": 0.8594,
+ "step": 4407
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9250170782041444,
+ "learning_rate": 1.1847380947208697e-06,
+ "loss": 0.9507,
+ "step": 4408
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8945205817460992,
+ "learning_rate": 1.1817974218659621e-06,
+ "loss": 0.9071,
+ "step": 4409
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9040203881922112,
+ "learning_rate": 1.178860173899321e-06,
+ "loss": 0.8559,
+ "step": 4410
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8544334862845641,
+ "learning_rate": 1.1759263519617437e-06,
+ "loss": 0.8399,
+ "step": 4411
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.105335993006846,
+ "learning_rate": 1.172995957192693e-06,
+ "loss": 0.9251,
+ "step": 4412
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9253771127170801,
+ "learning_rate": 1.1700689907302953e-06,
+ "loss": 0.7862,
+ "step": 4413
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.1841069055677755,
+ "learning_rate": 1.167145453711358e-06,
+ "loss": 0.8982,
+ "step": 4414
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.074350471656947,
+ "learning_rate": 1.1642253472713427e-06,
+ "loss": 0.8916,
+ "step": 4415
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.039963082327553,
+ "learning_rate": 1.161308672544389e-06,
+ "loss": 0.9328,
+ "step": 4416
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9599735733589884,
+ "learning_rate": 1.1583954306633004e-06,
+ "loss": 0.8265,
+ "step": 4417
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9678466463162355,
+ "learning_rate": 1.1554856227595435e-06,
+ "loss": 0.839,
+ "step": 4418
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0088444774876635,
+ "learning_rate": 1.1525792499632526e-06,
+ "loss": 0.8801,
+ "step": 4419
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8932524790191629,
+ "learning_rate": 1.1496763134032363e-06,
+ "loss": 0.8493,
+ "step": 4420
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9222967778651467,
+ "learning_rate": 1.1467768142069546e-06,
+ "loss": 0.8544,
+ "step": 4421
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9780327951047955,
+ "learning_rate": 1.1438807535005437e-06,
+ "loss": 0.9147,
+ "step": 4422
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9667663733169486,
+ "learning_rate": 1.1409881324088013e-06,
+ "loss": 0.8188,
+ "step": 4423
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9365509433655029,
+ "learning_rate": 1.138098952055181e-06,
+ "loss": 0.8807,
+ "step": 4424
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8565813918026189,
+ "learning_rate": 1.1352132135618165e-06,
+ "loss": 0.9126,
+ "step": 4425
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.075644450832635,
+ "learning_rate": 1.13233091804949e-06,
+ "loss": 0.8761,
+ "step": 4426
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8165419744649355,
+ "learning_rate": 1.1294520666376518e-06,
+ "loss": 0.7936,
+ "step": 4427
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0323805322011725,
+ "learning_rate": 1.1265766604444172e-06,
+ "loss": 0.8706,
+ "step": 4428
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.7495495075304767,
+ "learning_rate": 1.1237047005865576e-06,
+ "loss": 0.8419,
+ "step": 4429
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.025298418486006,
+ "learning_rate": 1.1208361881795116e-06,
+ "loss": 0.8654,
+ "step": 4430
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8658167366495689,
+ "learning_rate": 1.1179711243373736e-06,
+ "loss": 0.8158,
+ "step": 4431
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8703812611612662,
+ "learning_rate": 1.1151095101729047e-06,
+ "loss": 0.8659,
+ "step": 4432
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9237099858287684,
+ "learning_rate": 1.1122513467975237e-06,
+ "loss": 0.8601,
+ "step": 4433
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8753879343874093,
+ "learning_rate": 1.1093966353213036e-06,
+ "loss": 0.8508,
+ "step": 4434
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0279338600477002,
+ "learning_rate": 1.1065453768529844e-06,
+ "loss": 0.982,
+ "step": 4435
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8896273082857911,
+ "learning_rate": 1.103697572499961e-06,
+ "loss": 0.9002,
+ "step": 4436
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.0127050002489055,
+ "learning_rate": 1.1008532233682878e-06,
+ "loss": 0.9153,
+ "step": 4437
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9888636471260973,
+ "learning_rate": 1.0980123305626812e-06,
+ "loss": 0.9356,
+ "step": 4438
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 1.1176917582011996,
+ "learning_rate": 1.0951748951865048e-06,
+ "loss": 0.9203,
+ "step": 4439
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9076607048301628,
+ "learning_rate": 1.0923409183417887e-06,
+ "loss": 0.9021,
+ "step": 4440
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8021459869631091,
+ "learning_rate": 1.0895104011292202e-06,
+ "loss": 0.8594,
+ "step": 4441
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8521248017433695,
+ "learning_rate": 1.0866833446481317e-06,
+ "loss": 0.8437,
+ "step": 4442
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.8199654243704576,
+ "learning_rate": 1.0838597499965276e-06,
+ "loss": 0.8561,
+ "step": 4443
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.9017441943740964,
+ "learning_rate": 1.0810396182710535e-06,
+ "loss": 0.8628,
+ "step": 4444
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0635085735985401,
+ "learning_rate": 1.0782229505670195e-06,
+ "loss": 0.857,
+ "step": 4445
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0110512531596023,
+ "learning_rate": 1.0754097479783876e-06,
+ "loss": 0.9095,
+ "step": 4446
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0330156889110493,
+ "learning_rate": 1.0726000115977696e-06,
+ "loss": 0.9477,
+ "step": 4447
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8635230495765193,
+ "learning_rate": 1.069793742516435e-06,
+ "loss": 0.8596,
+ "step": 4448
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9609302996834761,
+ "learning_rate": 1.066990941824312e-06,
+ "loss": 0.8814,
+ "step": 4449
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7887306989653831,
+ "learning_rate": 1.0641916106099691e-06,
+ "loss": 0.8378,
+ "step": 4450
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8799238336963068,
+ "learning_rate": 1.0613957499606388e-06,
+ "loss": 0.8461,
+ "step": 4451
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9332652089990722,
+ "learning_rate": 1.0586033609622004e-06,
+ "loss": 0.9113,
+ "step": 4452
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0120415338344462,
+ "learning_rate": 1.0558144446991836e-06,
+ "loss": 0.8404,
+ "step": 4453
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.153849426673812,
+ "learning_rate": 1.053029002254773e-06,
+ "loss": 0.9283,
+ "step": 4454
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8558094985018742,
+ "learning_rate": 1.0502470347108017e-06,
+ "loss": 0.8964,
+ "step": 4455
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9485891526993969,
+ "learning_rate": 1.0474685431477537e-06,
+ "loss": 0.8982,
+ "step": 4456
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.953405765044882,
+ "learning_rate": 1.0446935286447657e-06,
+ "loss": 0.9423,
+ "step": 4457
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9537729372835468,
+ "learning_rate": 1.0419219922796175e-06,
+ "loss": 0.8686,
+ "step": 4458
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.124640597984598,
+ "learning_rate": 1.039153935128744e-06,
+ "loss": 1.0009,
+ "step": 4459
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9752016852506975,
+ "learning_rate": 1.0363893582672246e-06,
+ "loss": 0.8911,
+ "step": 4460
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9389763281898983,
+ "learning_rate": 1.033628262768792e-06,
+ "loss": 0.8828,
+ "step": 4461
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8817628018783435,
+ "learning_rate": 1.0308706497058252e-06,
+ "loss": 0.8775,
+ "step": 4462
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.149099371920294,
+ "learning_rate": 1.0281165201493437e-06,
+ "loss": 0.9358,
+ "step": 4463
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9563098643398701,
+ "learning_rate": 1.0253658751690232e-06,
+ "loss": 0.905,
+ "step": 4464
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7786100041703143,
+ "learning_rate": 1.0226187158331825e-06,
+ "loss": 0.8446,
+ "step": 4465
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8717003504265479,
+ "learning_rate": 1.0198750432087855e-06,
+ "loss": 0.8463,
+ "step": 4466
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9698813047001333,
+ "learning_rate": 1.017134858361446e-06,
+ "loss": 0.9045,
+ "step": 4467
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9273075261187677,
+ "learning_rate": 1.0143981623554155e-06,
+ "loss": 0.9144,
+ "step": 4468
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0034713315580532,
+ "learning_rate": 1.0116649562535984e-06,
+ "loss": 0.8566,
+ "step": 4469
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9388287245230369,
+ "learning_rate": 1.0089352411175424e-06,
+ "loss": 0.9042,
+ "step": 4470
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9197462394117867,
+ "learning_rate": 1.006209018007429e-06,
+ "loss": 0.8565,
+ "step": 4471
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8200700243995165,
+ "learning_rate": 1.0034862879821029e-06,
+ "loss": 0.8142,
+ "step": 4472
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.839765862246526,
+ "learning_rate": 1.0007670520990331e-06,
+ "loss": 0.84,
+ "step": 4473
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.018420139672052,
+ "learning_rate": 9.98051311414342e-07,
+ "loss": 0.9043,
+ "step": 4474
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8344410831177014,
+ "learning_rate": 9.953390669827944e-07,
+ "loss": 0.899,
+ "step": 4475
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.978469099315929,
+ "learning_rate": 9.926303198577913e-07,
+ "loss": 0.8867,
+ "step": 4476
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8968384685972451,
+ "learning_rate": 9.899250710913767e-07,
+ "loss": 0.8743,
+ "step": 4477
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0064599184167913,
+ "learning_rate": 9.872233217342463e-07,
+ "loss": 0.8609,
+ "step": 4478
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9467028482314083,
+ "learning_rate": 9.845250728357214e-07,
+ "loss": 0.8863,
+ "step": 4479
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9683227171178042,
+ "learning_rate": 9.818303254437723e-07,
+ "loss": 0.8644,
+ "step": 4480
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8955860844728178,
+ "learning_rate": 9.791390806050117e-07,
+ "loss": 0.8493,
+ "step": 4481
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8326166348027098,
+ "learning_rate": 9.764513393646812e-07,
+ "loss": 0.8601,
+ "step": 4482
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8680581770069223,
+ "learning_rate": 9.737671027666728e-07,
+ "loss": 0.8561,
+ "step": 4483
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9915909435698445,
+ "learning_rate": 9.710863718535135e-07,
+ "loss": 0.9376,
+ "step": 4484
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9446431900793263,
+ "learning_rate": 9.684091476663659e-07,
+ "loss": 0.8778,
+ "step": 4485
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.109495272715226,
+ "learning_rate": 9.657354312450363e-07,
+ "loss": 0.8336,
+ "step": 4486
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8918388169589561,
+ "learning_rate": 9.630652236279626e-07,
+ "loss": 0.8686,
+ "step": 4487
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0448814368667654,
+ "learning_rate": 9.603985258522219e-07,
+ "loss": 0.9076,
+ "step": 4488
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.882671986823637,
+ "learning_rate": 9.577353389535315e-07,
+ "loss": 0.8426,
+ "step": 4489
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.7884297891161138,
+ "learning_rate": 9.550756639662417e-07,
+ "loss": 0.8667,
+ "step": 4490
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.0291184667949875,
+ "learning_rate": 9.524195019233407e-07,
+ "loss": 0.8691,
+ "step": 4491
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9151564314671728,
+ "learning_rate": 9.497668538564475e-07,
+ "loss": 0.8411,
+ "step": 4492
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 1.094450327487545,
+ "learning_rate": 9.471177207958238e-07,
+ "loss": 0.8771,
+ "step": 4493
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.8300597469314912,
+ "learning_rate": 9.444721037703597e-07,
+ "loss": 0.8728,
+ "step": 4494
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9139620150552796,
+ "learning_rate": 9.418300038075845e-07,
+ "loss": 0.945,
+ "step": 4495
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.9659572796858202,
+ "learning_rate": 9.391914219336606e-07,
+ "loss": 0.8612,
+ "step": 4496
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.018001634380619,
+ "learning_rate": 9.365563591733784e-07,
+ "loss": 0.9111,
+ "step": 4497
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9504457128628776,
+ "learning_rate": 9.33924816550168e-07,
+ "loss": 0.8944,
+ "step": 4498
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9986578534857201,
+ "learning_rate": 9.31296795086093e-07,
+ "loss": 0.817,
+ "step": 4499
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9395606158893343,
+ "learning_rate": 9.286722958018391e-07,
+ "loss": 0.868,
+ "step": 4500
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9602523091812222,
+ "learning_rate": 9.260513197167398e-07,
+ "loss": 0.8538,
+ "step": 4501
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.87687563248471,
+ "learning_rate": 9.234338678487509e-07,
+ "loss": 0.8714,
+ "step": 4502
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.949608181317551,
+ "learning_rate": 9.208199412144559e-07,
+ "loss": 0.8639,
+ "step": 4503
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9271674225765744,
+ "learning_rate": 9.182095408290781e-07,
+ "loss": 0.9022,
+ "step": 4504
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.964669129027502,
+ "learning_rate": 9.156026677064633e-07,
+ "loss": 0.9241,
+ "step": 4505
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9310203042552556,
+ "learning_rate": 9.129993228590917e-07,
+ "loss": 0.8388,
+ "step": 4506
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9643926178742298,
+ "learning_rate": 9.103995072980765e-07,
+ "loss": 0.9154,
+ "step": 4507
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8671705146578107,
+ "learning_rate": 9.078032220331523e-07,
+ "loss": 0.8392,
+ "step": 4508
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.2225591193258285,
+ "learning_rate": 9.052104680726859e-07,
+ "loss": 0.8898,
+ "step": 4509
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9660630856365235,
+ "learning_rate": 9.026212464236772e-07,
+ "loss": 0.8241,
+ "step": 4510
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9340500282731702,
+ "learning_rate": 9.000355580917464e-07,
+ "loss": 0.8555,
+ "step": 4511
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9537654138619575,
+ "learning_rate": 8.974534040811444e-07,
+ "loss": 0.8894,
+ "step": 4512
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.895198937752595,
+ "learning_rate": 8.948747853947526e-07,
+ "loss": 0.9024,
+ "step": 4513
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8766192717238223,
+ "learning_rate": 8.922997030340752e-07,
+ "loss": 0.8115,
+ "step": 4514
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8815951005276368,
+ "learning_rate": 8.897281579992467e-07,
+ "loss": 0.9074,
+ "step": 4515
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.6959857370267856,
+ "learning_rate": 8.871601512890238e-07,
+ "loss": 0.7915,
+ "step": 4516
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9412151747214554,
+ "learning_rate": 8.845956839007897e-07,
+ "loss": 0.8855,
+ "step": 4517
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.0426813298387085,
+ "learning_rate": 8.820347568305543e-07,
+ "loss": 0.8681,
+ "step": 4518
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8092244554699878,
+ "learning_rate": 8.794773710729543e-07,
+ "loss": 0.8603,
+ "step": 4519
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8680896047248979,
+ "learning_rate": 8.769235276212496e-07,
+ "loss": 0.8215,
+ "step": 4520
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.896381811745064,
+ "learning_rate": 8.743732274673189e-07,
+ "loss": 0.7978,
+ "step": 4521
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8977254594725402,
+ "learning_rate": 8.718264716016722e-07,
+ "loss": 0.9257,
+ "step": 4522
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.798855769126776,
+ "learning_rate": 8.692832610134428e-07,
+ "loss": 0.8884,
+ "step": 4523
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9157501102272877,
+ "learning_rate": 8.66743596690377e-07,
+ "loss": 0.8669,
+ "step": 4524
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9695653131871415,
+ "learning_rate": 8.642074796188594e-07,
+ "loss": 0.8686,
+ "step": 4525
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9793148217726962,
+ "learning_rate": 8.61674910783884e-07,
+ "loss": 0.9255,
+ "step": 4526
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9576973985763716,
+ "learning_rate": 8.59145891169072e-07,
+ "loss": 0.8332,
+ "step": 4527
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9081255730978633,
+ "learning_rate": 8.566204217566664e-07,
+ "loss": 0.8195,
+ "step": 4528
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8708156229354137,
+ "learning_rate": 8.540985035275273e-07,
+ "loss": 0.9023,
+ "step": 4529
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8691762245849086,
+ "learning_rate": 8.515801374611432e-07,
+ "loss": 0.8578,
+ "step": 4530
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7866221852061148,
+ "learning_rate": 8.490653245356184e-07,
+ "loss": 0.7866,
+ "step": 4531
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.1989636305157028,
+ "learning_rate": 8.465540657276728e-07,
+ "loss": 0.9471,
+ "step": 4532
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9767141601100402,
+ "learning_rate": 8.44046362012656e-07,
+ "loss": 0.9228,
+ "step": 4533
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8303567486057429,
+ "learning_rate": 8.415422143645247e-07,
+ "loss": 0.8237,
+ "step": 4534
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8703551504870906,
+ "learning_rate": 8.390416237558641e-07,
+ "loss": 0.8909,
+ "step": 4535
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.0998537880731005,
+ "learning_rate": 8.365445911578785e-07,
+ "loss": 0.9127,
+ "step": 4536
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9489973630795188,
+ "learning_rate": 8.340511175403809e-07,
+ "loss": 0.8361,
+ "step": 4537
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 1.1220324062106524,
+ "learning_rate": 8.315612038718101e-07,
+ "loss": 0.916,
+ "step": 4538
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7752459181440666,
+ "learning_rate": 8.290748511192214e-07,
+ "loss": 0.8526,
+ "step": 4539
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9759138766592379,
+ "learning_rate": 8.265920602482825e-07,
+ "loss": 0.9052,
+ "step": 4540
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.88200196522921,
+ "learning_rate": 8.241128322232816e-07,
+ "loss": 0.9174,
+ "step": 4541
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8480227788746386,
+ "learning_rate": 8.216371680071244e-07,
+ "loss": 0.8847,
+ "step": 4542
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7761758733544654,
+ "learning_rate": 8.191650685613273e-07,
+ "loss": 0.8348,
+ "step": 4543
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9288089411677429,
+ "learning_rate": 8.166965348460298e-07,
+ "loss": 0.9418,
+ "step": 4544
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8512522320599516,
+ "learning_rate": 8.142315678199764e-07,
+ "loss": 0.8531,
+ "step": 4545
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.8622197409594021,
+ "learning_rate": 8.117701684405343e-07,
+ "loss": 0.8356,
+ "step": 4546
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.9047252942224157,
+ "learning_rate": 8.093123376636836e-07,
+ "loss": 0.8358,
+ "step": 4547
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.7767940998413876,
+ "learning_rate": 8.06858076444017e-07,
+ "loss": 0.8704,
+ "step": 4548
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8395712335488582,
+ "learning_rate": 8.044073857347423e-07,
+ "loss": 0.8137,
+ "step": 4549
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0484595587985202,
+ "learning_rate": 8.019602664876758e-07,
+ "loss": 0.9307,
+ "step": 4550
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.880460137991095,
+ "learning_rate": 7.995167196532527e-07,
+ "loss": 0.8784,
+ "step": 4551
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9149158139313484,
+ "learning_rate": 7.970767461805218e-07,
+ "loss": 0.8461,
+ "step": 4552
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9131255299353188,
+ "learning_rate": 7.946403470171326e-07,
+ "loss": 0.827,
+ "step": 4553
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8824362005579756,
+ "learning_rate": 7.922075231093628e-07,
+ "loss": 0.8546,
+ "step": 4554
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8832373651669211,
+ "learning_rate": 7.897782754020889e-07,
+ "loss": 0.8728,
+ "step": 4555
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8069595830282974,
+ "learning_rate": 7.873526048388025e-07,
+ "loss": 0.8346,
+ "step": 4556
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1228418926088712,
+ "learning_rate": 7.849305123616091e-07,
+ "loss": 0.9323,
+ "step": 4557
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9533667074702398,
+ "learning_rate": 7.825119989112173e-07,
+ "loss": 0.8716,
+ "step": 4558
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.923293228778207,
+ "learning_rate": 7.800970654269513e-07,
+ "loss": 0.8432,
+ "step": 4559
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.11626140350239,
+ "learning_rate": 7.776857128467464e-07,
+ "loss": 0.8332,
+ "step": 4560
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8938496801044397,
+ "learning_rate": 7.75277942107141e-07,
+ "loss": 0.8468,
+ "step": 4561
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0354649146549935,
+ "learning_rate": 7.728737541432862e-07,
+ "loss": 0.8427,
+ "step": 4562
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8860950842084511,
+ "learning_rate": 7.704731498889428e-07,
+ "loss": 0.8772,
+ "step": 4563
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.867606498901801,
+ "learning_rate": 7.680761302764727e-07,
+ "loss": 0.7783,
+ "step": 4564
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.7494571050820372,
+ "learning_rate": 7.65682696236858e-07,
+ "loss": 0.7823,
+ "step": 4565
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9718045190437227,
+ "learning_rate": 7.632928486996749e-07,
+ "loss": 0.829,
+ "step": 4566
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8814772987383863,
+ "learning_rate": 7.609065885931155e-07,
+ "loss": 0.905,
+ "step": 4567
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9680328247678344,
+ "learning_rate": 7.585239168439762e-07,
+ "loss": 0.962,
+ "step": 4568
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9590121467205059,
+ "learning_rate": 7.561448343776567e-07,
+ "loss": 0.8774,
+ "step": 4569
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8510595020547232,
+ "learning_rate": 7.537693421181658e-07,
+ "loss": 0.8738,
+ "step": 4570
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9654411670041684,
+ "learning_rate": 7.513974409881186e-07,
+ "loss": 0.8577,
+ "step": 4571
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.6154410191182488,
+ "learning_rate": 7.49029131908734e-07,
+ "loss": 0.7772,
+ "step": 4572
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8124019210849639,
+ "learning_rate": 7.466644157998371e-07,
+ "loss": 0.868,
+ "step": 4573
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9371761053086015,
+ "learning_rate": 7.443032935798533e-07,
+ "loss": 0.895,
+ "step": 4574
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9690588905208973,
+ "learning_rate": 7.419457661658169e-07,
+ "loss": 0.9118,
+ "step": 4575
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.96849815157878,
+ "learning_rate": 7.395918344733644e-07,
+ "loss": 0.9247,
+ "step": 4576
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9868973998873788,
+ "learning_rate": 7.372414994167354e-07,
+ "loss": 0.8891,
+ "step": 4577
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0239718137607154,
+ "learning_rate": 7.348947619087754e-07,
+ "loss": 0.9041,
+ "step": 4578
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9296765587267058,
+ "learning_rate": 7.325516228609264e-07,
+ "loss": 0.8811,
+ "step": 4579
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9506405539975408,
+ "learning_rate": 7.302120831832382e-07,
+ "loss": 0.8922,
+ "step": 4580
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0671397784944519,
+ "learning_rate": 7.278761437843629e-07,
+ "loss": 0.9144,
+ "step": 4581
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9326971853607836,
+ "learning_rate": 7.255438055715469e-07,
+ "loss": 0.9119,
+ "step": 4582
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.856892892626281,
+ "learning_rate": 7.232150694506512e-07,
+ "loss": 0.8945,
+ "step": 4583
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9502589806243291,
+ "learning_rate": 7.208899363261234e-07,
+ "loss": 0.9065,
+ "step": 4584
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9183385431355955,
+ "learning_rate": 7.185684071010224e-07,
+ "loss": 0.8913,
+ "step": 4585
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9281050915140219,
+ "learning_rate": 7.162504826770033e-07,
+ "loss": 0.8903,
+ "step": 4586
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9488448838720547,
+ "learning_rate": 7.139361639543185e-07,
+ "loss": 0.902,
+ "step": 4587
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1643793453730171,
+ "learning_rate": 7.116254518318222e-07,
+ "loss": 0.8976,
+ "step": 4588
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8929835684417432,
+ "learning_rate": 7.093183472069753e-07,
+ "loss": 0.8782,
+ "step": 4589
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8728833870870203,
+ "learning_rate": 7.070148509758223e-07,
+ "loss": 0.848,
+ "step": 4590
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1138405382131311,
+ "learning_rate": 7.047149640330197e-07,
+ "loss": 0.9057,
+ "step": 4591
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.9119819160478895,
+ "learning_rate": 7.024186872718164e-07,
+ "loss": 0.8971,
+ "step": 4592
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0586261924649834,
+ "learning_rate": 7.001260215840567e-07,
+ "loss": 0.8966,
+ "step": 4593
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8558439206138747,
+ "learning_rate": 6.978369678601892e-07,
+ "loss": 0.8803,
+ "step": 4594
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.1313114584415687,
+ "learning_rate": 6.955515269892533e-07,
+ "loss": 0.9254,
+ "step": 4595
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8687686930697606,
+ "learning_rate": 6.932696998588895e-07,
+ "loss": 0.8345,
+ "step": 4596
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.972762762695167,
+ "learning_rate": 6.909914873553347e-07,
+ "loss": 0.9169,
+ "step": 4597
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 1.0722597366094893,
+ "learning_rate": 6.887168903634178e-07,
+ "loss": 0.8694,
+ "step": 4598
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.8700407063622763,
+ "learning_rate": 6.864459097665654e-07,
+ "loss": 0.9118,
+ "step": 4599
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.7231028526782665,
+ "learning_rate": 6.84178546446802e-07,
+ "loss": 0.8277,
+ "step": 4600
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9397621900449872,
+ "learning_rate": 6.819148012847454e-07,
+ "loss": 0.9099,
+ "step": 4601
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8027870927796138,
+ "learning_rate": 6.796546751596089e-07,
+ "loss": 0.8059,
+ "step": 4602
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.825610473131502,
+ "learning_rate": 6.77398168949196e-07,
+ "loss": 0.8641,
+ "step": 4603
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0333130499480339,
+ "learning_rate": 6.751452835299111e-07,
+ "loss": 0.8968,
+ "step": 4604
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.7461096269793116,
+ "learning_rate": 6.728960197767475e-07,
+ "loss": 0.764,
+ "step": 4605
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.1053663946086056,
+ "learning_rate": 6.706503785632934e-07,
+ "loss": 0.8871,
+ "step": 4606
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8087669755123348,
+ "learning_rate": 6.68408360761732e-07,
+ "loss": 0.8347,
+ "step": 4607
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8228167450449801,
+ "learning_rate": 6.661699672428334e-07,
+ "loss": 0.8943,
+ "step": 4608
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.044853902103288,
+ "learning_rate": 6.639351988759657e-07,
+ "loss": 0.9091,
+ "step": 4609
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9646694942021371,
+ "learning_rate": 6.6170405652909e-07,
+ "loss": 0.8832,
+ "step": 4610
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9862719073656867,
+ "learning_rate": 6.594765410687487e-07,
+ "loss": 0.8745,
+ "step": 4611
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9469706978275599,
+ "learning_rate": 6.57252653360092e-07,
+ "loss": 0.8867,
+ "step": 4612
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9621907313147184,
+ "learning_rate": 6.550323942668469e-07,
+ "loss": 0.9048,
+ "step": 4613
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9551120725938772,
+ "learning_rate": 6.528157646513378e-07,
+ "loss": 0.916,
+ "step": 4614
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.165242059134334,
+ "learning_rate": 6.506027653744796e-07,
+ "loss": 0.901,
+ "step": 4615
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8276585384744689,
+ "learning_rate": 6.483933972957734e-07,
+ "loss": 0.8597,
+ "step": 4616
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8674009756824886,
+ "learning_rate": 6.461876612733109e-07,
+ "loss": 0.8642,
+ "step": 4617
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8732681246085701,
+ "learning_rate": 6.4398555816378e-07,
+ "loss": 0.8679,
+ "step": 4618
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0698624969243604,
+ "learning_rate": 6.417870888224476e-07,
+ "loss": 0.9166,
+ "step": 4619
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9269176635216481,
+ "learning_rate": 6.395922541031741e-07,
+ "loss": 0.7976,
+ "step": 4620
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8911483169879542,
+ "learning_rate": 6.374010548584119e-07,
+ "loss": 0.7714,
+ "step": 4621
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9166620422433125,
+ "learning_rate": 6.352134919391928e-07,
+ "loss": 0.8986,
+ "step": 4622
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8503582591660069,
+ "learning_rate": 6.330295661951436e-07,
+ "loss": 0.8285,
+ "step": 4623
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9065331328143107,
+ "learning_rate": 6.308492784744746e-07,
+ "loss": 0.8659,
+ "step": 4624
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9446631454458843,
+ "learning_rate": 6.286726296239854e-07,
+ "loss": 0.8569,
+ "step": 4625
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8410711191297997,
+ "learning_rate": 6.264996204890628e-07,
+ "loss": 0.8798,
+ "step": 4626
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.967536775476823,
+ "learning_rate": 6.24330251913674e-07,
+ "loss": 0.912,
+ "step": 4627
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8490844246194825,
+ "learning_rate": 6.221645247403807e-07,
+ "loss": 0.8055,
+ "step": 4628
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8079323917488994,
+ "learning_rate": 6.200024398103255e-07,
+ "loss": 0.8039,
+ "step": 4629
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9217043952152276,
+ "learning_rate": 6.178439979632367e-07,
+ "loss": 0.9196,
+ "step": 4630
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8245142052913529,
+ "learning_rate": 6.156892000374293e-07,
+ "loss": 0.7905,
+ "step": 4631
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8264377267146533,
+ "learning_rate": 6.135380468698004e-07,
+ "loss": 0.7877,
+ "step": 4632
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9572302543274727,
+ "learning_rate": 6.113905392958342e-07,
+ "loss": 0.9471,
+ "step": 4633
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9894955129199708,
+ "learning_rate": 6.092466781495976e-07,
+ "loss": 0.8881,
+ "step": 4634
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.898577258005014,
+ "learning_rate": 6.071064642637404e-07,
+ "loss": 0.8959,
+ "step": 4635
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9397410181632516,
+ "learning_rate": 6.049698984695007e-07,
+ "loss": 0.8166,
+ "step": 4636
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.7923090009464244,
+ "learning_rate": 6.028369815966917e-07,
+ "loss": 0.8396,
+ "step": 4637
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.2202817464242952,
+ "learning_rate": 6.007077144737161e-07,
+ "loss": 0.9118,
+ "step": 4638
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.969129140892316,
+ "learning_rate": 5.985820979275569e-07,
+ "loss": 0.8887,
+ "step": 4639
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0783977420547077,
+ "learning_rate": 5.96460132783776e-07,
+ "loss": 0.8832,
+ "step": 4640
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9137748341764742,
+ "learning_rate": 5.943418198665251e-07,
+ "loss": 0.8517,
+ "step": 4641
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9473977104987698,
+ "learning_rate": 5.922271599985286e-07,
+ "loss": 0.9196,
+ "step": 4642
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8204196536378642,
+ "learning_rate": 5.901161540010969e-07,
+ "loss": 0.8097,
+ "step": 4643
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9277511181073421,
+ "learning_rate": 5.880088026941233e-07,
+ "loss": 0.8416,
+ "step": 4644
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9452367143360048,
+ "learning_rate": 5.859051068960741e-07,
+ "loss": 0.9401,
+ "step": 4645
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.987232865712406,
+ "learning_rate": 5.838050674240025e-07,
+ "loss": 0.9142,
+ "step": 4646
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9357750824353854,
+ "learning_rate": 5.817086850935416e-07,
+ "loss": 0.8483,
+ "step": 4647
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9534772237743926,
+ "learning_rate": 5.796159607189001e-07,
+ "loss": 0.838,
+ "step": 4648
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 1.0211652109876297,
+ "learning_rate": 5.775268951128676e-07,
+ "loss": 0.8509,
+ "step": 4649
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.9323112138160999,
+ "learning_rate": 5.754414890868154e-07,
+ "loss": 0.8589,
+ "step": 4650
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.8750232423096678,
+ "learning_rate": 5.733597434506877e-07,
+ "loss": 0.8532,
+ "step": 4651
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.026382237023704,
+ "learning_rate": 5.712816590130133e-07,
+ "loss": 0.9044,
+ "step": 4652
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9624758633770257,
+ "learning_rate": 5.692072365808954e-07,
+ "loss": 0.8804,
+ "step": 4653
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9989242644321323,
+ "learning_rate": 5.671364769600162e-07,
+ "loss": 0.9247,
+ "step": 4654
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9616525628604601,
+ "learning_rate": 5.650693809546348e-07,
+ "loss": 0.9033,
+ "step": 4655
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9096015769269892,
+ "learning_rate": 5.630059493675866e-07,
+ "loss": 0.8009,
+ "step": 4656
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0637758788778562,
+ "learning_rate": 5.60946183000285e-07,
+ "loss": 0.9068,
+ "step": 4657
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8901945313508792,
+ "learning_rate": 5.5889008265272e-07,
+ "loss": 0.8788,
+ "step": 4658
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9099875967178034,
+ "learning_rate": 5.568376491234562e-07,
+ "loss": 0.8796,
+ "step": 4659
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0202809122623,
+ "learning_rate": 5.547888832096382e-07,
+ "loss": 0.9962,
+ "step": 4660
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.954658548268079,
+ "learning_rate": 5.527437857069784e-07,
+ "loss": 0.8394,
+ "step": 4661
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9320796453469051,
+ "learning_rate": 5.507023574097725e-07,
+ "loss": 0.9111,
+ "step": 4662
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0099055589396055,
+ "learning_rate": 5.486645991108875e-07,
+ "loss": 0.8903,
+ "step": 4663
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9958238294406384,
+ "learning_rate": 5.466305116017623e-07,
+ "loss": 0.9471,
+ "step": 4664
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8651903899596718,
+ "learning_rate": 5.446000956724174e-07,
+ "loss": 0.8909,
+ "step": 4665
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0929962562558704,
+ "learning_rate": 5.425733521114396e-07,
+ "loss": 0.9027,
+ "step": 4666
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.002891888040804,
+ "learning_rate": 5.405502817059937e-07,
+ "loss": 0.9048,
+ "step": 4667
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0631978643606623,
+ "learning_rate": 5.385308852418191e-07,
+ "loss": 0.9402,
+ "step": 4668
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0592678495354233,
+ "learning_rate": 5.365151635032218e-07,
+ "loss": 0.8854,
+ "step": 4669
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9465884927280965,
+ "learning_rate": 5.345031172730875e-07,
+ "loss": 0.9383,
+ "step": 4670
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.055539568239519,
+ "learning_rate": 5.324947473328735e-07,
+ "loss": 0.8672,
+ "step": 4671
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0178495256136613,
+ "learning_rate": 5.304900544626046e-07,
+ "loss": 0.9695,
+ "step": 4672
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9207883190778684,
+ "learning_rate": 5.284890394408826e-07,
+ "loss": 0.9053,
+ "step": 4673
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0413991164040401,
+ "learning_rate": 5.264917030448757e-07,
+ "loss": 0.917,
+ "step": 4674
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9852974225095827,
+ "learning_rate": 5.244980460503268e-07,
+ "loss": 0.9199,
+ "step": 4675
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9101478254967975,
+ "learning_rate": 5.225080692315532e-07,
+ "loss": 0.8581,
+ "step": 4676
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9224577901270811,
+ "learning_rate": 5.205217733614353e-07,
+ "loss": 0.8575,
+ "step": 4677
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8052472896237471,
+ "learning_rate": 5.185391592114286e-07,
+ "loss": 0.8244,
+ "step": 4678
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9836455868456991,
+ "learning_rate": 5.165602275515592e-07,
+ "loss": 0.9301,
+ "step": 4679
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.1892255587314617,
+ "learning_rate": 5.145849791504187e-07,
+ "loss": 0.9277,
+ "step": 4680
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9869571273541833,
+ "learning_rate": 5.126134147751716e-07,
+ "loss": 0.831,
+ "step": 4681
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8052676873067713,
+ "learning_rate": 5.106455351915507e-07,
+ "loss": 0.79,
+ "step": 4682
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9338371554851755,
+ "learning_rate": 5.086813411638581e-07,
+ "loss": 0.9352,
+ "step": 4683
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9015422339295802,
+ "learning_rate": 5.067208334549656e-07,
+ "loss": 0.9437,
+ "step": 4684
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.054167179399989,
+ "learning_rate": 5.047640128263087e-07,
+ "loss": 0.9158,
+ "step": 4685
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9082560787862947,
+ "learning_rate": 5.028108800378961e-07,
+ "loss": 0.8905,
+ "step": 4686
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8219471768430575,
+ "learning_rate": 5.008614358483021e-07,
+ "loss": 0.8757,
+ "step": 4687
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.780745425301807,
+ "learning_rate": 4.989156810146667e-07,
+ "loss": 0.8158,
+ "step": 4688
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9234343419367949,
+ "learning_rate": 4.969736162927019e-07,
+ "loss": 0.8761,
+ "step": 4689
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 1.0014977203303232,
+ "learning_rate": 4.9503524243668e-07,
+ "loss": 0.9522,
+ "step": 4690
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9339230282937171,
+ "learning_rate": 4.931005601994432e-07,
+ "loss": 0.9174,
+ "step": 4691
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9498858891420158,
+ "learning_rate": 4.911695703324038e-07,
+ "loss": 0.8962,
+ "step": 4692
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8832518040504395,
+ "learning_rate": 4.892422735855284e-07,
+ "loss": 0.8549,
+ "step": 4693
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8408928804118271,
+ "learning_rate": 4.873186707073663e-07,
+ "loss": 0.8011,
+ "step": 4694
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9207618156688188,
+ "learning_rate": 4.853987624450151e-07,
+ "loss": 0.8566,
+ "step": 4695
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9262469810019246,
+ "learning_rate": 4.834825495441475e-07,
+ "loss": 0.8915,
+ "step": 4696
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8846754187057477,
+ "learning_rate": 4.815700327490014e-07,
+ "loss": 0.9321,
+ "step": 4697
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9216289128668232,
+ "learning_rate": 4.796612128023726e-07,
+ "loss": 0.8932,
+ "step": 4698
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8957453245571164,
+ "learning_rate": 4.777560904456236e-07,
+ "loss": 0.867,
+ "step": 4699
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9659405217417328,
+ "learning_rate": 4.7585466641868696e-07,
+ "loss": 0.8371,
+ "step": 4700
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9246429973713162,
+ "learning_rate": 4.7395694146004976e-07,
+ "loss": 0.8532,
+ "step": 4701
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.9730178048995508,
+ "learning_rate": 4.7206291630677024e-07,
+ "loss": 0.8628,
+ "step": 4702
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.8908886670821141,
+ "learning_rate": 4.7017259169446104e-07,
+ "loss": 0.8401,
+ "step": 4703
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.2569337175476047,
+ "learning_rate": 4.6828596835730487e-07,
+ "loss": 0.9729,
+ "step": 4704
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8871567475172962,
+ "learning_rate": 4.664030470280467e-07,
+ "loss": 0.8877,
+ "step": 4705
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8771797489505685,
+ "learning_rate": 4.645238284379883e-07,
+ "loss": 0.876,
+ "step": 4706
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8974155182599607,
+ "learning_rate": 4.626483133169968e-07,
+ "loss": 0.8647,
+ "step": 4707
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.03217165439121,
+ "learning_rate": 4.60776502393504e-07,
+ "loss": 0.9047,
+ "step": 4708
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8647869508096034,
+ "learning_rate": 4.5890839639449514e-07,
+ "loss": 0.9272,
+ "step": 4709
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9483061117258482,
+ "learning_rate": 4.5704399604552417e-07,
+ "loss": 0.8963,
+ "step": 4710
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0459603859839062,
+ "learning_rate": 4.551833020707008e-07,
+ "loss": 0.752,
+ "step": 4711
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8095336947195265,
+ "learning_rate": 4.533263151926981e-07,
+ "loss": 0.8358,
+ "step": 4712
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8731203447007811,
+ "learning_rate": 4.514730361327502e-07,
+ "loss": 0.8537,
+ "step": 4713
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7576831899020193,
+ "learning_rate": 4.4962346561064574e-07,
+ "loss": 0.7908,
+ "step": 4714
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9335440679572502,
+ "learning_rate": 4.4777760434473796e-07,
+ "loss": 0.8944,
+ "step": 4715
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.2370637457358102,
+ "learning_rate": 4.4593545305193774e-07,
+ "loss": 0.8908,
+ "step": 4716
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.920934958052355,
+ "learning_rate": 4.440970124477173e-07,
+ "loss": 0.8929,
+ "step": 4717
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.848866861588948,
+ "learning_rate": 4.4226228324610544e-07,
+ "loss": 0.8371,
+ "step": 4718
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7779812376352646,
+ "learning_rate": 4.404312661596877e-07,
+ "loss": 0.7985,
+ "step": 4719
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7910637849699313,
+ "learning_rate": 4.386039618996119e-07,
+ "loss": 0.8588,
+ "step": 4720
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9347730768411243,
+ "learning_rate": 4.3678037117558135e-07,
+ "loss": 0.869,
+ "step": 4721
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8317945691068949,
+ "learning_rate": 4.349604946958563e-07,
+ "loss": 0.9009,
+ "step": 4722
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7744877050845054,
+ "learning_rate": 4.331443331672591e-07,
+ "loss": 0.7898,
+ "step": 4723
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0950992364196257,
+ "learning_rate": 4.313318872951633e-07,
+ "loss": 0.8883,
+ "step": 4724
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.021349287582722,
+ "learning_rate": 4.295231577835024e-07,
+ "loss": 0.8475,
+ "step": 4725
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.91315891502072,
+ "learning_rate": 4.2771814533476787e-07,
+ "loss": 0.9307,
+ "step": 4726
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.009209027305893,
+ "learning_rate": 4.2591685065000223e-07,
+ "loss": 0.848,
+ "step": 4727
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9653479615075518,
+ "learning_rate": 4.241192744288092e-07,
+ "loss": 0.8995,
+ "step": 4728
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.039540610615299,
+ "learning_rate": 4.223254173693492e-07,
+ "loss": 0.9164,
+ "step": 4729
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.929264173702999,
+ "learning_rate": 4.2053528016833267e-07,
+ "loss": 0.9028,
+ "step": 4730
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8152833470357382,
+ "learning_rate": 4.1874886352103015e-07,
+ "loss": 0.8695,
+ "step": 4731
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8560674313813604,
+ "learning_rate": 4.1696616812126333e-07,
+ "loss": 0.8124,
+ "step": 4732
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9548884844167742,
+ "learning_rate": 4.1518719466141165e-07,
+ "loss": 0.8691,
+ "step": 4733
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8539519754960126,
+ "learning_rate": 4.13411943832408e-07,
+ "loss": 0.7609,
+ "step": 4734
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9552903690370225,
+ "learning_rate": 4.116404163237386e-07,
+ "loss": 0.8559,
+ "step": 4735
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9345544038413095,
+ "learning_rate": 4.0987261282344425e-07,
+ "loss": 0.8994,
+ "step": 4736
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8670718582046053,
+ "learning_rate": 4.081085340181223e-07,
+ "loss": 0.8565,
+ "step": 4737
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9917587272279675,
+ "learning_rate": 4.06348180592917e-07,
+ "loss": 0.9179,
+ "step": 4738
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9062623994797032,
+ "learning_rate": 4.0459155323153034e-07,
+ "loss": 0.8471,
+ "step": 4739
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8640138948660695,
+ "learning_rate": 4.028386526162176e-07,
+ "loss": 0.8527,
+ "step": 4740
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9711039171948731,
+ "learning_rate": 4.010894794277831e-07,
+ "loss": 0.9265,
+ "step": 4741
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8932933479919942,
+ "learning_rate": 3.993440343455879e-07,
+ "loss": 0.851,
+ "step": 4742
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.072058425315494,
+ "learning_rate": 3.976023180475397e-07,
+ "loss": 0.8821,
+ "step": 4743
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8879501099847686,
+ "learning_rate": 3.95864331210104e-07,
+ "loss": 0.8876,
+ "step": 4744
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0169064021683811,
+ "learning_rate": 3.941300745082932e-07,
+ "loss": 0.8965,
+ "step": 4745
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.971834892069378,
+ "learning_rate": 3.9239954861567177e-07,
+ "loss": 0.9413,
+ "step": 4746
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9077071568664802,
+ "learning_rate": 3.906727542043598e-07,
+ "loss": 0.9478,
+ "step": 4747
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.9783496792117957,
+ "learning_rate": 3.8894969194502083e-07,
+ "loss": 0.8484,
+ "step": 4748
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.7577751457087538,
+ "learning_rate": 3.87230362506873e-07,
+ "loss": 0.7764,
+ "step": 4749
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8327591036758225,
+ "learning_rate": 3.8551476655768527e-07,
+ "loss": 0.8193,
+ "step": 4750
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8376407389609154,
+ "learning_rate": 3.8380290476377255e-07,
+ "loss": 0.8738,
+ "step": 4751
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.041387180792664,
+ "learning_rate": 3.8209477779000637e-07,
+ "loss": 0.7963,
+ "step": 4752
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 1.0508160439123053,
+ "learning_rate": 3.803903862998004e-07,
+ "loss": 0.9625,
+ "step": 4753
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8819817754321476,
+ "learning_rate": 3.7868973095512185e-07,
+ "loss": 0.8513,
+ "step": 4754
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.8633950002628878,
+ "learning_rate": 3.7699281241648565e-07,
+ "loss": 0.8388,
+ "step": 4755
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9430616004578857,
+ "learning_rate": 3.7529963134295466e-07,
+ "loss": 0.8794,
+ "step": 4756
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8202230404710976,
+ "learning_rate": 3.736101883921406e-07,
+ "loss": 0.8608,
+ "step": 4757
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9111800394922659,
+ "learning_rate": 3.719244842202074e-07,
+ "loss": 0.8701,
+ "step": 4758
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9648783251420716,
+ "learning_rate": 3.702425194818582e-07,
+ "loss": 0.9106,
+ "step": 4759
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8601473565674794,
+ "learning_rate": 3.685642948303503e-07,
+ "loss": 0.9002,
+ "step": 4760
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8695453215948808,
+ "learning_rate": 3.66889810917489e-07,
+ "loss": 0.8081,
+ "step": 4761
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8832004349362633,
+ "learning_rate": 3.6521906839362187e-07,
+ "loss": 0.9008,
+ "step": 4762
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9373870441318044,
+ "learning_rate": 3.635520679076465e-07,
+ "loss": 0.8846,
+ "step": 4763
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8537989605488135,
+ "learning_rate": 3.6188881010700725e-07,
+ "loss": 0.874,
+ "step": 4764
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9555087172024136,
+ "learning_rate": 3.6022929563769513e-07,
+ "loss": 0.8977,
+ "step": 4765
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.89565777069052,
+ "learning_rate": 3.5857352514424573e-07,
+ "loss": 0.8879,
+ "step": 4766
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9683873603155793,
+ "learning_rate": 3.5692149926974006e-07,
+ "loss": 0.8812,
+ "step": 4767
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8446197704108228,
+ "learning_rate": 3.552732186558072e-07,
+ "loss": 0.8336,
+ "step": 4768
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7131766899855473,
+ "learning_rate": 3.536286839426195e-07,
+ "loss": 0.8001,
+ "step": 4769
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8623764633556321,
+ "learning_rate": 3.51987895768896e-07,
+ "loss": 0.835,
+ "step": 4770
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8598354513537586,
+ "learning_rate": 3.5035085477190143e-07,
+ "loss": 0.8347,
+ "step": 4771
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8913383301607862,
+ "learning_rate": 3.4871756158744054e-07,
+ "loss": 0.8973,
+ "step": 4772
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9345288783189069,
+ "learning_rate": 3.4708801684986693e-07,
+ "loss": 0.9262,
+ "step": 4773
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9447309844765478,
+ "learning_rate": 3.454622211920766e-07,
+ "loss": 0.8473,
+ "step": 4774
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9736499373557043,
+ "learning_rate": 3.4384017524551116e-07,
+ "loss": 0.883,
+ "step": 4775
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8309086476311517,
+ "learning_rate": 3.422218796401544e-07,
+ "loss": 0.8497,
+ "step": 4776
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0225774239584255,
+ "learning_rate": 3.4060733500453247e-07,
+ "loss": 0.895,
+ "step": 4777
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8911865934497273,
+ "learning_rate": 3.38996541965716e-07,
+ "loss": 0.9195,
+ "step": 4778
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8443701653753309,
+ "learning_rate": 3.3738950114932e-07,
+ "loss": 0.8844,
+ "step": 4779
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7107396921526531,
+ "learning_rate": 3.3578621317949755e-07,
+ "loss": 0.779,
+ "step": 4780
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8399893121659415,
+ "learning_rate": 3.341866786789505e-07,
+ "loss": 0.8436,
+ "step": 4781
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9685815613770961,
+ "learning_rate": 3.325908982689185e-07,
+ "loss": 0.9355,
+ "step": 4782
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9010482476442921,
+ "learning_rate": 3.309988725691837e-07,
+ "loss": 0.8462,
+ "step": 4783
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.004028997913816,
+ "learning_rate": 3.294106021980714e-07,
+ "loss": 0.8934,
+ "step": 4784
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9467496885123551,
+ "learning_rate": 3.278260877724471e-07,
+ "loss": 0.8518,
+ "step": 4785
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9611481887716877,
+ "learning_rate": 3.2624532990771507e-07,
+ "loss": 0.9238,
+ "step": 4786
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9211305354120007,
+ "learning_rate": 3.2466832921782986e-07,
+ "loss": 0.8427,
+ "step": 4787
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9780646020436263,
+ "learning_rate": 3.2309508631527486e-07,
+ "loss": 0.9016,
+ "step": 4788
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8852928545464044,
+ "learning_rate": 3.215256018110824e-07,
+ "loss": 0.8977,
+ "step": 4789
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8279288966181191,
+ "learning_rate": 3.199598763148215e-07,
+ "loss": 0.7681,
+ "step": 4790
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.909226871258729,
+ "learning_rate": 3.183979104346002e-07,
+ "loss": 0.8608,
+ "step": 4791
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0171373695462986,
+ "learning_rate": 3.1683970477706994e-07,
+ "loss": 0.899,
+ "step": 4792
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0009925275785496,
+ "learning_rate": 3.1528525994741876e-07,
+ "loss": 0.9874,
+ "step": 4793
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8224974832611328,
+ "learning_rate": 3.13734576549376e-07,
+ "loss": 0.8328,
+ "step": 4794
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8683064937148369,
+ "learning_rate": 3.121876551852099e-07,
+ "loss": 0.8416,
+ "step": 4795
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8806177741538391,
+ "learning_rate": 3.1064449645572536e-07,
+ "loss": 0.8782,
+ "step": 4796
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9123231858787465,
+ "learning_rate": 3.091051009602675e-07,
+ "loss": 0.8457,
+ "step": 4797
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.7618300549174845,
+ "learning_rate": 3.0756946929672017e-07,
+ "loss": 0.8462,
+ "step": 4798
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8791781943865703,
+ "learning_rate": 3.060376020615052e-07,
+ "loss": 0.8286,
+ "step": 4799
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.951463421136622,
+ "learning_rate": 3.0450949984958347e-07,
+ "loss": 0.8521,
+ "step": 4800
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.8866527917252199,
+ "learning_rate": 3.0298516325444893e-07,
+ "loss": 0.8836,
+ "step": 4801
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.893886355824411,
+ "learning_rate": 3.0146459286813924e-07,
+ "loss": 0.7967,
+ "step": 4802
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.961422977617786,
+ "learning_rate": 2.999477892812264e-07,
+ "loss": 0.8197,
+ "step": 4803
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0167513068531195,
+ "learning_rate": 2.984347530828158e-07,
+ "loss": 0.9593,
+ "step": 4804
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0707092859236371,
+ "learning_rate": 2.969254848605585e-07,
+ "loss": 0.8534,
+ "step": 4805
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 1.0635957905024185,
+ "learning_rate": 2.9541998520063344e-07,
+ "loss": 0.8137,
+ "step": 4806
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.9105437286875733,
+ "learning_rate": 2.9391825468775946e-07,
+ "loss": 0.8645,
+ "step": 4807
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8364016857339801,
+ "learning_rate": 2.9242029390519454e-07,
+ "loss": 0.8066,
+ "step": 4808
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.009222610726187,
+ "learning_rate": 2.909261034347255e-07,
+ "loss": 0.923,
+ "step": 4809
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8628487728144087,
+ "learning_rate": 2.894356838566792e-07,
+ "loss": 0.9011,
+ "step": 4810
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7467133884872036,
+ "learning_rate": 2.879490357499204e-07,
+ "loss": 0.8536,
+ "step": 4811
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0318317822299221,
+ "learning_rate": 2.864661596918428e-07,
+ "loss": 0.8737,
+ "step": 4812
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1852552119588342,
+ "learning_rate": 2.849870562583812e-07,
+ "loss": 0.9416,
+ "step": 4813
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7617444243444661,
+ "learning_rate": 2.8351172602399945e-07,
+ "loss": 0.7771,
+ "step": 4814
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9490221699601976,
+ "learning_rate": 2.8204016956169924e-07,
+ "loss": 0.8231,
+ "step": 4815
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7905084831280471,
+ "learning_rate": 2.8057238744301994e-07,
+ "loss": 0.8261,
+ "step": 4816
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9927543287963589,
+ "learning_rate": 2.7910838023802676e-07,
+ "loss": 0.9189,
+ "step": 4817
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8495678485938293,
+ "learning_rate": 2.7764814851532485e-07,
+ "loss": 0.9281,
+ "step": 4818
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8981715562853979,
+ "learning_rate": 2.761916928420527e-07,
+ "loss": 0.8538,
+ "step": 4819
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.281907221968286,
+ "learning_rate": 2.74739013783879e-07,
+ "loss": 0.9449,
+ "step": 4820
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8362923179945122,
+ "learning_rate": 2.7329011190500797e-07,
+ "loss": 0.8955,
+ "step": 4821
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1619832823970573,
+ "learning_rate": 2.7184498776817615e-07,
+ "loss": 0.8837,
+ "step": 4822
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9110960707661918,
+ "learning_rate": 2.704036419346534e-07,
+ "loss": 0.8811,
+ "step": 4823
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1212523560860714,
+ "learning_rate": 2.689660749642442e-07,
+ "loss": 0.9146,
+ "step": 4824
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0730838433206684,
+ "learning_rate": 2.675322874152786e-07,
+ "loss": 0.951,
+ "step": 4825
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8724336401258055,
+ "learning_rate": 2.6610227984462556e-07,
+ "loss": 0.8174,
+ "step": 4826
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7584430041691073,
+ "learning_rate": 2.646760528076842e-07,
+ "loss": 0.8407,
+ "step": 4827
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9256198254167516,
+ "learning_rate": 2.6325360685838243e-07,
+ "loss": 0.8554,
+ "step": 4828
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8910253583304542,
+ "learning_rate": 2.618349425491851e-07,
+ "loss": 0.8225,
+ "step": 4829
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0123914308218305,
+ "learning_rate": 2.604200604310825e-07,
+ "loss": 0.9156,
+ "step": 4830
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.2241093981548952,
+ "learning_rate": 2.590089610535984e-07,
+ "loss": 0.8943,
+ "step": 4831
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0372795070500178,
+ "learning_rate": 2.5760164496479e-07,
+ "loss": 0.8725,
+ "step": 4832
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9015191328280726,
+ "learning_rate": 2.5619811271123897e-07,
+ "loss": 0.8565,
+ "step": 4833
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.1773068646916138,
+ "learning_rate": 2.5479836483806586e-07,
+ "loss": 0.9254,
+ "step": 4834
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9129350713541154,
+ "learning_rate": 2.5340240188891143e-07,
+ "loss": 0.846,
+ "step": 4835
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9094144875731605,
+ "learning_rate": 2.520102244059552e-07,
+ "loss": 0.9001,
+ "step": 4836
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9342262032921407,
+ "learning_rate": 2.506218329299026e-07,
+ "loss": 0.9003,
+ "step": 4837
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0149200179717532,
+ "learning_rate": 2.4923722799998664e-07,
+ "loss": 0.899,
+ "step": 4838
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9571938084709873,
+ "learning_rate": 2.4785641015397375e-07,
+ "loss": 0.8604,
+ "step": 4839
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9507174272791346,
+ "learning_rate": 2.464793799281573e-07,
+ "loss": 0.8968,
+ "step": 4840
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0268259071415713,
+ "learning_rate": 2.4510613785735936e-07,
+ "loss": 0.8992,
+ "step": 4841
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9180622195280348,
+ "learning_rate": 2.4373668447493225e-07,
+ "loss": 0.8623,
+ "step": 4842
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8173243364714653,
+ "learning_rate": 2.423710203127561e-07,
+ "loss": 0.8481,
+ "step": 4843
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8501582439070058,
+ "learning_rate": 2.410091459012376e-07,
+ "loss": 0.8666,
+ "step": 4844
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.7436945020982553,
+ "learning_rate": 2.3965106176931375e-07,
+ "loss": 0.8033,
+ "step": 4845
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0904856546692936,
+ "learning_rate": 2.3829676844444926e-07,
+ "loss": 0.8943,
+ "step": 4846
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9200488567790596,
+ "learning_rate": 2.3694626645263675e-07,
+ "loss": 0.9005,
+ "step": 4847
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9160577148251488,
+ "learning_rate": 2.3559955631839436e-07,
+ "loss": 0.8582,
+ "step": 4848
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8691999171424171,
+ "learning_rate": 2.3425663856476932e-07,
+ "loss": 0.8813,
+ "step": 4849
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.889963183329649,
+ "learning_rate": 2.3291751371333438e-07,
+ "loss": 0.8405,
+ "step": 4850
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.83305665054129,
+ "learning_rate": 2.3158218228419127e-07,
+ "loss": 0.8162,
+ "step": 4851
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.94606023936023,
+ "learning_rate": 2.3025064479596625e-07,
+ "loss": 0.8675,
+ "step": 4852
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9921362321103562,
+ "learning_rate": 2.2892290176581678e-07,
+ "loss": 0.8462,
+ "step": 4853
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.799977501340026,
+ "learning_rate": 2.2759895370941809e-07,
+ "loss": 0.8123,
+ "step": 4854
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9659143817670327,
+ "learning_rate": 2.2627880114097779e-07,
+ "loss": 0.8679,
+ "step": 4855
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9412116769931023,
+ "learning_rate": 2.2496244457323013e-07,
+ "loss": 0.8911,
+ "step": 4856
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.9450756317603868,
+ "learning_rate": 2.2364988451742953e-07,
+ "loss": 0.8894,
+ "step": 4857
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.8983220803716617,
+ "learning_rate": 2.2234112148336373e-07,
+ "loss": 0.8341,
+ "step": 4858
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 1.0268524644220929,
+ "learning_rate": 2.2103615597933613e-07,
+ "loss": 0.9675,
+ "step": 4859
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.1491366209979879,
+ "learning_rate": 2.1973498851218244e-07,
+ "loss": 0.9454,
+ "step": 4860
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9358090668411937,
+ "learning_rate": 2.1843761958726283e-07,
+ "loss": 0.8429,
+ "step": 4861
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.6854621880409395,
+ "learning_rate": 2.1714404970845647e-07,
+ "loss": 0.7782,
+ "step": 4862
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9795775231339658,
+ "learning_rate": 2.1585427937817594e-07,
+ "loss": 0.9129,
+ "step": 4863
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8519581561309496,
+ "learning_rate": 2.145683090973494e-07,
+ "loss": 0.8296,
+ "step": 4864
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8514572277257874,
+ "learning_rate": 2.1328613936543396e-07,
+ "loss": 0.9167,
+ "step": 4865
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.852330306254833,
+ "learning_rate": 2.1200777068041134e-07,
+ "loss": 0.8427,
+ "step": 4866
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9473791189632517,
+ "learning_rate": 2.1073320353878102e-07,
+ "loss": 0.8657,
+ "step": 4867
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9847177080882329,
+ "learning_rate": 2.0946243843557367e-07,
+ "loss": 0.9212,
+ "step": 4868
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8465542040304344,
+ "learning_rate": 2.0819547586434008e-07,
+ "loss": 0.8873,
+ "step": 4869
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.951387985091641,
+ "learning_rate": 2.0693231631715105e-07,
+ "loss": 0.8933,
+ "step": 4870
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9266622429240401,
+ "learning_rate": 2.0567296028460638e-07,
+ "loss": 0.8453,
+ "step": 4871
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8174991156044403,
+ "learning_rate": 2.0441740825582258e-07,
+ "loss": 0.838,
+ "step": 4872
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0089077722732323,
+ "learning_rate": 2.0316566071844402e-07,
+ "loss": 0.8936,
+ "step": 4873
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0920332199012868,
+ "learning_rate": 2.0191771815863292e-07,
+ "loss": 0.9403,
+ "step": 4874
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9544594524434065,
+ "learning_rate": 2.0067358106107714e-07,
+ "loss": 0.85,
+ "step": 4875
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0093062896803937,
+ "learning_rate": 1.994332499089846e-07,
+ "loss": 0.8766,
+ "step": 4876
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9205446344886734,
+ "learning_rate": 1.9819672518408662e-07,
+ "loss": 0.8626,
+ "step": 4877
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8377835543230611,
+ "learning_rate": 1.9696400736663457e-07,
+ "loss": 0.8428,
+ "step": 4878
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9349688706042524,
+ "learning_rate": 1.9573509693540104e-07,
+ "loss": 0.8631,
+ "step": 4879
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8660003918007885,
+ "learning_rate": 1.9450999436768093e-07,
+ "loss": 0.8902,
+ "step": 4880
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9229683300098562,
+ "learning_rate": 1.9328870013929134e-07,
+ "loss": 0.9103,
+ "step": 4881
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0245997505612194,
+ "learning_rate": 1.9207121472456846e-07,
+ "loss": 0.9036,
+ "step": 4882
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8394271533983679,
+ "learning_rate": 1.9085753859636736e-07,
+ "loss": 0.7996,
+ "step": 4883
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8895116574215128,
+ "learning_rate": 1.8964767222606873e-07,
+ "loss": 0.8934,
+ "step": 4884
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8271813502907709,
+ "learning_rate": 1.8844161608356782e-07,
+ "loss": 0.8819,
+ "step": 4885
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.960087942291403,
+ "learning_rate": 1.872393706372866e-07,
+ "loss": 0.8769,
+ "step": 4886
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9867545700235159,
+ "learning_rate": 1.8604093635416155e-07,
+ "loss": 0.9465,
+ "step": 4887
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9067425738013761,
+ "learning_rate": 1.8484631369964922e-07,
+ "loss": 0.875,
+ "step": 4888
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.6951368714852485,
+ "learning_rate": 1.8365550313772852e-07,
+ "loss": 0.7354,
+ "step": 4889
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8670588577281381,
+ "learning_rate": 1.8246850513089832e-07,
+ "loss": 0.8523,
+ "step": 4890
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8903424346494262,
+ "learning_rate": 1.8128532014017098e-07,
+ "loss": 0.8587,
+ "step": 4891
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8822609628592021,
+ "learning_rate": 1.8010594862508669e-07,
+ "loss": 0.8117,
+ "step": 4892
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9202984522012919,
+ "learning_rate": 1.789303910436968e-07,
+ "loss": 0.8916,
+ "step": 4893
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0830061317444566,
+ "learning_rate": 1.777586478525739e-07,
+ "loss": 0.9453,
+ "step": 4894
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9974045244723038,
+ "learning_rate": 1.7659071950681172e-07,
+ "loss": 0.8407,
+ "step": 4895
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9650522807388956,
+ "learning_rate": 1.754266064600174e-07,
+ "loss": 0.9346,
+ "step": 4896
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9672567598313921,
+ "learning_rate": 1.742663091643204e-07,
+ "loss": 0.8821,
+ "step": 4897
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9361755995042439,
+ "learning_rate": 1.7310982807036915e-07,
+ "loss": 0.8802,
+ "step": 4898
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.938968945334091,
+ "learning_rate": 1.719571636273243e-07,
+ "loss": 0.8815,
+ "step": 4899
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8959667067165109,
+ "learning_rate": 1.7080831628286886e-07,
+ "loss": 0.8313,
+ "step": 4900
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9848751342582164,
+ "learning_rate": 1.6966328648320152e-07,
+ "loss": 0.8996,
+ "step": 4901
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9561656647857733,
+ "learning_rate": 1.685220746730387e-07,
+ "loss": 0.876,
+ "step": 4902
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9321518810751969,
+ "learning_rate": 1.673846812956137e-07,
+ "loss": 0.8366,
+ "step": 4903
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9301582279701739,
+ "learning_rate": 1.6625110679267642e-07,
+ "loss": 0.8306,
+ "step": 4904
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9054196529247108,
+ "learning_rate": 1.6512135160449583e-07,
+ "loss": 0.8958,
+ "step": 4905
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9301361200683044,
+ "learning_rate": 1.6399541616985648e-07,
+ "loss": 0.9442,
+ "step": 4906
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9265826979847209,
+ "learning_rate": 1.6287330092605525e-07,
+ "loss": 0.8635,
+ "step": 4907
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9315082930403457,
+ "learning_rate": 1.6175500630891128e-07,
+ "loss": 0.8604,
+ "step": 4908
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 1.0831395418253844,
+ "learning_rate": 1.6064053275275716e-07,
+ "loss": 0.8267,
+ "step": 4909
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.8879856142721188,
+ "learning_rate": 1.5952988069044105e-07,
+ "loss": 0.8619,
+ "step": 4910
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.9450149735390804,
+ "learning_rate": 1.5842305055332796e-07,
+ "loss": 0.8875,
+ "step": 4911
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9907386770499438,
+ "learning_rate": 1.573200427712973e-07,
+ "loss": 0.8901,
+ "step": 4912
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9961837741898627,
+ "learning_rate": 1.562208577727442e-07,
+ "loss": 0.869,
+ "step": 4913
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.6908371104653627,
+ "learning_rate": 1.551254959845805e-07,
+ "loss": 0.781,
+ "step": 4914
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.848932592726013,
+ "learning_rate": 1.540339578322314e-07,
+ "loss": 0.8398,
+ "step": 4915
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7969892986103903,
+ "learning_rate": 1.5294624373963894e-07,
+ "loss": 0.8683,
+ "step": 4916
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.00710363788212,
+ "learning_rate": 1.5186235412925744e-07,
+ "loss": 0.9465,
+ "step": 4917
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0016862214468187,
+ "learning_rate": 1.5078228942205674e-07,
+ "loss": 0.935,
+ "step": 4918
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9400739828963365,
+ "learning_rate": 1.4970605003752359e-07,
+ "loss": 0.8526,
+ "step": 4919
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0189479572016134,
+ "learning_rate": 1.4863363639365357e-07,
+ "loss": 0.9729,
+ "step": 4920
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8631404228098428,
+ "learning_rate": 1.4756504890696466e-07,
+ "loss": 0.8679,
+ "step": 4921
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.001340863529343,
+ "learning_rate": 1.4650028799247928e-07,
+ "loss": 0.892,
+ "step": 4922
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.1220587828585806,
+ "learning_rate": 1.454393540637411e-07,
+ "loss": 0.8648,
+ "step": 4923
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0043038891555147,
+ "learning_rate": 1.4438224753280384e-07,
+ "loss": 0.9364,
+ "step": 4924
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.840414922018483,
+ "learning_rate": 1.4332896881023462e-07,
+ "loss": 0.8567,
+ "step": 4925
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.075487095300389,
+ "learning_rate": 1.422795183051151e-07,
+ "loss": 0.9167,
+ "step": 4926
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8325197328919645,
+ "learning_rate": 1.4123389642504148e-07,
+ "loss": 0.8893,
+ "step": 4927
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0020654863520815,
+ "learning_rate": 1.401921035761189e-07,
+ "loss": 0.8738,
+ "step": 4928
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8273520614516778,
+ "learning_rate": 1.3915414016296925e-07,
+ "loss": 0.8515,
+ "step": 4929
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0258373369412856,
+ "learning_rate": 1.381200065887256e-07,
+ "loss": 0.9156,
+ "step": 4930
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0114786282422006,
+ "learning_rate": 1.3708970325503222e-07,
+ "loss": 0.8575,
+ "step": 4931
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9620270323858393,
+ "learning_rate": 1.3606323056204795e-07,
+ "loss": 0.9302,
+ "step": 4932
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9112452792040394,
+ "learning_rate": 1.3504058890844274e-07,
+ "loss": 0.8474,
+ "step": 4933
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9110880955254214,
+ "learning_rate": 1.3402177869139887e-07,
+ "loss": 0.884,
+ "step": 4934
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9014024407300836,
+ "learning_rate": 1.3300680030661096e-07,
+ "loss": 0.8912,
+ "step": 4935
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9900134268512297,
+ "learning_rate": 1.3199565414828363e-07,
+ "loss": 0.8962,
+ "step": 4936
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8127789986933913,
+ "learning_rate": 1.3098834060913612e-07,
+ "loss": 0.8135,
+ "step": 4937
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9552559772327756,
+ "learning_rate": 1.2998486008039545e-07,
+ "loss": 0.8783,
+ "step": 4938
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9107922053320379,
+ "learning_rate": 1.289852129518032e-07,
+ "loss": 0.7668,
+ "step": 4939
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.825516073534608,
+ "learning_rate": 1.2798939961161217e-07,
+ "loss": 0.9004,
+ "step": 4940
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9418341015108066,
+ "learning_rate": 1.269974204465818e-07,
+ "loss": 0.8216,
+ "step": 4941
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8972634896305701,
+ "learning_rate": 1.2600927584198618e-07,
+ "loss": 0.9335,
+ "step": 4942
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9263412712239212,
+ "learning_rate": 1.2502496618161165e-07,
+ "loss": 0.9212,
+ "step": 4943
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9897179641923016,
+ "learning_rate": 1.2404449184774904e-07,
+ "loss": 0.9131,
+ "step": 4944
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9641757091346561,
+ "learning_rate": 1.2306785322120596e-07,
+ "loss": 0.8681,
+ "step": 4945
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9060032522502734,
+ "learning_rate": 1.220950506812968e-07,
+ "loss": 0.8912,
+ "step": 4946
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9215454089738369,
+ "learning_rate": 1.2112608460584707e-07,
+ "loss": 0.8756,
+ "step": 4947
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7945254219454703,
+ "learning_rate": 1.2016095537119242e-07,
+ "loss": 0.8295,
+ "step": 4948
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 1.0414589663403506,
+ "learning_rate": 1.1919966335217636e-07,
+ "loss": 0.9457,
+ "step": 4949
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9595568292401536,
+ "learning_rate": 1.1824220892215465e-07,
+ "loss": 0.905,
+ "step": 4950
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8461944356601063,
+ "learning_rate": 1.1728859245299207e-07,
+ "loss": 0.8652,
+ "step": 4951
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9317314160690501,
+ "learning_rate": 1.1633881431506122e-07,
+ "loss": 0.8645,
+ "step": 4952
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9120030119039478,
+ "learning_rate": 1.1539287487724594e-07,
+ "loss": 0.8319,
+ "step": 4953
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9843358541077079,
+ "learning_rate": 1.1445077450693786e-07,
+ "loss": 0.9459,
+ "step": 4954
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9261795371240237,
+ "learning_rate": 1.1351251357003656e-07,
+ "loss": 0.7929,
+ "step": 4955
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8039379007987508,
+ "learning_rate": 1.1257809243095385e-07,
+ "loss": 0.8256,
+ "step": 4956
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9345029900856137,
+ "learning_rate": 1.1164751145260722e-07,
+ "loss": 0.8424,
+ "step": 4957
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9227138858105707,
+ "learning_rate": 1.107207709964242e-07,
+ "loss": 0.8466,
+ "step": 4958
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8999767466755633,
+ "learning_rate": 1.0979787142233911e-07,
+ "loss": 0.8479,
+ "step": 4959
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.7525695707719479,
+ "learning_rate": 1.0887881308879633e-07,
+ "loss": 0.7916,
+ "step": 4960
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.8755468919626722,
+ "learning_rate": 1.0796359635274701e-07,
+ "loss": 0.789,
+ "step": 4961
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9434425034039697,
+ "learning_rate": 1.0705222156965011e-07,
+ "loss": 0.9036,
+ "step": 4962
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.9959222395817438,
+ "learning_rate": 1.0614468909347476e-07,
+ "loss": 0.8797,
+ "step": 4963
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0098054002291768,
+ "learning_rate": 1.0524099927669563e-07,
+ "loss": 0.9175,
+ "step": 4964
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8143049652513321,
+ "learning_rate": 1.0434115247029419e-07,
+ "loss": 0.8647,
+ "step": 4965
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8866996547768736,
+ "learning_rate": 1.0344514902376201e-07,
+ "loss": 0.941,
+ "step": 4966
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8388311150203465,
+ "learning_rate": 1.0255298928509627e-07,
+ "loss": 0.826,
+ "step": 4967
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0281085992344876,
+ "learning_rate": 1.0166467360079979e-07,
+ "loss": 0.8958,
+ "step": 4968
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.946969119102038,
+ "learning_rate": 1.007802023158877e-07,
+ "loss": 0.8803,
+ "step": 4969
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9770804614697257,
+ "learning_rate": 9.989957577387521e-08,
+ "loss": 0.9306,
+ "step": 4970
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.1240751087233465,
+ "learning_rate": 9.902279431678874e-08,
+ "loss": 0.8514,
+ "step": 4971
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.219755530872357,
+ "learning_rate": 9.814985828516033e-08,
+ "loss": 0.9593,
+ "step": 4972
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.974785805979655,
+ "learning_rate": 9.728076801802656e-08,
+ "loss": 0.898,
+ "step": 4973
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8319963374558925,
+ "learning_rate": 9.641552385293518e-08,
+ "loss": 0.7338,
+ "step": 4974
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8475851587289722,
+ "learning_rate": 9.555412612593518e-08,
+ "loss": 0.8694,
+ "step": 4975
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9221752350167541,
+ "learning_rate": 9.469657517158226e-08,
+ "loss": 0.9098,
+ "step": 4976
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8780798629787528,
+ "learning_rate": 9.384287132294223e-08,
+ "loss": 0.8184,
+ "step": 4977
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8253107845052225,
+ "learning_rate": 9.299301491158207e-08,
+ "loss": 0.8476,
+ "step": 4978
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9055496502233873,
+ "learning_rate": 9.214700626757667e-08,
+ "loss": 0.8441,
+ "step": 4979
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9286350519833819,
+ "learning_rate": 9.130484571950538e-08,
+ "loss": 0.8322,
+ "step": 4980
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9130554928523456,
+ "learning_rate": 9.046653359445323e-08,
+ "loss": 0.8778,
+ "step": 4981
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.3248673251353242,
+ "learning_rate": 8.963207021801423e-08,
+ "loss": 0.8277,
+ "step": 4982
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8582326641629127,
+ "learning_rate": 8.880145591428024e-08,
+ "loss": 0.8642,
+ "step": 4983
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.894970146284961,
+ "learning_rate": 8.797469100585432e-08,
+ "loss": 0.8512,
+ "step": 4984
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0332434703597133,
+ "learning_rate": 8.715177581384182e-08,
+ "loss": 0.9301,
+ "step": 4985
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0281611455914244,
+ "learning_rate": 8.633271065785486e-08,
+ "loss": 0.9253,
+ "step": 4986
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8719749589448101,
+ "learning_rate": 8.551749585600678e-08,
+ "loss": 0.8787,
+ "step": 4987
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9181086821632437,
+ "learning_rate": 8.470613172491981e-08,
+ "loss": 0.8588,
+ "step": 4988
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9339848521162384,
+ "learning_rate": 8.389861857971748e-08,
+ "loss": 0.881,
+ "step": 4989
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9173366391766944,
+ "learning_rate": 8.309495673402778e-08,
+ "loss": 0.8879,
+ "step": 4990
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0399989389013817,
+ "learning_rate": 8.229514649998438e-08,
+ "loss": 0.8997,
+ "step": 4991
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0624596162091668,
+ "learning_rate": 8.149918818822433e-08,
+ "loss": 0.9147,
+ "step": 4992
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9970645138443958,
+ "learning_rate": 8.070708210788925e-08,
+ "loss": 0.8904,
+ "step": 4993
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.986835335630644,
+ "learning_rate": 7.991882856662303e-08,
+ "loss": 0.9101,
+ "step": 4994
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.013629135156919,
+ "learning_rate": 7.913442787057523e-08,
+ "loss": 0.8899,
+ "step": 4995
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8835466904792467,
+ "learning_rate": 7.835388032439661e-08,
+ "loss": 0.8108,
+ "step": 4996
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.923970293443084,
+ "learning_rate": 7.757718623124466e-08,
+ "loss": 0.9156,
+ "step": 4997
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.959878509529537,
+ "learning_rate": 7.680434589277696e-08,
+ "loss": 0.8851,
+ "step": 4998
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9017420597003865,
+ "learning_rate": 7.603535960915675e-08,
+ "loss": 0.871,
+ "step": 4999
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9433129940418841,
+ "learning_rate": 7.527022767904957e-08,
+ "loss": 0.857,
+ "step": 5000
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9693863757975214,
+ "learning_rate": 7.450895039962214e-08,
+ "loss": 0.83,
+ "step": 5001
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9703553911015101,
+ "learning_rate": 7.375152806654685e-08,
+ "loss": 0.8666,
+ "step": 5002
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.7910571621656032,
+ "learning_rate": 7.299796097399947e-08,
+ "loss": 0.8284,
+ "step": 5003
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8253973669771069,
+ "learning_rate": 7.224824941465369e-08,
+ "loss": 0.864,
+ "step": 5004
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0238183808381525,
+ "learning_rate": 7.150239367969102e-08,
+ "loss": 0.8782,
+ "step": 5005
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.1514575472246202,
+ "learning_rate": 7.076039405879309e-08,
+ "loss": 0.8626,
+ "step": 5006
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8433232839935662,
+ "learning_rate": 7.002225084014269e-08,
+ "loss": 0.9191,
+ "step": 5007
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0102765291427236,
+ "learning_rate": 6.928796431042717e-08,
+ "loss": 0.9093,
+ "step": 5008
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8292576715341747,
+ "learning_rate": 6.855753475483507e-08,
+ "loss": 0.8198,
+ "step": 5009
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.7612233949189441,
+ "learning_rate": 6.783096245705612e-08,
+ "loss": 0.7959,
+ "step": 5010
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 1.0225289640049293,
+ "learning_rate": 6.710824769928349e-08,
+ "loss": 0.914,
+ "step": 5011
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9683160502108255,
+ "learning_rate": 6.63893907622104e-08,
+ "loss": 0.8869,
+ "step": 5012
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9695984423459576,
+ "learning_rate": 6.567439192503244e-08,
+ "loss": 0.8984,
+ "step": 5013
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.9720923285609582,
+ "learning_rate": 6.496325146544746e-08,
+ "loss": 0.9388,
+ "step": 5014
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.8420698733872112,
+ "learning_rate": 6.425596965965453e-08,
+ "loss": 0.8578,
+ "step": 5015
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9484628065671088,
+ "learning_rate": 6.35525467823539e-08,
+ "loss": 0.8252,
+ "step": 5016
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9735810554009207,
+ "learning_rate": 6.285298310674703e-08,
+ "loss": 0.9221,
+ "step": 5017
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9101543880564346,
+ "learning_rate": 6.215727890453438e-08,
+ "loss": 0.9316,
+ "step": 5018
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.0338953269332778,
+ "learning_rate": 6.14654344459209e-08,
+ "loss": 0.8709,
+ "step": 5019
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8608706856091106,
+ "learning_rate": 6.077744999961166e-08,
+ "loss": 0.9059,
+ "step": 5020
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.7967369287800257,
+ "learning_rate": 6.009332583281069e-08,
+ "loss": 0.8225,
+ "step": 5021
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9968382409323012,
+ "learning_rate": 5.941306221122545e-08,
+ "loss": 0.8771,
+ "step": 5022
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.7641425877218193,
+ "learning_rate": 5.873665939906015e-08,
+ "loss": 0.8342,
+ "step": 5023
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.7709760344688033,
+ "learning_rate": 5.806411765902353e-08,
+ "loss": 0.8394,
+ "step": 5024
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8793154150848215,
+ "learning_rate": 5.739543725232333e-08,
+ "loss": 0.888,
+ "step": 5025
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9472538588471504,
+ "learning_rate": 5.673061843866623e-08,
+ "loss": 0.8863,
+ "step": 5026
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9854384843315175,
+ "learning_rate": 5.606966147626125e-08,
+ "loss": 0.8876,
+ "step": 5027
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8587008817920627,
+ "learning_rate": 5.5412566621815266e-08,
+ "loss": 0.8476,
+ "step": 5028
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8634951715355934,
+ "learning_rate": 5.4759334130536355e-08,
+ "loss": 0.8697,
+ "step": 5029
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.1373108459789385,
+ "learning_rate": 5.41099642561338e-08,
+ "loss": 0.8426,
+ "step": 5030
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9506342752141803,
+ "learning_rate": 5.3464457250814726e-08,
+ "loss": 0.9123,
+ "step": 5031
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9318516420309513,
+ "learning_rate": 5.2822813365286386e-08,
+ "loss": 0.9285,
+ "step": 5032
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9583561926837334,
+ "learning_rate": 5.218503284875609e-08,
+ "loss": 0.8563,
+ "step": 5033
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.0021940891062198,
+ "learning_rate": 5.155111594893014e-08,
+ "loss": 0.9076,
+ "step": 5034
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8143985887834371,
+ "learning_rate": 5.092106291201604e-08,
+ "loss": 0.8132,
+ "step": 5035
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9725986635059012,
+ "learning_rate": 5.029487398271693e-08,
+ "loss": 0.8648,
+ "step": 5036
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8766053363440376,
+ "learning_rate": 4.967254940423716e-08,
+ "loss": 0.8971,
+ "step": 5037
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9782668378494458,
+ "learning_rate": 4.905408941828338e-08,
+ "loss": 0.8984,
+ "step": 5038
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9340389798139436,
+ "learning_rate": 4.8439494265055674e-08,
+ "loss": 0.9028,
+ "step": 5039
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9811769450685663,
+ "learning_rate": 4.7828764183257545e-08,
+ "loss": 0.8696,
+ "step": 5040
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8961754214056428,
+ "learning_rate": 4.722189941008703e-08,
+ "loss": 0.9334,
+ "step": 5041
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8707688717676328,
+ "learning_rate": 4.66189001812456e-08,
+ "loss": 0.8655,
+ "step": 5042
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.897970426992487,
+ "learning_rate": 4.6019766730930336e-08,
+ "loss": 0.876,
+ "step": 5043
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8287578199877651,
+ "learning_rate": 4.542449929183845e-08,
+ "loss": 0.8483,
+ "step": 5044
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.0117366137455581,
+ "learning_rate": 4.4833098095165005e-08,
+ "loss": 0.8475,
+ "step": 5045
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9042853360850842,
+ "learning_rate": 4.424556337060182e-08,
+ "loss": 0.8563,
+ "step": 5046
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9668104450159726,
+ "learning_rate": 4.366189534634191e-08,
+ "loss": 0.9061,
+ "step": 5047
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9219130017239038,
+ "learning_rate": 4.308209424907506e-08,
+ "loss": 0.8874,
+ "step": 5048
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.0038500651359918,
+ "learning_rate": 4.250616030399002e-08,
+ "loss": 0.9034,
+ "step": 5049
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.6486739136454955,
+ "learning_rate": 4.1934093734771194e-08,
+ "loss": 0.8006,
+ "step": 5050
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9127991016704711,
+ "learning_rate": 4.13658947636042e-08,
+ "loss": 0.8768,
+ "step": 5051
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8537586240512632,
+ "learning_rate": 4.080156361117027e-08,
+ "loss": 0.8708,
+ "step": 5052
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9456913537159871,
+ "learning_rate": 4.024110049664853e-08,
+ "loss": 0.8494,
+ "step": 5053
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.825990068015632,
+ "learning_rate": 3.9684505637718194e-08,
+ "loss": 0.8223,
+ "step": 5054
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8196455810297848,
+ "learning_rate": 3.913177925055189e-08,
+ "loss": 0.814,
+ "step": 5055
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8183758574387315,
+ "learning_rate": 3.858292154982457e-08,
+ "loss": 0.8478,
+ "step": 5056
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9156426232557442,
+ "learning_rate": 3.8037932748704596e-08,
+ "loss": 0.8795,
+ "step": 5057
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9781664515146559,
+ "learning_rate": 3.7496813058859325e-08,
+ "loss": 0.8805,
+ "step": 5058
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9220949250087491,
+ "learning_rate": 3.695956269045509e-08,
+ "loss": 0.8293,
+ "step": 5059
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9929206963979994,
+ "learning_rate": 3.642618185215163e-08,
+ "loss": 0.8624,
+ "step": 5060
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9632509295681669,
+ "learning_rate": 3.589667075110992e-08,
+ "loss": 0.8997,
+ "step": 5061
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 1.0711501017485943,
+ "learning_rate": 3.537102959298322e-08,
+ "loss": 0.8396,
+ "step": 5062
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9727127535620441,
+ "learning_rate": 3.4849258581928224e-08,
+ "loss": 0.892,
+ "step": 5063
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9593644220636101,
+ "learning_rate": 3.4331357920591726e-08,
+ "loss": 0.8853,
+ "step": 5064
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9442514315914554,
+ "learning_rate": 3.3817327810121706e-08,
+ "loss": 0.8345,
+ "step": 5065
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.8905701331494348,
+ "learning_rate": 3.3307168450160685e-08,
+ "loss": 0.861,
+ "step": 5066
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.9902787090474342,
+ "learning_rate": 3.280088003885018e-08,
+ "loss": 0.9083,
+ "step": 5067
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9931359015141633,
+ "learning_rate": 3.229846277282511e-08,
+ "loss": 0.8593,
+ "step": 5068
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9852329124586989,
+ "learning_rate": 3.1799916847220505e-08,
+ "loss": 0.8363,
+ "step": 5069
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8405878684997006,
+ "learning_rate": 3.130524245566369e-08,
+ "loss": 0.92,
+ "step": 5070
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8830149146081075,
+ "learning_rate": 3.0814439790280984e-08,
+ "loss": 0.8577,
+ "step": 5071
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9014046786230616,
+ "learning_rate": 3.032750904169546e-08,
+ "loss": 0.839,
+ "step": 5072
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9377598357730702,
+ "learning_rate": 2.9844450399024725e-08,
+ "loss": 0.9075,
+ "step": 5073
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9995410962159877,
+ "learning_rate": 2.936526404988427e-08,
+ "loss": 0.916,
+ "step": 5074
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0270895051833207,
+ "learning_rate": 2.8889950180382985e-08,
+ "loss": 0.8634,
+ "step": 5075
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8983186386617144,
+ "learning_rate": 2.8418508975127655e-08,
+ "loss": 0.8248,
+ "step": 5076
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9398176283894555,
+ "learning_rate": 2.7950940617221812e-08,
+ "loss": 0.8987,
+ "step": 5077
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8860398087452429,
+ "learning_rate": 2.7487245288261298e-08,
+ "loss": 0.8627,
+ "step": 5078
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9342951785654404,
+ "learning_rate": 2.702742316834206e-08,
+ "loss": 0.9411,
+ "step": 5079
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.122163693996949,
+ "learning_rate": 2.657147443605457e-08,
+ "loss": 0.9127,
+ "step": 5080
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0465484788710904,
+ "learning_rate": 2.6119399268480505e-08,
+ "loss": 0.9136,
+ "step": 5081
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.798825408684932,
+ "learning_rate": 2.5671197841203867e-08,
+ "loss": 0.837,
+ "step": 5082
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8780526567293018,
+ "learning_rate": 2.5226870328299845e-08,
+ "loss": 0.8189,
+ "step": 5083
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.916480382150991,
+ "learning_rate": 2.4786416902339293e-08,
+ "loss": 0.8448,
+ "step": 5084
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.06170455107868,
+ "learning_rate": 2.4349837734390923e-08,
+ "loss": 0.8418,
+ "step": 5085
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0476772440212105,
+ "learning_rate": 2.3917132994016878e-08,
+ "loss": 0.958,
+ "step": 5086
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8952489476674438,
+ "learning_rate": 2.3488302849272726e-08,
+ "loss": 0.8258,
+ "step": 5087
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9705618551900753,
+ "learning_rate": 2.3063347466713014e-08,
+ "loss": 0.8516,
+ "step": 5088
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0136720502537189,
+ "learning_rate": 2.264226701138461e-08,
+ "loss": 0.9379,
+ "step": 5089
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.989594484149281,
+ "learning_rate": 2.2225061646830025e-08,
+ "loss": 0.922,
+ "step": 5090
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0473452522042122,
+ "learning_rate": 2.181173153508853e-08,
+ "loss": 0.8655,
+ "step": 5091
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8921121561614395,
+ "learning_rate": 2.1402276836691715e-08,
+ "loss": 0.8384,
+ "step": 5092
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9084365387118964,
+ "learning_rate": 2.0996697710666812e-08,
+ "loss": 0.8851,
+ "step": 5093
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9645226674058784,
+ "learning_rate": 2.059499431453671e-08,
+ "loss": 0.9162,
+ "step": 5094
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8395769349697246,
+ "learning_rate": 2.0197166804317713e-08,
+ "loss": 0.785,
+ "step": 5095
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9288132699660807,
+ "learning_rate": 1.9803215334522895e-08,
+ "loss": 0.9073,
+ "step": 5096
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.7831749331306697,
+ "learning_rate": 1.9413140058156533e-08,
+ "loss": 0.7824,
+ "step": 5097
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9156055228868175,
+ "learning_rate": 1.9026941126721876e-08,
+ "loss": 0.8444,
+ "step": 5098
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9943132981267118,
+ "learning_rate": 1.864461869021117e-08,
+ "loss": 0.873,
+ "step": 5099
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.7599942596047295,
+ "learning_rate": 1.826617289711563e-08,
+ "loss": 0.8405,
+ "step": 5100
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9137468919011352,
+ "learning_rate": 1.7891603894418797e-08,
+ "loss": 0.8329,
+ "step": 5101
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9229295254881686,
+ "learning_rate": 1.7520911827598742e-08,
+ "loss": 0.8584,
+ "step": 5102
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8190360512219036,
+ "learning_rate": 1.7154096840629186e-08,
+ "loss": 0.8474,
+ "step": 5103
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8243814663417862,
+ "learning_rate": 1.679115907597617e-08,
+ "loss": 0.8892,
+ "step": 5104
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.7902561992737296,
+ "learning_rate": 1.6432098674600272e-08,
+ "loss": 0.7589,
+ "step": 5105
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.7930075780988352,
+ "learning_rate": 1.6076915775956604e-08,
+ "loss": 0.7701,
+ "step": 5106
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8914310825319651,
+ "learning_rate": 1.5725610517994816e-08,
+ "loss": 0.9466,
+ "step": 5107
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8647166472751977,
+ "learning_rate": 1.537818303715688e-08,
+ "loss": 0.8664,
+ "step": 5108
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9562926976375301,
+ "learning_rate": 1.50346334683793e-08,
+ "loss": 0.9224,
+ "step": 5109
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8531383628855398,
+ "learning_rate": 1.4694961945093122e-08,
+ "loss": 0.8755,
+ "step": 5110
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9499654053146481,
+ "learning_rate": 1.4359168599223926e-08,
+ "loss": 0.9338,
+ "step": 5111
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.0544085985986955,
+ "learning_rate": 1.4027253561188502e-08,
+ "loss": 0.8982,
+ "step": 5112
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9597640495341702,
+ "learning_rate": 1.3699216959899287e-08,
+ "loss": 0.8625,
+ "step": 5113
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.1313391254220313,
+ "learning_rate": 1.337505892276325e-08,
+ "loss": 0.8732,
+ "step": 5114
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8192842726394338,
+ "learning_rate": 1.3054779575677468e-08,
+ "loss": 0.7536,
+ "step": 5115
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 1.144650865389248,
+ "learning_rate": 1.2738379043035765e-08,
+ "loss": 0.92,
+ "step": 5116
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9925136949839877,
+ "learning_rate": 1.2425857447725397e-08,
+ "loss": 0.8441,
+ "step": 5117
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.9316851526041872,
+ "learning_rate": 1.211721491112372e-08,
+ "loss": 0.8458,
+ "step": 5118
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.8534693109344463,
+ "learning_rate": 1.1812451553107063e-08,
+ "loss": 0.8811,
+ "step": 5119
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9793728154770626,
+ "learning_rate": 1.1511567492038522e-08,
+ "loss": 0.8863,
+ "step": 5120
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9754430819738603,
+ "learning_rate": 1.1214562844781285e-08,
+ "loss": 0.9006,
+ "step": 5121
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9242377612979344,
+ "learning_rate": 1.0921437726686413e-08,
+ "loss": 0.8556,
+ "step": 5122
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8441180573261889,
+ "learning_rate": 1.0632192251601726e-08,
+ "loss": 0.8454,
+ "step": 5123
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9528393781597099,
+ "learning_rate": 1.0346826531865139e-08,
+ "loss": 0.8773,
+ "step": 5124
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9958148724856253,
+ "learning_rate": 1.006534067831022e-08,
+ "loss": 0.9132,
+ "step": 5125
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8407533110252854,
+ "learning_rate": 9.78773480026396e-09,
+ "loss": 0.8652,
+ "step": 5126
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9688678826434681,
+ "learning_rate": 9.514009005543445e-09,
+ "loss": 0.9038,
+ "step": 5127
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9620114124724733,
+ "learning_rate": 9.244163400462525e-09,
+ "loss": 0.9005,
+ "step": 5128
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.0672706434254617,
+ "learning_rate": 8.97819808982403e-09,
+ "loss": 0.87,
+ "step": 5129
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9757762183767094,
+ "learning_rate": 8.716113176927554e-09,
+ "loss": 0.917,
+ "step": 5130
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9435794318951496,
+ "learning_rate": 8.457908763562783e-09,
+ "loss": 0.8618,
+ "step": 5131
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.868734834805805,
+ "learning_rate": 8.203584950013942e-09,
+ "loss": 0.8493,
+ "step": 5132
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8052141002329872,
+ "learning_rate": 7.953141835057576e-09,
+ "loss": 0.8542,
+ "step": 5133
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9976444879598758,
+ "learning_rate": 7.706579515962542e-09,
+ "loss": 0.9646,
+ "step": 5134
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.909091922178808,
+ "learning_rate": 7.463898088490019e-09,
+ "loss": 0.859,
+ "step": 5135
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9538373776612831,
+ "learning_rate": 7.225097646895718e-09,
+ "loss": 0.9141,
+ "step": 5136
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9584712293939566,
+ "learning_rate": 6.990178283927673e-09,
+ "loss": 0.8951,
+ "step": 5137
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9862658913023932,
+ "learning_rate": 6.759140090824012e-09,
+ "loss": 0.8538,
+ "step": 5138
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9138909669547333,
+ "learning_rate": 6.531983157318511e-09,
+ "loss": 0.877,
+ "step": 5139
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8760812069883439,
+ "learning_rate": 6.308707571636152e-09,
+ "loss": 0.8343,
+ "step": 5140
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9104766295896735,
+ "learning_rate": 6.089313420494236e-09,
+ "loss": 0.8478,
+ "step": 5141
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9052590435389873,
+ "learning_rate": 5.87380078910349e-09,
+ "loss": 0.9063,
+ "step": 5142
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9543786793396181,
+ "learning_rate": 5.662169761165848e-09,
+ "loss": 0.8656,
+ "step": 5143
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9034902338067047,
+ "learning_rate": 5.4544204188777815e-09,
+ "loss": 0.7799,
+ "step": 5144
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9248390918946453,
+ "learning_rate": 5.25055284292475e-09,
+ "loss": 0.8639,
+ "step": 5145
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.015825398325497,
+ "learning_rate": 5.050567112487858e-09,
+ "loss": 0.8778,
+ "step": 5146
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.7891073540235253,
+ "learning_rate": 4.85446330523831e-09,
+ "loss": 0.8645,
+ "step": 5147
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.1277757430194246,
+ "learning_rate": 4.662241497341846e-09,
+ "loss": 0.8285,
+ "step": 5148
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8224481645628162,
+ "learning_rate": 4.473901763454302e-09,
+ "loss": 0.875,
+ "step": 5149
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9470083376411277,
+ "learning_rate": 4.289444176724944e-09,
+ "loss": 0.8966,
+ "step": 5150
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8968778534432205,
+ "learning_rate": 4.108868808794242e-09,
+ "loss": 0.8393,
+ "step": 5151
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8890026283971536,
+ "learning_rate": 3.932175729797205e-09,
+ "loss": 0.8378,
+ "step": 5152
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.945537895432477,
+ "learning_rate": 3.759365008357829e-09,
+ "loss": 0.8882,
+ "step": 5153
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9047998580520465,
+ "learning_rate": 3.590436711594647e-09,
+ "loss": 0.9036,
+ "step": 5154
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8247726317221266,
+ "learning_rate": 3.4253909051173985e-09,
+ "loss": 0.904,
+ "step": 5155
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8390498818626625,
+ "learning_rate": 3.2642276530281404e-09,
+ "loss": 0.8676,
+ "step": 5156
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8809934256903096,
+ "learning_rate": 3.1069470179201365e-09,
+ "loss": 0.8237,
+ "step": 5157
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9302766322233841,
+ "learning_rate": 2.9535490608789687e-09,
+ "loss": 0.901,
+ "step": 5158
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.7697774832310228,
+ "learning_rate": 2.8040338414847545e-09,
+ "loss": 0.8565,
+ "step": 5159
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8902002448987715,
+ "learning_rate": 2.6584014178054894e-09,
+ "loss": 0.8046,
+ "step": 5160
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.0292510478478478,
+ "learning_rate": 2.516651846403706e-09,
+ "loss": 0.8771,
+ "step": 5161
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.1549910881916694,
+ "learning_rate": 2.378785182333143e-09,
+ "loss": 0.8646,
+ "step": 5162
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8281583041284687,
+ "learning_rate": 2.2448014791398574e-09,
+ "loss": 0.8527,
+ "step": 5163
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8878235079164459,
+ "learning_rate": 2.1147007888622227e-09,
+ "loss": 0.8538,
+ "step": 5164
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.041922385050551,
+ "learning_rate": 1.9884831620287094e-09,
+ "loss": 0.9296,
+ "step": 5165
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9414722193954977,
+ "learning_rate": 1.8661486476612144e-09,
+ "loss": 0.9014,
+ "step": 5166
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.831909402607967,
+ "learning_rate": 1.747697293272843e-09,
+ "loss": 0.8146,
+ "step": 5167
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.8494657105898284,
+ "learning_rate": 1.633129144870127e-09,
+ "loss": 0.902,
+ "step": 5168
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.9464105192527126,
+ "learning_rate": 1.5224442469474743e-09,
+ "loss": 0.8734,
+ "step": 5169
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.870790700949317,
+ "learning_rate": 1.4156426424960513e-09,
+ "loss": 0.8506,
+ "step": 5170
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 1.0819897940678171,
+ "learning_rate": 1.3127243729949003e-09,
+ "loss": 0.8826,
+ "step": 5171
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8302728550764655,
+ "learning_rate": 1.2136894784176011e-09,
+ "loss": 0.9035,
+ "step": 5172
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9787679197562164,
+ "learning_rate": 1.1185379972256105e-09,
+ "loss": 0.9466,
+ "step": 5173
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8632505608838751,
+ "learning_rate": 1.0272699663782525e-09,
+ "loss": 0.9053,
+ "step": 5174
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8896219094390361,
+ "learning_rate": 9.398854213193976e-10,
+ "loss": 0.8891,
+ "step": 5175
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9489643694043242,
+ "learning_rate": 8.563843959907836e-10,
+ "loss": 0.892,
+ "step": 5176
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8448555811990517,
+ "learning_rate": 7.767669228231356e-10,
+ "loss": 0.8332,
+ "step": 5177
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9442011368133144,
+ "learning_rate": 7.010330327372749e-10,
+ "loss": 0.8367,
+ "step": 5178
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 1.193010494403342,
+ "learning_rate": 6.291827551474505e-10,
+ "loss": 0.9318,
+ "step": 5179
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9666442329147793,
+ "learning_rate": 5.612161179613385e-10,
+ "loss": 0.9161,
+ "step": 5180
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8399165252270878,
+ "learning_rate": 4.971331475756013e-10,
+ "loss": 0.8359,
+ "step": 5181
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9590205736378942,
+ "learning_rate": 4.369338688781088e-10,
+ "loss": 0.9415,
+ "step": 5182
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9477755507494081,
+ "learning_rate": 3.8061830525126797e-10,
+ "loss": 0.8849,
+ "step": 5183
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9036069754082946,
+ "learning_rate": 3.2818647856647234e-10,
+ "loss": 0.8903,
+ "step": 5184
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8009574588687146,
+ "learning_rate": 2.796384091885429e-10,
+ "loss": 0.873,
+ "step": 5185
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 1.1222034899044606,
+ "learning_rate": 2.3497411597128705e-10,
+ "loss": 0.8996,
+ "step": 5186
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 1.0360076877732805,
+ "learning_rate": 1.9419361626416e-10,
+ "loss": 0.8483,
+ "step": 5187
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.838445657739913,
+ "learning_rate": 1.5729692590338296e-10,
+ "loss": 0.8649,
+ "step": 5188
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9705884722326784,
+ "learning_rate": 1.2428405921971476e-10,
+ "loss": 0.9066,
+ "step": 5189
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.934147776380278,
+ "learning_rate": 9.515502903734153e-11,
+ "loss": 0.8666,
+ "step": 5190
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8328196251792085,
+ "learning_rate": 6.990984666610523e-11,
+ "loss": 0.8147,
+ "step": 5191
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.7996135315556432,
+ "learning_rate": 4.854852191371606e-11,
+ "loss": 0.8158,
+ "step": 5192
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9653004972548954,
+ "learning_rate": 3.1071063075760466e-11,
+ "loss": 0.8766,
+ "step": 5193
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9397125711183194,
+ "learning_rate": 1.7477476940142013e-11,
+ "loss": 0.843,
+ "step": 5194
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.8668572789445694,
+ "learning_rate": 7.767768785971185e-12,
+ "loss": 0.9091,
+ "step": 5195
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.7413903676980861,
+ "learning_rate": 1.9419423846755993e-12,
+ "loss": 0.793,
+ "step": 5196
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.9667495980889257,
+ "learning_rate": 0.0,
+ "loss": 0.889,
+ "step": 5197
+ },
+ {
+ "epoch": 1.0,
+ "step": 5197,
+ "total_flos": 1.538457807708645e+19,
+ "train_loss": 0.9249397156925323,
+ "train_runtime": 60188.4906,
+ "train_samples_per_second": 11.054,
+ "train_steps_per_second": 0.086
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 250,
+ "total_flos": 1.538457807708645e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/06-10-24_sd2.1_llama7b_ft/training_args.bin b/06-10-24_sd2.1_llama7b_ft/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5a2e607501d89c4e47557ab2c7e396908001200
--- /dev/null
+++ b/06-10-24_sd2.1_llama7b_ft/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:188d084c2f1ada7677e2a10c9f767124701100542ce553b962997683d3747356
+size 6011
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1000/config.json b/06-13-24_XL_llava_llama7b/checkpoint-1000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1000/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1000/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-1000/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..35e545a134f140f73e9f74de945cebf07a98b119
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1000/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98d15005f4482a7bcd5ff4f39a82c65f690e1998a1c73a367cf5ab2006f47560
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1200/config.json b/06-13-24_XL_llava_llama7b/checkpoint-1200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1200/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1200/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-1200/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..40f05ce19746da5ad038df235b25b0554df0475b
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1200/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3645e59535f53d27fa19c09f0ac0f547c482ac6aa429fca371cec2f997dfd429
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1400/config.json b/06-13-24_XL_llava_llama7b/checkpoint-1400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1400/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1400/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-1400/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..c5f5f1e711538a3bc8c9e04e86a9a5b5f0ada8b5
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1400/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63afdbefa5fffc6a3aac6945daca57b188ef290682d5bda3bc8e4502d75734a6
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1600/config.json b/06-13-24_XL_llava_llama7b/checkpoint-1600/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1600/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1600/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-1600/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..d9e4f0fbdf8a9803102cfa19d7d1498e92370bca
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1600/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f57cd31b1993f08a77f625f386be39740ce58368cd8d3ef867a707af9b03846
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1800/config.json b/06-13-24_XL_llava_llama7b/checkpoint-1800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1800/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-1800/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-1800/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..723ca0a5d97706dac2bf2ec0f5aa244cb0f6e6c4
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-1800/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13a024a16549f827a3ed40fe94a549d6dd6001fa371ba19956d0b5c3db92b63d
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-200/config.json b/06-13-24_XL_llava_llama7b/checkpoint-200/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-200/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-200/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-200/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..d17e580f5645bf3c327000429ec53f5f159f4fc0
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-200/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e971e26d0c58a8461ce2837c5db37c5203f6b5d800c9ac8e6b0e327f931dbfd6
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-2000/config.json b/06-13-24_XL_llava_llama7b/checkpoint-2000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-2000/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-2000/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-2000/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..35eb2207d9a9816c7eed5b8dab93a57a99c1b834
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-2000/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b60acf3958cc4300a50ed9184e2173e1f7e1feedf42a9e207180ed08298dbdc1
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-400/config.json b/06-13-24_XL_llava_llama7b/checkpoint-400/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-400/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-400/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-400/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..093e27fc13530a8e38f9e06a9ade1f3b8637aac9
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-400/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5225ca2c21c9c54c1b5bae4b92f54356a92e78868fdfab316a1fb0ab428f4240
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-600/config.json b/06-13-24_XL_llava_llama7b/checkpoint-600/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-600/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-600/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-600/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..2a3ac89edb8ade14f1376453d12e41a0973005d7
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-600/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5eee6a1cc4749796271dcbefaa3d44fd8babfbef91d114b5d4136def05f86b35
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-800/config.json b/06-13-24_XL_llava_llama7b/checkpoint-800/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1eb144e8b20a37c6bc202da65b2ea343b9a99db1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-800/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/checkpoint-800/mm_projector.bin b/06-13-24_XL_llava_llama7b/checkpoint-800/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..10666e5b565af136c4588a03aff8b472edca9f43
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/checkpoint-800/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e6060383fbebf0073e744acef6cf947066a960a33d6019facd52089c83e6029
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/config.json b/06-13-24_XL_llava_llama7b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..516b195d23d394152432486d1e3ecf9bcec29b5d
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/config.json
@@ -0,0 +1,48 @@
+{
+ "_flash_attn_2_enabled": true,
+ "_name_or_path": "/mnt/bn/bohanzhainas1/Public_Models/llama-2_7B_hf",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "ensemble_size": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "square",
+ "image_grid_pinpoints": null,
+ "img_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1280,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "prompt": "",
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "t": 1,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "tune_mm_mlp_adapter": true,
+ "up_ft_index": 0,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vision_tower": "stabilityai/stable-diffusion-xl-base-1.0",
+ "vocab_size": 32000
+}
diff --git a/06-13-24_XL_llava_llama7b/mm_projector.bin b/06-13-24_XL_llava_llama7b/mm_projector.bin
new file mode 100644
index 0000000000000000000000000000000000000000..d1d1acc07dd27ec18944f741f723af7860e2fb11
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/mm_projector.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd80e831684cd6815c5b609ceed5a4a10d88d74007eef96a67e907c4198f97bf
+size 44058237
diff --git a/06-13-24_XL_llava_llama7b/trainer_state.json b/06-13-24_XL_llava_llama7b/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..9122005d82ab3a7aeba6aaf81da6bc9e085518b1
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b/trainer_state.json
@@ -0,0 +1,15290 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.9998853342506593,
+ "eval_steps": 500,
+ "global_step": 2180,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "grad_norm": 82.95042504892388,
+ "learning_rate": 1.5151515151515153e-05,
+ "loss": 10.2527,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 82.634932522638,
+ "learning_rate": 3.0303030303030306e-05,
+ "loss": 10.3101,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 59.38979487176437,
+ "learning_rate": 4.545454545454546e-05,
+ "loss": 9.4612,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 45.30988225692408,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 7.3369,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 20.73490168789013,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 6.4832,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 17.60709753388363,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 6.0726,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 14.659560795415013,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 5.8327,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 15.751176871384166,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 5.8578,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 12.557351640349488,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 5.5952,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "grad_norm": 6.623609285962712,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 5.337,
+ "step": 10
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 4.471177355492287,
+ "learning_rate": 0.00016666666666666666,
+ "loss": 5.2032,
+ "step": 11
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 4.1333928108391085,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 5.1763,
+ "step": 12
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 4.18549508724102,
+ "learning_rate": 0.00019696969696969695,
+ "loss": 5.0303,
+ "step": 13
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 3.6006917274328964,
+ "learning_rate": 0.00021212121212121213,
+ "loss": 5.1017,
+ "step": 14
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 3.1435469313041,
+ "learning_rate": 0.00022727272727272727,
+ "loss": 5.0118,
+ "step": 15
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.994855336823533,
+ "learning_rate": 0.00024242424242424245,
+ "loss": 4.9557,
+ "step": 16
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.6131030295208313,
+ "learning_rate": 0.00025757575757575756,
+ "loss": 4.8665,
+ "step": 17
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.4036496870065043,
+ "learning_rate": 0.00027272727272727274,
+ "loss": 4.849,
+ "step": 18
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.346828766867057,
+ "learning_rate": 0.0002878787878787879,
+ "loss": 4.7954,
+ "step": 19
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 2.0072967735884673,
+ "learning_rate": 0.00030303030303030303,
+ "loss": 4.5698,
+ "step": 20
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.8653457431256537,
+ "learning_rate": 0.0003181818181818182,
+ "loss": 4.6119,
+ "step": 21
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.657378223951846,
+ "learning_rate": 0.0003333333333333333,
+ "loss": 4.6384,
+ "step": 22
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.508574856291383,
+ "learning_rate": 0.0003484848484848485,
+ "loss": 4.4947,
+ "step": 23
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.512618896645684,
+ "learning_rate": 0.00036363636363636367,
+ "loss": 4.4894,
+ "step": 24
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2561733765065244,
+ "learning_rate": 0.0003787878787878788,
+ "loss": 4.4936,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3010975917353673,
+ "learning_rate": 0.0003939393939393939,
+ "loss": 4.3015,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.439918891147037,
+ "learning_rate": 0.00040909090909090913,
+ "loss": 4.2854,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.2229129739557523,
+ "learning_rate": 0.00042424242424242425,
+ "loss": 4.3152,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3207747389141722,
+ "learning_rate": 0.0004393939393939394,
+ "loss": 4.31,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.4141744751584322,
+ "learning_rate": 0.00045454545454545455,
+ "loss": 4.2469,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.3126980101093566,
+ "learning_rate": 0.0004696969696969697,
+ "loss": 4.1531,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "grad_norm": 1.1515487556436643,
+ "learning_rate": 0.0004848484848484849,
+ "loss": 4.1459,
+ "step": 32
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.0839462546825658,
+ "learning_rate": 0.0005,
+ "loss": 4.1853,
+ "step": 33
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 1.1174369970212654,
+ "learning_rate": 0.0005151515151515151,
+ "loss": 3.9901,
+ "step": 34
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.8991452525125105,
+ "learning_rate": 0.0005303030303030302,
+ "loss": 4.0768,
+ "step": 35
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9217705652374459,
+ "learning_rate": 0.0005454545454545455,
+ "loss": 4.0243,
+ "step": 36
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9692396150456398,
+ "learning_rate": 0.0005606060606060606,
+ "loss": 3.9632,
+ "step": 37
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.8947883586802508,
+ "learning_rate": 0.0005757575757575758,
+ "loss": 3.9983,
+ "step": 38
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7931454653044937,
+ "learning_rate": 0.0005909090909090909,
+ "loss": 3.8517,
+ "step": 39
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.9458623829252094,
+ "learning_rate": 0.0006060606060606061,
+ "loss": 3.8583,
+ "step": 40
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7957216586548687,
+ "learning_rate": 0.0006212121212121212,
+ "loss": 3.8007,
+ "step": 41
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7306514455528892,
+ "learning_rate": 0.0006363636363636364,
+ "loss": 3.8011,
+ "step": 42
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7758085901387844,
+ "learning_rate": 0.0006515151515151515,
+ "loss": 3.7763,
+ "step": 43
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7763351537262178,
+ "learning_rate": 0.0006666666666666666,
+ "loss": 3.7814,
+ "step": 44
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6241573491178961,
+ "learning_rate": 0.0006818181818181818,
+ "loss": 3.7329,
+ "step": 45
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.7030848764774981,
+ "learning_rate": 0.000696969696969697,
+ "loss": 3.6096,
+ "step": 46
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6758237052615907,
+ "learning_rate": 0.0007121212121212122,
+ "loss": 3.6531,
+ "step": 47
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6140459112609652,
+ "learning_rate": 0.0007272727272727273,
+ "loss": 3.6726,
+ "step": 48
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6039650058864519,
+ "learning_rate": 0.0007424242424242425,
+ "loss": 3.5842,
+ "step": 49
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6269049196775377,
+ "learning_rate": 0.0007575757575757576,
+ "loss": 3.5942,
+ "step": 50
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.5902358030487995,
+ "learning_rate": 0.0007727272727272727,
+ "loss": 3.5121,
+ "step": 51
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.5412101536670967,
+ "learning_rate": 0.0007878787878787878,
+ "loss": 3.6796,
+ "step": 52
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.5562674203938032,
+ "learning_rate": 0.000803030303030303,
+ "loss": 3.552,
+ "step": 53
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 0.6363067536462007,
+ "learning_rate": 0.0008181818181818183,
+ "loss": 3.6345,
+ "step": 54
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.5224737046467328,
+ "learning_rate": 0.0008333333333333334,
+ "loss": 3.5754,
+ "step": 55
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.567462218285191,
+ "learning_rate": 0.0008484848484848485,
+ "loss": 3.4982,
+ "step": 56
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.5107192708394519,
+ "learning_rate": 0.0008636363636363636,
+ "loss": 3.5486,
+ "step": 57
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.4523602848121307,
+ "learning_rate": 0.0008787878787878789,
+ "loss": 3.5356,
+ "step": 58
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.46972095535862,
+ "learning_rate": 0.000893939393939394,
+ "loss": 3.3634,
+ "step": 59
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.43189992399796573,
+ "learning_rate": 0.0009090909090909091,
+ "loss": 3.3904,
+ "step": 60
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.45454507127246835,
+ "learning_rate": 0.0009242424242424242,
+ "loss": 3.5306,
+ "step": 61
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.4333834503064586,
+ "learning_rate": 0.0009393939393939394,
+ "loss": 3.464,
+ "step": 62
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.430370314226551,
+ "learning_rate": 0.0009545454545454546,
+ "loss": 3.5201,
+ "step": 63
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.4070857805816067,
+ "learning_rate": 0.0009696969696969698,
+ "loss": 3.401,
+ "step": 64
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.3902441892380618,
+ "learning_rate": 0.000984848484848485,
+ "loss": 3.388,
+ "step": 65
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.36684798610976865,
+ "learning_rate": 0.001,
+ "loss": 3.3986,
+ "step": 66
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.3748246948706842,
+ "learning_rate": 0.0009999994478847943,
+ "loss": 3.4062,
+ "step": 67
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.38689305793143314,
+ "learning_rate": 0.0009999977915403962,
+ "loss": 3.4211,
+ "step": 68
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.36836414708887383,
+ "learning_rate": 0.0009999950309704639,
+ "loss": 3.4363,
+ "step": 69
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.3842648842329544,
+ "learning_rate": 0.000999991166181094,
+ "loss": 3.4129,
+ "step": 70
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.4139438816846334,
+ "learning_rate": 0.0009999861971808216,
+ "loss": 3.3786,
+ "step": 71
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.3655164012498137,
+ "learning_rate": 0.0009999801239806208,
+ "loss": 3.3682,
+ "step": 72
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.31760042009982875,
+ "learning_rate": 0.0009999729465939035,
+ "loss": 3.2977,
+ "step": 73
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.37437452002795735,
+ "learning_rate": 0.0009999646650365212,
+ "loss": 3.331,
+ "step": 74
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.38632961474556765,
+ "learning_rate": 0.0009999552793267634,
+ "loss": 3.2532,
+ "step": 75
+ },
+ {
+ "epoch": 0.03,
+ "grad_norm": 0.350746962296021,
+ "learning_rate": 0.0009999447894853577,
+ "loss": 3.4019,
+ "step": 76
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3330969138225619,
+ "learning_rate": 0.0009999331955354708,
+ "loss": 3.4745,
+ "step": 77
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3450426649810674,
+ "learning_rate": 0.0009999204975027073,
+ "loss": 3.2767,
+ "step": 78
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3414207348247738,
+ "learning_rate": 0.0009999066954151103,
+ "loss": 3.3253,
+ "step": 79
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.32860562975299573,
+ "learning_rate": 0.0009998917893031614,
+ "loss": 3.3314,
+ "step": 80
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.30675058337636746,
+ "learning_rate": 0.0009998757791997801,
+ "loss": 3.336,
+ "step": 81
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3544647815926881,
+ "learning_rate": 0.0009998586651403238,
+ "loss": 3.3244,
+ "step": 82
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3221893415119645,
+ "learning_rate": 0.0009998404471625885,
+ "loss": 3.2662,
+ "step": 83
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3241632979754274,
+ "learning_rate": 0.0009998211253068078,
+ "loss": 3.4099,
+ "step": 84
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3432838076210636,
+ "learning_rate": 0.0009998006996156535,
+ "loss": 3.2894,
+ "step": 85
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.32462687705170473,
+ "learning_rate": 0.0009997791701342347,
+ "loss": 3.3791,
+ "step": 86
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3632567456647841,
+ "learning_rate": 0.0009997565369100983,
+ "loss": 3.3034,
+ "step": 87
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.30859191730940755,
+ "learning_rate": 0.0009997327999932291,
+ "loss": 3.289,
+ "step": 88
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.36227030975870295,
+ "learning_rate": 0.000999707959436049,
+ "loss": 3.3113,
+ "step": 89
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.31358727930263836,
+ "learning_rate": 0.0009996820152934176,
+ "loss": 3.2404,
+ "step": 90
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3714043672867009,
+ "learning_rate": 0.000999654967622631,
+ "loss": 3.3233,
+ "step": 91
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3365585575968304,
+ "learning_rate": 0.0009996268164834238,
+ "loss": 3.2952,
+ "step": 92
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3209548476981024,
+ "learning_rate": 0.000999597561937966,
+ "loss": 3.2823,
+ "step": 93
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.44443599223092217,
+ "learning_rate": 0.0009995672040508656,
+ "loss": 3.2045,
+ "step": 94
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.37018119167061425,
+ "learning_rate": 0.0009995357428891662,
+ "loss": 3.2339,
+ "step": 95
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3997932214917397,
+ "learning_rate": 0.0009995031785223491,
+ "loss": 3.2935,
+ "step": 96
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.3476568391682883,
+ "learning_rate": 0.000999469511022331,
+ "loss": 3.247,
+ "step": 97
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 0.38919076677780207,
+ "learning_rate": 0.0009994347404634657,
+ "loss": 3.2571,
+ "step": 98
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3348352205240213,
+ "learning_rate": 0.0009993988669225423,
+ "loss": 3.2614,
+ "step": 99
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3644092409271942,
+ "learning_rate": 0.000999361890478786,
+ "loss": 3.2118,
+ "step": 100
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.36760257059072127,
+ "learning_rate": 0.0009993238112138583,
+ "loss": 3.2362,
+ "step": 101
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3240184012151109,
+ "learning_rate": 0.0009992846292118554,
+ "loss": 3.2361,
+ "step": 102
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3269159342841765,
+ "learning_rate": 0.000999244344559309,
+ "loss": 3.2569,
+ "step": 103
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.4242037595737302,
+ "learning_rate": 0.0009992029573451869,
+ "loss": 3.1982,
+ "step": 104
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.39979388743208516,
+ "learning_rate": 0.0009991604676608905,
+ "loss": 3.1784,
+ "step": 105
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3481968028656099,
+ "learning_rate": 0.0009991168756002568,
+ "loss": 3.2969,
+ "step": 106
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3313078664749429,
+ "learning_rate": 0.0009990721812595574,
+ "loss": 3.1431,
+ "step": 107
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.40622321803546924,
+ "learning_rate": 0.0009990263847374976,
+ "loss": 3.2725,
+ "step": 108
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.4072712536663883,
+ "learning_rate": 0.0009989794861352173,
+ "loss": 3.1953,
+ "step": 109
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.4257972215354896,
+ "learning_rate": 0.0009989314855562905,
+ "loss": 3.3183,
+ "step": 110
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.35612820816498447,
+ "learning_rate": 0.0009988823831067245,
+ "loss": 3.2339,
+ "step": 111
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3780899191640233,
+ "learning_rate": 0.0009988321788949597,
+ "loss": 3.2658,
+ "step": 112
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3585646473716321,
+ "learning_rate": 0.0009987808730318709,
+ "loss": 3.268,
+ "step": 113
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3308050111655194,
+ "learning_rate": 0.0009987284656307644,
+ "loss": 3.2384,
+ "step": 114
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.38714683287773644,
+ "learning_rate": 0.0009986749568073802,
+ "loss": 3.2472,
+ "step": 115
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.3611136394905656,
+ "learning_rate": 0.0009986203466798905,
+ "loss": 3.2638,
+ "step": 116
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.4028315098952853,
+ "learning_rate": 0.0009985646353688996,
+ "loss": 3.2306,
+ "step": 117
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.37519788342795735,
+ "learning_rate": 0.0009985078229974437,
+ "loss": 3.2431,
+ "step": 118
+ },
+ {
+ "epoch": 0.05,
+ "grad_norm": 0.44015629329538253,
+ "learning_rate": 0.0009984499096909905,
+ "loss": 3.1234,
+ "step": 119
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.38116146787362765,
+ "learning_rate": 0.0009983908955774397,
+ "loss": 3.2417,
+ "step": 120
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.35083636950449976,
+ "learning_rate": 0.0009983307807871211,
+ "loss": 3.2439,
+ "step": 121
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.35768596507000633,
+ "learning_rate": 0.0009982695654527965,
+ "loss": 3.2685,
+ "step": 122
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.38409069670430745,
+ "learning_rate": 0.0009982072497096571,
+ "loss": 3.2577,
+ "step": 123
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.325030479031564,
+ "learning_rate": 0.000998143833695325,
+ "loss": 3.1419,
+ "step": 124
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.34186076486867273,
+ "learning_rate": 0.0009980793175498517,
+ "loss": 3.2149,
+ "step": 125
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4305950083005351,
+ "learning_rate": 0.000998013701415719,
+ "loss": 3.2738,
+ "step": 126
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3523796900377572,
+ "learning_rate": 0.0009979469854378372,
+ "loss": 3.247,
+ "step": 127
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4138432322336785,
+ "learning_rate": 0.000997879169763546,
+ "loss": 3.2371,
+ "step": 128
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4275854926997472,
+ "learning_rate": 0.000997810254542614,
+ "loss": 3.1366,
+ "step": 129
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.45822447879154155,
+ "learning_rate": 0.0009977402399272374,
+ "loss": 3.3312,
+ "step": 130
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.36552121871476567,
+ "learning_rate": 0.0009976691260720407,
+ "loss": 3.1981,
+ "step": 131
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.424912645043032,
+ "learning_rate": 0.0009975969131340763,
+ "loss": 3.2396,
+ "step": 132
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4696508041944591,
+ "learning_rate": 0.0009975236012728236,
+ "loss": 3.2391,
+ "step": 133
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.37162035902748786,
+ "learning_rate": 0.0009974491906501886,
+ "loss": 3.1937,
+ "step": 134
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.3789688972753476,
+ "learning_rate": 0.0009973736814305049,
+ "loss": 3.183,
+ "step": 135
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.477371287222042,
+ "learning_rate": 0.0009972970737805312,
+ "loss": 3.2383,
+ "step": 136
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4192238355119607,
+ "learning_rate": 0.0009972193678694525,
+ "loss": 3.2077,
+ "step": 137
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4097754814835854,
+ "learning_rate": 0.0009971405638688794,
+ "loss": 3.1663,
+ "step": 138
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4063648246679404,
+ "learning_rate": 0.0009970606619528475,
+ "loss": 3.1732,
+ "step": 139
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4571912921126169,
+ "learning_rate": 0.000996979662297817,
+ "loss": 3.2438,
+ "step": 140
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 0.4358624955959288,
+ "learning_rate": 0.0009968975650826721,
+ "loss": 3.3695,
+ "step": 141
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.40133556665542536,
+ "learning_rate": 0.000996814370488722,
+ "loss": 3.2588,
+ "step": 142
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.47379758831256114,
+ "learning_rate": 0.000996730078699698,
+ "loss": 3.2042,
+ "step": 143
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4033536379862544,
+ "learning_rate": 0.0009966446899017558,
+ "loss": 3.2582,
+ "step": 144
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.38717560919840516,
+ "learning_rate": 0.0009965582042834728,
+ "loss": 3.2198,
+ "step": 145
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3983717751362564,
+ "learning_rate": 0.0009964706220358492,
+ "loss": 3.13,
+ "step": 146
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.398321585295696,
+ "learning_rate": 0.000996381943352307,
+ "loss": 3.1809,
+ "step": 147
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.38508766872231004,
+ "learning_rate": 0.0009962921684286896,
+ "loss": 3.1459,
+ "step": 148
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4196782794416121,
+ "learning_rate": 0.0009962012974632614,
+ "loss": 3.3174,
+ "step": 149
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.38312506901587917,
+ "learning_rate": 0.0009961093306567075,
+ "loss": 3.1609,
+ "step": 150
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3834026463850755,
+ "learning_rate": 0.0009960162682121328,
+ "loss": 3.1727,
+ "step": 151
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.37212533331129016,
+ "learning_rate": 0.0009959221103350623,
+ "loss": 3.2118,
+ "step": 152
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3585062598718845,
+ "learning_rate": 0.0009958268572334394,
+ "loss": 3.2316,
+ "step": 153
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.41745934958868924,
+ "learning_rate": 0.0009957305091176274,
+ "loss": 3.2481,
+ "step": 154
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.34402519353680516,
+ "learning_rate": 0.0009956330662004075,
+ "loss": 3.1339,
+ "step": 155
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.42000843418190076,
+ "learning_rate": 0.0009955345286969779,
+ "loss": 3.1269,
+ "step": 156
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3405992400300467,
+ "learning_rate": 0.0009954348968249551,
+ "loss": 3.2557,
+ "step": 157
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.39744079187691733,
+ "learning_rate": 0.0009953341708043724,
+ "loss": 3.3068,
+ "step": 158
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3404108657386337,
+ "learning_rate": 0.0009952323508576793,
+ "loss": 3.1693,
+ "step": 159
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3950684579015263,
+ "learning_rate": 0.0009951294372097406,
+ "loss": 3.1515,
+ "step": 160
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.4482362340996043,
+ "learning_rate": 0.0009950254300878378,
+ "loss": 3.1808,
+ "step": 161
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.38020677113644147,
+ "learning_rate": 0.000994920329721666,
+ "loss": 3.1741,
+ "step": 162
+ },
+ {
+ "epoch": 0.07,
+ "grad_norm": 0.3599945744609763,
+ "learning_rate": 0.0009948141363433356,
+ "loss": 3.1914,
+ "step": 163
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.40622969230815925,
+ "learning_rate": 0.00099470685018737,
+ "loss": 3.0915,
+ "step": 164
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4285939608121229,
+ "learning_rate": 0.0009945984714907073,
+ "loss": 3.1998,
+ "step": 165
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.45402284588934233,
+ "learning_rate": 0.000994489000492697,
+ "loss": 3.125,
+ "step": 166
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3504381721917374,
+ "learning_rate": 0.0009943784374351016,
+ "loss": 3.0695,
+ "step": 167
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.41291307737428556,
+ "learning_rate": 0.0009942667825620951,
+ "loss": 3.145,
+ "step": 168
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4121843883779206,
+ "learning_rate": 0.0009941540361202634,
+ "loss": 3.154,
+ "step": 169
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.42824905618839176,
+ "learning_rate": 0.0009940401983586022,
+ "loss": 3.1346,
+ "step": 170
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.45579503995367526,
+ "learning_rate": 0.000993925269528518,
+ "loss": 3.1215,
+ "step": 171
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4267455863227672,
+ "learning_rate": 0.0009938092498838265,
+ "loss": 3.1676,
+ "step": 172
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.47980485147618696,
+ "learning_rate": 0.0009936921396807524,
+ "loss": 3.1425,
+ "step": 173
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.40237071602296964,
+ "learning_rate": 0.0009935739391779292,
+ "loss": 3.1554,
+ "step": 174
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.43339561257680276,
+ "learning_rate": 0.000993454648636398,
+ "loss": 3.0667,
+ "step": 175
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.45262843009609055,
+ "learning_rate": 0.0009933342683196074,
+ "loss": 3.2084,
+ "step": 176
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.42919683421217986,
+ "learning_rate": 0.0009932127984934125,
+ "loss": 3.1305,
+ "step": 177
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.42736970339734914,
+ "learning_rate": 0.0009930902394260745,
+ "loss": 3.1811,
+ "step": 178
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.47906720240389955,
+ "learning_rate": 0.0009929665913882607,
+ "loss": 3.1476,
+ "step": 179
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.45727421220655834,
+ "learning_rate": 0.0009928418546530425,
+ "loss": 3.1152,
+ "step": 180
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.3964710652365098,
+ "learning_rate": 0.0009927160294958964,
+ "loss": 3.1728,
+ "step": 181
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.43066772330863295,
+ "learning_rate": 0.000992589116194702,
+ "loss": 3.0938,
+ "step": 182
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4278315161148297,
+ "learning_rate": 0.000992461115029743,
+ "loss": 3.2027,
+ "step": 183
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.4165564813405439,
+ "learning_rate": 0.000992332026283704,
+ "loss": 3.1369,
+ "step": 184
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.44490859123358745,
+ "learning_rate": 0.0009922018502416736,
+ "loss": 3.1791,
+ "step": 185
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4297239385990805,
+ "learning_rate": 0.0009920705871911395,
+ "loss": 3.0978,
+ "step": 186
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4164329344297113,
+ "learning_rate": 0.0009919382374219915,
+ "loss": 3.0626,
+ "step": 187
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4240943922812887,
+ "learning_rate": 0.0009918048012265187,
+ "loss": 3.1073,
+ "step": 188
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.37505297434649765,
+ "learning_rate": 0.0009916702788994097,
+ "loss": 3.0991,
+ "step": 189
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.39210701995667224,
+ "learning_rate": 0.0009915346707377519,
+ "loss": 3.0966,
+ "step": 190
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.38874774590113353,
+ "learning_rate": 0.0009913979770410305,
+ "loss": 3.0343,
+ "step": 191
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4378272803448782,
+ "learning_rate": 0.0009912601981111285,
+ "loss": 3.1417,
+ "step": 192
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.41803064162729764,
+ "learning_rate": 0.0009911213342523248,
+ "loss": 3.141,
+ "step": 193
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.38800768847334605,
+ "learning_rate": 0.000990981385771295,
+ "loss": 3.1103,
+ "step": 194
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4158892914115033,
+ "learning_rate": 0.00099084035297711,
+ "loss": 3.2183,
+ "step": 195
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.43483103388982164,
+ "learning_rate": 0.000990698236181235,
+ "loss": 3.1441,
+ "step": 196
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.400079061193393,
+ "learning_rate": 0.0009905550356975293,
+ "loss": 3.134,
+ "step": 197
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4373421005675195,
+ "learning_rate": 0.0009904107518422457,
+ "loss": 3.08,
+ "step": 198
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4658531688683174,
+ "learning_rate": 0.0009902653849340295,
+ "loss": 3.1103,
+ "step": 199
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.5014574119656803,
+ "learning_rate": 0.0009901189352939177,
+ "loss": 3.2289,
+ "step": 200
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4554518699498125,
+ "learning_rate": 0.0009899714032453387,
+ "loss": 3.2423,
+ "step": 201
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4694856298039221,
+ "learning_rate": 0.000989822789114111,
+ "loss": 3.148,
+ "step": 202
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.41830359322807736,
+ "learning_rate": 0.0009896730932284434,
+ "loss": 3.1455,
+ "step": 203
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.44864494604252647,
+ "learning_rate": 0.0009895223159189332,
+ "loss": 3.1112,
+ "step": 204
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4187237689782655,
+ "learning_rate": 0.0009893704575185663,
+ "loss": 3.215,
+ "step": 205
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.38269763029338416,
+ "learning_rate": 0.000989217518362716,
+ "loss": 3.1615,
+ "step": 206
+ },
+ {
+ "epoch": 0.09,
+ "grad_norm": 0.4429232685444736,
+ "learning_rate": 0.0009890634987891425,
+ "loss": 3.1956,
+ "step": 207
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.5060709084581992,
+ "learning_rate": 0.0009889083991379917,
+ "loss": 3.1378,
+ "step": 208
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4049674656253958,
+ "learning_rate": 0.0009887522197517954,
+ "loss": 3.0508,
+ "step": 209
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.40092360523144455,
+ "learning_rate": 0.0009885949609754693,
+ "loss": 3.1287,
+ "step": 210
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.508749976861702,
+ "learning_rate": 0.000988436623156314,
+ "loss": 3.1596,
+ "step": 211
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.45643647222065215,
+ "learning_rate": 0.0009882772066440114,
+ "loss": 3.1289,
+ "step": 212
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.39709749740612094,
+ "learning_rate": 0.0009881167117906276,
+ "loss": 3.1457,
+ "step": 213
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.5008607848796507,
+ "learning_rate": 0.0009879551389506084,
+ "loss": 3.1286,
+ "step": 214
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4718674449530359,
+ "learning_rate": 0.0009877924884807814,
+ "loss": 3.0666,
+ "step": 215
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.45993404115378045,
+ "learning_rate": 0.000987628760740354,
+ "loss": 3.0883,
+ "step": 216
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.41001673991185883,
+ "learning_rate": 0.0009874639560909118,
+ "loss": 3.1061,
+ "step": 217
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.45676599715528154,
+ "learning_rate": 0.0009872980748964202,
+ "loss": 3.0723,
+ "step": 218
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4774796859660166,
+ "learning_rate": 0.000987131117523221,
+ "loss": 3.1656,
+ "step": 219
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.39048001670056587,
+ "learning_rate": 0.000986963084340033,
+ "loss": 3.1426,
+ "step": 220
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4231483499082633,
+ "learning_rate": 0.0009867939757179508,
+ "loss": 3.1827,
+ "step": 221
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4727524817393217,
+ "learning_rate": 0.0009866237920304443,
+ "loss": 3.106,
+ "step": 222
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4194575418604502,
+ "learning_rate": 0.0009864525336533577,
+ "loss": 3.1018,
+ "step": 223
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.5006044290698525,
+ "learning_rate": 0.000986280200964908,
+ "loss": 3.0744,
+ "step": 224
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.4401855145250986,
+ "learning_rate": 0.0009861067943456856,
+ "loss": 3.08,
+ "step": 225
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.44489706477495233,
+ "learning_rate": 0.000985932314178652,
+ "loss": 3.103,
+ "step": 226
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.40791050146457725,
+ "learning_rate": 0.00098575676084914,
+ "loss": 3.0859,
+ "step": 227
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.42377460089817603,
+ "learning_rate": 0.0009855801347448518,
+ "loss": 3.0935,
+ "step": 228
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.45592980379171116,
+ "learning_rate": 0.0009854024362558596,
+ "loss": 3.1125,
+ "step": 229
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4118101691213073,
+ "learning_rate": 0.0009852236657746035,
+ "loss": 3.1589,
+ "step": 230
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5272881338571506,
+ "learning_rate": 0.0009850438236958911,
+ "loss": 3.0957,
+ "step": 231
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4490047956941026,
+ "learning_rate": 0.0009848629104168966,
+ "loss": 3.1208,
+ "step": 232
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.43782874471630884,
+ "learning_rate": 0.00098468092633716,
+ "loss": 3.0995,
+ "step": 233
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.48116080637997155,
+ "learning_rate": 0.0009844978718585855,
+ "loss": 3.0628,
+ "step": 234
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.542738686660602,
+ "learning_rate": 0.0009843137473854423,
+ "loss": 3.0773,
+ "step": 235
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5344688972610916,
+ "learning_rate": 0.000984128553324362,
+ "loss": 3.1078,
+ "step": 236
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.45444094037694366,
+ "learning_rate": 0.0009839422900843383,
+ "loss": 3.0386,
+ "step": 237
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.493470439418435,
+ "learning_rate": 0.0009837549580767261,
+ "loss": 3.0623,
+ "step": 238
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5418050329289771,
+ "learning_rate": 0.0009835665577152411,
+ "loss": 3.0467,
+ "step": 239
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5055816469702586,
+ "learning_rate": 0.000983377089415958,
+ "loss": 3.1215,
+ "step": 240
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4359793703137916,
+ "learning_rate": 0.0009831865535973102,
+ "loss": 3.097,
+ "step": 241
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5920888473542507,
+ "learning_rate": 0.0009829949506800885,
+ "loss": 3.1075,
+ "step": 242
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5313870573864672,
+ "learning_rate": 0.0009828022810874405,
+ "loss": 3.1538,
+ "step": 243
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.49934752960555806,
+ "learning_rate": 0.0009826085452448693,
+ "loss": 3.1054,
+ "step": 244
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.43099905853553383,
+ "learning_rate": 0.000982413743580233,
+ "loss": 3.0721,
+ "step": 245
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.43121090914085797,
+ "learning_rate": 0.0009822178765237436,
+ "loss": 3.0757,
+ "step": 246
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.481990293258324,
+ "learning_rate": 0.0009820209445079654,
+ "loss": 3.096,
+ "step": 247
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.5061323287985197,
+ "learning_rate": 0.0009818229479678158,
+ "loss": 3.1012,
+ "step": 248
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.4785333831749571,
+ "learning_rate": 0.0009816238873405615,
+ "loss": 3.1351,
+ "step": 249
+ },
+ {
+ "epoch": 0.11,
+ "grad_norm": 0.46682488420438106,
+ "learning_rate": 0.0009814237630658207,
+ "loss": 3.1114,
+ "step": 250
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5072455535157702,
+ "learning_rate": 0.00098122257558556,
+ "loss": 3.2007,
+ "step": 251
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.40041882998300415,
+ "learning_rate": 0.0009810203253440937,
+ "loss": 3.123,
+ "step": 252
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4496159830126479,
+ "learning_rate": 0.0009808170127880837,
+ "loss": 3.1186,
+ "step": 253
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.484003873950594,
+ "learning_rate": 0.000980612638366538,
+ "loss": 3.0687,
+ "step": 254
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.42442397592498443,
+ "learning_rate": 0.0009804072025308096,
+ "loss": 3.168,
+ "step": 255
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.46377527750237707,
+ "learning_rate": 0.000980200705734595,
+ "loss": 3.1004,
+ "step": 256
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4446499437423701,
+ "learning_rate": 0.0009799931484339344,
+ "loss": 3.0519,
+ "step": 257
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5041640794359902,
+ "learning_rate": 0.0009797845310872103,
+ "loss": 3.1724,
+ "step": 258
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.47835857384331953,
+ "learning_rate": 0.0009795748541551457,
+ "loss": 3.1003,
+ "step": 259
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.434898596894168,
+ "learning_rate": 0.000979364118100804,
+ "loss": 3.1418,
+ "step": 260
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4318498174627635,
+ "learning_rate": 0.0009791523233895875,
+ "loss": 3.0687,
+ "step": 261
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4318970309259732,
+ "learning_rate": 0.0009789394704892364,
+ "loss": 3.0405,
+ "step": 262
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.476703679735708,
+ "learning_rate": 0.0009787255598698282,
+ "loss": 3.1871,
+ "step": 263
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.41057241553203194,
+ "learning_rate": 0.0009785105920037758,
+ "loss": 3.0853,
+ "step": 264
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4835283589151547,
+ "learning_rate": 0.0009782945673658275,
+ "loss": 3.0942,
+ "step": 265
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5385887655656103,
+ "learning_rate": 0.0009780774864330654,
+ "loss": 3.1635,
+ "step": 266
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4195330117826838,
+ "learning_rate": 0.000977859349684904,
+ "loss": 3.1021,
+ "step": 267
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.43818416797584986,
+ "learning_rate": 0.00097764015760309,
+ "loss": 3.0344,
+ "step": 268
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5162035278358195,
+ "learning_rate": 0.0009774199106717004,
+ "loss": 3.1493,
+ "step": 269
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.4567388393472109,
+ "learning_rate": 0.0009771986093771417,
+ "loss": 3.1545,
+ "step": 270
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.49344034923911934,
+ "learning_rate": 0.0009769762542081496,
+ "loss": 3.1004,
+ "step": 271
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.5034562936667282,
+ "learning_rate": 0.000976752845655786,
+ "loss": 3.0405,
+ "step": 272
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.44110073582390935,
+ "learning_rate": 0.0009765283842134411,
+ "loss": 3.0355,
+ "step": 273
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.3962732003629882,
+ "learning_rate": 0.0009763028703768282,
+ "loss": 3.0247,
+ "step": 274
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4200951475097383,
+ "learning_rate": 0.0009760763046439862,
+ "loss": 2.9413,
+ "step": 275
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4394458881335293,
+ "learning_rate": 0.0009758486875152766,
+ "loss": 3.0569,
+ "step": 276
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4576215314085917,
+ "learning_rate": 0.0009756200194933829,
+ "loss": 3.0147,
+ "step": 277
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4543258327166807,
+ "learning_rate": 0.0009753903010833094,
+ "loss": 3.0853,
+ "step": 278
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5150532889800652,
+ "learning_rate": 0.0009751595327923803,
+ "loss": 3.0914,
+ "step": 279
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4791776603488495,
+ "learning_rate": 0.0009749277151302382,
+ "loss": 3.0906,
+ "step": 280
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5305733772036967,
+ "learning_rate": 0.0009746948486088435,
+ "loss": 3.1035,
+ "step": 281
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4976368794364665,
+ "learning_rate": 0.0009744609337424727,
+ "loss": 3.0905,
+ "step": 282
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5990072746458743,
+ "learning_rate": 0.0009742259710477177,
+ "loss": 3.0403,
+ "step": 283
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.457180281105584,
+ "learning_rate": 0.0009739899610434841,
+ "loss": 3.0107,
+ "step": 284
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5691034753532734,
+ "learning_rate": 0.0009737529042509913,
+ "loss": 3.1911,
+ "step": 285
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.43104504062352855,
+ "learning_rate": 0.0009735148011937693,
+ "loss": 3.0662,
+ "step": 286
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4464040256436296,
+ "learning_rate": 0.00097327565239766,
+ "loss": 3.0768,
+ "step": 287
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.48121574776448583,
+ "learning_rate": 0.0009730354583908136,
+ "loss": 3.0723,
+ "step": 288
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4492604734314535,
+ "learning_rate": 0.0009727942197036895,
+ "loss": 3.1109,
+ "step": 289
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4751256904836382,
+ "learning_rate": 0.0009725519368690539,
+ "loss": 3.0962,
+ "step": 290
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.48966108126015545,
+ "learning_rate": 0.0009723086104219787,
+ "loss": 3.0486,
+ "step": 291
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4415086536993296,
+ "learning_rate": 0.0009720642408998409,
+ "loss": 3.0539,
+ "step": 292
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.4955858288652964,
+ "learning_rate": 0.0009718188288423211,
+ "loss": 2.9687,
+ "step": 293
+ },
+ {
+ "epoch": 0.13,
+ "grad_norm": 0.5635385639569106,
+ "learning_rate": 0.0009715723747914022,
+ "loss": 3.0044,
+ "step": 294
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5040733177428176,
+ "learning_rate": 0.0009713248792913685,
+ "loss": 2.9573,
+ "step": 295
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.48763789572095345,
+ "learning_rate": 0.0009710763428888037,
+ "loss": 3.0118,
+ "step": 296
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5089192549992846,
+ "learning_rate": 0.0009708267661325909,
+ "loss": 3.0716,
+ "step": 297
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5704575486009696,
+ "learning_rate": 0.0009705761495739107,
+ "loss": 3.074,
+ "step": 298
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4982231702246244,
+ "learning_rate": 0.0009703244937662399,
+ "loss": 2.9961,
+ "step": 299
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.47686210036848165,
+ "learning_rate": 0.0009700717992653505,
+ "loss": 3.0401,
+ "step": 300
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5043821939077378,
+ "learning_rate": 0.0009698180666293083,
+ "loss": 3.0293,
+ "step": 301
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.51108815454753,
+ "learning_rate": 0.000969563296418472,
+ "loss": 3.0199,
+ "step": 302
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.48029497523757336,
+ "learning_rate": 0.0009693074891954914,
+ "loss": 3.1206,
+ "step": 303
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.48929534763175214,
+ "learning_rate": 0.0009690506455253072,
+ "loss": 3.1588,
+ "step": 304
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4651349084870595,
+ "learning_rate": 0.0009687927659751481,
+ "loss": 3.0654,
+ "step": 305
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4412143183011205,
+ "learning_rate": 0.0009685338511145312,
+ "loss": 3.0085,
+ "step": 306
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5047303894510224,
+ "learning_rate": 0.0009682739015152598,
+ "loss": 2.9846,
+ "step": 307
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4439234300306221,
+ "learning_rate": 0.0009680129177514226,
+ "loss": 3.0199,
+ "step": 308
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5053855594822741,
+ "learning_rate": 0.0009677509003993915,
+ "loss": 2.9622,
+ "step": 309
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4714928209978853,
+ "learning_rate": 0.0009674878500378221,
+ "loss": 3.0119,
+ "step": 310
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5206857170514964,
+ "learning_rate": 0.0009672237672476505,
+ "loss": 3.0394,
+ "step": 311
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5157904091378976,
+ "learning_rate": 0.0009669586526120935,
+ "loss": 3.1681,
+ "step": 312
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5589361778593485,
+ "learning_rate": 0.0009666925067166459,
+ "loss": 3.0563,
+ "step": 313
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4883117092623467,
+ "learning_rate": 0.000966425330149081,
+ "loss": 2.8973,
+ "step": 314
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.4655464739020218,
+ "learning_rate": 0.0009661571234994475,
+ "loss": 2.9918,
+ "step": 315
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.48524124973975213,
+ "learning_rate": 0.0009658878873600691,
+ "loss": 3.0725,
+ "step": 316
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4600346257725194,
+ "learning_rate": 0.0009656176223255438,
+ "loss": 3.0072,
+ "step": 317
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4899746598711084,
+ "learning_rate": 0.000965346328992741,
+ "loss": 3.0979,
+ "step": 318
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4682045574289672,
+ "learning_rate": 0.0009650740079608014,
+ "loss": 3.0828,
+ "step": 319
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.46047176594216327,
+ "learning_rate": 0.0009648006598311353,
+ "loss": 3.123,
+ "step": 320
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4388428568052404,
+ "learning_rate": 0.0009645262852074214,
+ "loss": 3.0697,
+ "step": 321
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4985649784084978,
+ "learning_rate": 0.0009642508846956053,
+ "loss": 2.916,
+ "step": 322
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4865522533091172,
+ "learning_rate": 0.0009639744589038983,
+ "loss": 3.0701,
+ "step": 323
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4701109428139839,
+ "learning_rate": 0.0009636970084427759,
+ "loss": 2.9631,
+ "step": 324
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4687897608554863,
+ "learning_rate": 0.0009634185339249766,
+ "loss": 3.0297,
+ "step": 325
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.48690208768786464,
+ "learning_rate": 0.0009631390359655003,
+ "loss": 3.0389,
+ "step": 326
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4822703859374579,
+ "learning_rate": 0.0009628585151816074,
+ "loss": 3.0992,
+ "step": 327
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.49027420014184586,
+ "learning_rate": 0.0009625769721928172,
+ "loss": 3.1263,
+ "step": 328
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.48041370623938345,
+ "learning_rate": 0.0009622944076209061,
+ "loss": 3.0158,
+ "step": 329
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.44305260781128986,
+ "learning_rate": 0.0009620108220899071,
+ "loss": 3.0836,
+ "step": 330
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.49600057755910937,
+ "learning_rate": 0.0009617262162261075,
+ "loss": 3.1382,
+ "step": 331
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.47018596626971704,
+ "learning_rate": 0.0009614405906580486,
+ "loss": 3.0862,
+ "step": 332
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.487893258555253,
+ "learning_rate": 0.000961153946016523,
+ "loss": 3.0112,
+ "step": 333
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.5346228381478916,
+ "learning_rate": 0.000960866282934574,
+ "loss": 2.9964,
+ "step": 334
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.5057431347484991,
+ "learning_rate": 0.0009605776020474945,
+ "loss": 3.1034,
+ "step": 335
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.4982220444519792,
+ "learning_rate": 0.0009602879039928249,
+ "loss": 3.0489,
+ "step": 336
+ },
+ {
+ "epoch": 0.15,
+ "grad_norm": 0.5537396879593695,
+ "learning_rate": 0.0009599971894103521,
+ "loss": 2.9787,
+ "step": 337
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.40627706080294745,
+ "learning_rate": 0.0009597054589421077,
+ "loss": 3.0086,
+ "step": 338
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4879160651165343,
+ "learning_rate": 0.0009594127132323669,
+ "loss": 3.0945,
+ "step": 339
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.46797694459842665,
+ "learning_rate": 0.0009591189529276474,
+ "loss": 3.0399,
+ "step": 340
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4696602589181308,
+ "learning_rate": 0.0009588241786767072,
+ "loss": 3.0719,
+ "step": 341
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4843098525339636,
+ "learning_rate": 0.0009585283911305436,
+ "loss": 3.0296,
+ "step": 342
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4798243772095176,
+ "learning_rate": 0.000958231590942392,
+ "loss": 3.098,
+ "step": 343
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.498609086823684,
+ "learning_rate": 0.0009579337787677238,
+ "loss": 3.0521,
+ "step": 344
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4765578621322359,
+ "learning_rate": 0.0009576349552642456,
+ "loss": 2.9481,
+ "step": 345
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4454407074455648,
+ "learning_rate": 0.0009573351210918975,
+ "loss": 3.0238,
+ "step": 346
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4988400567288936,
+ "learning_rate": 0.0009570342769128514,
+ "loss": 2.9955,
+ "step": 347
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.43168145214126674,
+ "learning_rate": 0.0009567324233915099,
+ "loss": 3.1314,
+ "step": 348
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.5143204791193541,
+ "learning_rate": 0.0009564295611945047,
+ "loss": 2.9786,
+ "step": 349
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4048829632590379,
+ "learning_rate": 0.000956125690990695,
+ "loss": 3.0386,
+ "step": 350
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.606095112929325,
+ "learning_rate": 0.0009558208134511665,
+ "loss": 3.0629,
+ "step": 351
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4550869204413142,
+ "learning_rate": 0.0009555149292492289,
+ "loss": 3.0376,
+ "step": 352
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.47566921266912415,
+ "learning_rate": 0.0009552080390604159,
+ "loss": 3.1736,
+ "step": 353
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.55080372234207,
+ "learning_rate": 0.0009549001435624823,
+ "loss": 2.9837,
+ "step": 354
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4509504477067053,
+ "learning_rate": 0.0009545912434354029,
+ "loss": 3.0398,
+ "step": 355
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.5338822513329282,
+ "learning_rate": 0.0009542813393613721,
+ "loss": 2.9519,
+ "step": 356
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.5441582279188419,
+ "learning_rate": 0.0009539704320248006,
+ "loss": 3.0605,
+ "step": 357
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.509224581576941,
+ "learning_rate": 0.0009536585221123151,
+ "loss": 2.9538,
+ "step": 358
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.4377590413705248,
+ "learning_rate": 0.0009533456103127565,
+ "loss": 2.9679,
+ "step": 359
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4429655378689488,
+ "learning_rate": 0.000953031697317178,
+ "loss": 3.0538,
+ "step": 360
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5679104034208167,
+ "learning_rate": 0.0009527167838188445,
+ "loss": 2.9194,
+ "step": 361
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.47529767192616873,
+ "learning_rate": 0.0009524008705132299,
+ "loss": 2.9776,
+ "step": 362
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5534471815840221,
+ "learning_rate": 0.0009520839580980166,
+ "loss": 3.0446,
+ "step": 363
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5286943923618481,
+ "learning_rate": 0.0009517660472730929,
+ "loss": 2.9446,
+ "step": 364
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.47775327960168995,
+ "learning_rate": 0.0009514471387405526,
+ "loss": 2.9894,
+ "step": 365
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5809941342275982,
+ "learning_rate": 0.0009511272332046926,
+ "loss": 2.9459,
+ "step": 366
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5070919461332507,
+ "learning_rate": 0.0009508063313720119,
+ "loss": 3.0021,
+ "step": 367
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.48152230628043413,
+ "learning_rate": 0.0009504844339512095,
+ "loss": 3.0805,
+ "step": 368
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5054527516998113,
+ "learning_rate": 0.0009501615416531835,
+ "loss": 3.0184,
+ "step": 369
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5146853715938834,
+ "learning_rate": 0.0009498376551910285,
+ "loss": 3.0452,
+ "step": 370
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.43134805674947385,
+ "learning_rate": 0.0009495127752800352,
+ "loss": 3.0313,
+ "step": 371
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5356726184825262,
+ "learning_rate": 0.0009491869026376882,
+ "loss": 3.0163,
+ "step": 372
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4899367322241046,
+ "learning_rate": 0.0009488600379836648,
+ "loss": 3.1271,
+ "step": 373
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4726827829894805,
+ "learning_rate": 0.0009485321820398321,
+ "loss": 3.0836,
+ "step": 374
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4663226272027926,
+ "learning_rate": 0.0009482033355302475,
+ "loss": 3.0792,
+ "step": 375
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.6016945641109074,
+ "learning_rate": 0.0009478734991811556,
+ "loss": 2.9813,
+ "step": 376
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.4881397438118027,
+ "learning_rate": 0.0009475426737209871,
+ "loss": 3.0804,
+ "step": 377
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5529403080632629,
+ "learning_rate": 0.000947210859880357,
+ "loss": 3.0023,
+ "step": 378
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5875267291466769,
+ "learning_rate": 0.0009468780583920631,
+ "loss": 2.9256,
+ "step": 379
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5394188300653993,
+ "learning_rate": 0.0009465442699910846,
+ "loss": 3.0018,
+ "step": 380
+ },
+ {
+ "epoch": 0.17,
+ "grad_norm": 0.5765294864804436,
+ "learning_rate": 0.0009462094954145801,
+ "loss": 2.997,
+ "step": 381
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.48345874381753895,
+ "learning_rate": 0.0009458737354018859,
+ "loss": 2.9406,
+ "step": 382
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.6035973913974746,
+ "learning_rate": 0.000945536990694515,
+ "loss": 3.0295,
+ "step": 383
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5062688177104546,
+ "learning_rate": 0.0009451992620361551,
+ "loss": 3.0511,
+ "step": 384
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5180825486609733,
+ "learning_rate": 0.0009448605501726664,
+ "loss": 3.0151,
+ "step": 385
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.6046632514050444,
+ "learning_rate": 0.000944520855852081,
+ "loss": 2.9617,
+ "step": 386
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.47843443440018185,
+ "learning_rate": 0.0009441801798246002,
+ "loss": 2.983,
+ "step": 387
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4715912159681642,
+ "learning_rate": 0.0009438385228425939,
+ "loss": 3.0215,
+ "step": 388
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.6146005902208077,
+ "learning_rate": 0.0009434958856605982,
+ "loss": 3.0772,
+ "step": 389
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4609760270508822,
+ "learning_rate": 0.0009431522690353137,
+ "loss": 2.928,
+ "step": 390
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5329345662254492,
+ "learning_rate": 0.0009428076737256044,
+ "loss": 2.9899,
+ "step": 391
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.46676657691266404,
+ "learning_rate": 0.0009424621004924954,
+ "loss": 2.9449,
+ "step": 392
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.44187559540363164,
+ "learning_rate": 0.0009421155500991719,
+ "loss": 3.0253,
+ "step": 393
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.571003803057975,
+ "learning_rate": 0.0009417680233109767,
+ "loss": 3.0754,
+ "step": 394
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5305659030984635,
+ "learning_rate": 0.000941419520895409,
+ "loss": 3.002,
+ "step": 395
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5029704997484273,
+ "learning_rate": 0.0009410700436221229,
+ "loss": 3.0294,
+ "step": 396
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4970159440525706,
+ "learning_rate": 0.0009407195922629252,
+ "loss": 2.9632,
+ "step": 397
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4394418129773029,
+ "learning_rate": 0.000940368167591774,
+ "loss": 2.9151,
+ "step": 398
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.5395774624072547,
+ "learning_rate": 0.0009400157703847769,
+ "loss": 2.9488,
+ "step": 399
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4660245997485381,
+ "learning_rate": 0.0009396624014201895,
+ "loss": 3.048,
+ "step": 400
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.47705779740329024,
+ "learning_rate": 0.000939308061478413,
+ "loss": 2.9699,
+ "step": 401
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.45402379450769487,
+ "learning_rate": 0.0009389527513419935,
+ "loss": 3.0008,
+ "step": 402
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.47358943094409517,
+ "learning_rate": 0.0009385964717956195,
+ "loss": 2.8775,
+ "step": 403
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.6162999419541931,
+ "learning_rate": 0.0009382392236261201,
+ "loss": 3.0417,
+ "step": 404
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4987551168026171,
+ "learning_rate": 0.0009378810076224644,
+ "loss": 2.9643,
+ "step": 405
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.548870078424536,
+ "learning_rate": 0.0009375218245757582,
+ "loss": 2.9958,
+ "step": 406
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.518639074187955,
+ "learning_rate": 0.0009371616752792432,
+ "loss": 2.9839,
+ "step": 407
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5406094243266623,
+ "learning_rate": 0.0009368005605282949,
+ "loss": 2.9244,
+ "step": 408
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.46251386339698414,
+ "learning_rate": 0.0009364384811204212,
+ "loss": 2.9786,
+ "step": 409
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4933066238767263,
+ "learning_rate": 0.00093607543785526,
+ "loss": 2.9733,
+ "step": 410
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.548651226793732,
+ "learning_rate": 0.0009357114315345787,
+ "loss": 2.9623,
+ "step": 411
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4158226446837171,
+ "learning_rate": 0.0009353464629622705,
+ "loss": 2.8826,
+ "step": 412
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4228662108018181,
+ "learning_rate": 0.0009349805329443544,
+ "loss": 2.9811,
+ "step": 413
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5157418778471298,
+ "learning_rate": 0.0009346136422889724,
+ "loss": 3.0147,
+ "step": 414
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.46735735665049033,
+ "learning_rate": 0.0009342457918063882,
+ "loss": 2.943,
+ "step": 415
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.6376252592860742,
+ "learning_rate": 0.0009338769823089853,
+ "loss": 3.0437,
+ "step": 416
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.49325825946383267,
+ "learning_rate": 0.0009335072146112648,
+ "loss": 2.9208,
+ "step": 417
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5596290314171709,
+ "learning_rate": 0.0009331364895298444,
+ "loss": 2.8705,
+ "step": 418
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.4884054625815647,
+ "learning_rate": 0.0009327648078834559,
+ "loss": 2.9132,
+ "step": 419
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5496974839242864,
+ "learning_rate": 0.0009323921704929434,
+ "loss": 2.9552,
+ "step": 420
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.49132631497978096,
+ "learning_rate": 0.0009320185781812623,
+ "loss": 2.9904,
+ "step": 421
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.43657765190006026,
+ "learning_rate": 0.0009316440317734762,
+ "loss": 2.9211,
+ "step": 422
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.48661805476874975,
+ "learning_rate": 0.0009312685320967565,
+ "loss": 2.9624,
+ "step": 423
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5135715364325427,
+ "learning_rate": 0.0009308920799803793,
+ "loss": 2.9397,
+ "step": 424
+ },
+ {
+ "epoch": 0.19,
+ "grad_norm": 0.5213564984984786,
+ "learning_rate": 0.0009305146762557246,
+ "loss": 2.9539,
+ "step": 425
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.45148383956790444,
+ "learning_rate": 0.0009301363217562736,
+ "loss": 2.9351,
+ "step": 426
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5960017161681906,
+ "learning_rate": 0.0009297570173176074,
+ "loss": 2.9276,
+ "step": 427
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.511075499664819,
+ "learning_rate": 0.000929376763777405,
+ "loss": 2.9691,
+ "step": 428
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5224780437181922,
+ "learning_rate": 0.0009289955619754413,
+ "loss": 3.0003,
+ "step": 429
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5341936886872939,
+ "learning_rate": 0.0009286134127535859,
+ "loss": 2.925,
+ "step": 430
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5729715396293762,
+ "learning_rate": 0.0009282303169558,
+ "loss": 2.9691,
+ "step": 431
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5219776108137949,
+ "learning_rate": 0.0009278462754281359,
+ "loss": 2.9531,
+ "step": 432
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5639021131909823,
+ "learning_rate": 0.0009274612890187342,
+ "loss": 3.0339,
+ "step": 433
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.423981207360482,
+ "learning_rate": 0.0009270753585778222,
+ "loss": 2.9041,
+ "step": 434
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4879316860560613,
+ "learning_rate": 0.0009266884849577124,
+ "loss": 2.8857,
+ "step": 435
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.533230155055393,
+ "learning_rate": 0.0009263006690127998,
+ "loss": 3.0521,
+ "step": 436
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5419517537515356,
+ "learning_rate": 0.0009259119115995609,
+ "loss": 2.9755,
+ "step": 437
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.49860727195367804,
+ "learning_rate": 0.0009255222135765511,
+ "loss": 2.9616,
+ "step": 438
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.539691726375911,
+ "learning_rate": 0.0009251315758044032,
+ "loss": 2.9118,
+ "step": 439
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5233911902091123,
+ "learning_rate": 0.0009247399991458255,
+ "loss": 2.8851,
+ "step": 440
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.49625909044493643,
+ "learning_rate": 0.0009243474844655996,
+ "loss": 2.9513,
+ "step": 441
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5674325585191178,
+ "learning_rate": 0.0009239540326305791,
+ "loss": 2.9316,
+ "step": 442
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5620663517264144,
+ "learning_rate": 0.0009235596445096864,
+ "loss": 3.0614,
+ "step": 443
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4770836891464476,
+ "learning_rate": 0.0009231643209739127,
+ "loss": 2.9604,
+ "step": 444
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.5883505727472255,
+ "learning_rate": 0.0009227680628963145,
+ "loss": 2.8912,
+ "step": 445
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.4960038185401185,
+ "learning_rate": 0.000922370871152012,
+ "loss": 2.9507,
+ "step": 446
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4875655421509081,
+ "learning_rate": 0.0009219727466181877,
+ "loss": 2.9655,
+ "step": 447
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5346241963009257,
+ "learning_rate": 0.0009215736901740841,
+ "loss": 2.9841,
+ "step": 448
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5570461562419412,
+ "learning_rate": 0.0009211737027010016,
+ "loss": 2.9582,
+ "step": 449
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5428447838624275,
+ "learning_rate": 0.0009207727850822971,
+ "loss": 2.9631,
+ "step": 450
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5153435768317739,
+ "learning_rate": 0.0009203709382033814,
+ "loss": 2.9474,
+ "step": 451
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5616424856636266,
+ "learning_rate": 0.0009199681629517173,
+ "loss": 2.9879,
+ "step": 452
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5561590624389096,
+ "learning_rate": 0.0009195644602168184,
+ "loss": 2.8772,
+ "step": 453
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5261545733785727,
+ "learning_rate": 0.0009191598308902464,
+ "loss": 2.9756,
+ "step": 454
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.503695859923239,
+ "learning_rate": 0.0009187542758656091,
+ "loss": 2.9651,
+ "step": 455
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4611058162423113,
+ "learning_rate": 0.0009183477960385591,
+ "loss": 2.9308,
+ "step": 456
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5110891052157471,
+ "learning_rate": 0.0009179403923067912,
+ "loss": 2.9121,
+ "step": 457
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5572457689919895,
+ "learning_rate": 0.0009175320655700406,
+ "loss": 3.0012,
+ "step": 458
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.56575456099697,
+ "learning_rate": 0.0009171228167300805,
+ "loss": 2.9347,
+ "step": 459
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.6737909709752703,
+ "learning_rate": 0.0009167126466907215,
+ "loss": 2.9121,
+ "step": 460
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.48306566069640283,
+ "learning_rate": 0.0009163015563578074,
+ "loss": 2.8241,
+ "step": 461
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.551315896441015,
+ "learning_rate": 0.0009158895466392158,
+ "loss": 2.9989,
+ "step": 462
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5239093149442633,
+ "learning_rate": 0.0009154766184448535,
+ "loss": 2.9659,
+ "step": 463
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5679546114613675,
+ "learning_rate": 0.0009150627726866568,
+ "loss": 2.964,
+ "step": 464
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5776425199961028,
+ "learning_rate": 0.000914648010278587,
+ "loss": 2.9475,
+ "step": 465
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5897599367569054,
+ "learning_rate": 0.0009142323321366315,
+ "loss": 2.8971,
+ "step": 466
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.5177650589375873,
+ "learning_rate": 0.0009138157391787986,
+ "loss": 2.9064,
+ "step": 467
+ },
+ {
+ "epoch": 0.21,
+ "grad_norm": 0.4374348835989601,
+ "learning_rate": 0.0009133982323251177,
+ "loss": 2.9857,
+ "step": 468
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.6088452751788686,
+ "learning_rate": 0.0009129798124976365,
+ "loss": 2.9426,
+ "step": 469
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5926389954790133,
+ "learning_rate": 0.0009125604806204187,
+ "loss": 2.8814,
+ "step": 470
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4672703058624501,
+ "learning_rate": 0.0009121402376195421,
+ "loss": 2.855,
+ "step": 471
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5610911037218453,
+ "learning_rate": 0.0009117190844230972,
+ "loss": 2.999,
+ "step": 472
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5097476947768206,
+ "learning_rate": 0.0009112970219611841,
+ "loss": 2.9025,
+ "step": 473
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5063098850604952,
+ "learning_rate": 0.0009108740511659115,
+ "loss": 2.9227,
+ "step": 474
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5370771029988459,
+ "learning_rate": 0.0009104501729713935,
+ "loss": 2.941,
+ "step": 475
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5959185849794054,
+ "learning_rate": 0.0009100253883137488,
+ "loss": 2.994,
+ "step": 476
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4937170152698863,
+ "learning_rate": 0.0009095996981310974,
+ "loss": 2.9104,
+ "step": 477
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.47650997879531587,
+ "learning_rate": 0.0009091731033635596,
+ "loss": 2.9115,
+ "step": 478
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.44011750443479547,
+ "learning_rate": 0.0009087456049532529,
+ "loss": 2.9221,
+ "step": 479
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5322455264120257,
+ "learning_rate": 0.0009083172038442914,
+ "loss": 2.9359,
+ "step": 480
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5183598599185936,
+ "learning_rate": 0.0009078879009827817,
+ "loss": 2.9428,
+ "step": 481
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4618647837081506,
+ "learning_rate": 0.0009074576973168223,
+ "loss": 2.9797,
+ "step": 482
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5147828479886168,
+ "learning_rate": 0.0009070265937965015,
+ "loss": 2.9793,
+ "step": 483
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4684521134489715,
+ "learning_rate": 0.0009065945913738942,
+ "loss": 2.9277,
+ "step": 484
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5429982829901041,
+ "learning_rate": 0.0009061616910030609,
+ "loss": 2.9257,
+ "step": 485
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.45291350473876096,
+ "learning_rate": 0.0009057278936400453,
+ "loss": 2.8675,
+ "step": 486
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.49644894337956574,
+ "learning_rate": 0.0009052932002428715,
+ "loss": 2.9112,
+ "step": 487
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4798836530615159,
+ "learning_rate": 0.0009048576117715435,
+ "loss": 2.9464,
+ "step": 488
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.4981773949039495,
+ "learning_rate": 0.0009044211291880407,
+ "loss": 2.9835,
+ "step": 489
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.5600084614536893,
+ "learning_rate": 0.000903983753456318,
+ "loss": 2.9778,
+ "step": 490
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5101647323119486,
+ "learning_rate": 0.0009035454855423026,
+ "loss": 2.8529,
+ "step": 491
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5780686512312957,
+ "learning_rate": 0.0009031063264138922,
+ "loss": 2.9457,
+ "step": 492
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4913676655656347,
+ "learning_rate": 0.0009026662770409522,
+ "loss": 2.9559,
+ "step": 493
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4990109684403697,
+ "learning_rate": 0.0009022253383953147,
+ "loss": 2.9019,
+ "step": 494
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5877748599338133,
+ "learning_rate": 0.0009017835114507753,
+ "loss": 2.9429,
+ "step": 495
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5177379832570983,
+ "learning_rate": 0.0009013407971830914,
+ "loss": 2.985,
+ "step": 496
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5932959048307527,
+ "learning_rate": 0.0009008971965699801,
+ "loss": 2.8916,
+ "step": 497
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.54932480626174,
+ "learning_rate": 0.0009004527105911163,
+ "loss": 2.9476,
+ "step": 498
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4845103221930796,
+ "learning_rate": 0.0009000073402281295,
+ "loss": 2.8539,
+ "step": 499
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5472696408301263,
+ "learning_rate": 0.0008995610864646028,
+ "loss": 2.9767,
+ "step": 500
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.515946712565081,
+ "learning_rate": 0.0008991139502860703,
+ "loss": 2.9315,
+ "step": 501
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.46194685756295556,
+ "learning_rate": 0.0008986659326800146,
+ "loss": 2.9608,
+ "step": 502
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4560765574370238,
+ "learning_rate": 0.0008982170346358651,
+ "loss": 2.8346,
+ "step": 503
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5391918532572942,
+ "learning_rate": 0.0008977672571449956,
+ "loss": 2.9439,
+ "step": 504
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4814760570041407,
+ "learning_rate": 0.0008973166012007217,
+ "loss": 2.827,
+ "step": 505
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5629457114946768,
+ "learning_rate": 0.0008968650677982998,
+ "loss": 2.9719,
+ "step": 506
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5593845257197965,
+ "learning_rate": 0.0008964126579349236,
+ "loss": 2.8864,
+ "step": 507
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.46519104813690254,
+ "learning_rate": 0.0008959593726097226,
+ "loss": 2.8429,
+ "step": 508
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4840023861659385,
+ "learning_rate": 0.0008955052128237596,
+ "loss": 2.9331,
+ "step": 509
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.5558219699545366,
+ "learning_rate": 0.0008950501795800288,
+ "loss": 2.9016,
+ "step": 510
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.4857175065946673,
+ "learning_rate": 0.0008945942738834532,
+ "loss": 2.9583,
+ "step": 511
+ },
+ {
+ "epoch": 0.23,
+ "grad_norm": 0.46442689539646187,
+ "learning_rate": 0.0008941374967408826,
+ "loss": 2.9313,
+ "step": 512
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4920680577558812,
+ "learning_rate": 0.0008936798491610916,
+ "loss": 2.9322,
+ "step": 513
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.42071657053145184,
+ "learning_rate": 0.0008932213321547768,
+ "loss": 2.8092,
+ "step": 514
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5423075015977806,
+ "learning_rate": 0.0008927619467345554,
+ "loss": 2.9191,
+ "step": 515
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.46214326428447566,
+ "learning_rate": 0.0008923016939149615,
+ "loss": 2.9774,
+ "step": 516
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5368441064573027,
+ "learning_rate": 0.0008918405747124458,
+ "loss": 2.9746,
+ "step": 517
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.47725237881777255,
+ "learning_rate": 0.0008913785901453721,
+ "loss": 2.8924,
+ "step": 518
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4587232357368762,
+ "learning_rate": 0.000890915741234015,
+ "loss": 2.9563,
+ "step": 519
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5025733386528658,
+ "learning_rate": 0.0008904520290005582,
+ "loss": 2.8566,
+ "step": 520
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5062215102612683,
+ "learning_rate": 0.000889987454469092,
+ "loss": 2.9385,
+ "step": 521
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.46271562433176705,
+ "learning_rate": 0.0008895220186656111,
+ "loss": 2.8672,
+ "step": 522
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4447670480633681,
+ "learning_rate": 0.0008890557226180122,
+ "loss": 2.9219,
+ "step": 523
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5131916806570509,
+ "learning_rate": 0.0008885885673560921,
+ "loss": 2.8686,
+ "step": 524
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.472971672961692,
+ "learning_rate": 0.0008881205539115444,
+ "loss": 2.834,
+ "step": 525
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5203357089744916,
+ "learning_rate": 0.0008876516833179589,
+ "loss": 2.8396,
+ "step": 526
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5795193057589354,
+ "learning_rate": 0.0008871819566108177,
+ "loss": 2.8938,
+ "step": 527
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4828578545983392,
+ "learning_rate": 0.000886711374827494,
+ "loss": 2.8184,
+ "step": 528
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5005724587827719,
+ "learning_rate": 0.0008862399390072491,
+ "loss": 2.9117,
+ "step": 529
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.543022044362364,
+ "learning_rate": 0.0008857676501912305,
+ "loss": 2.7959,
+ "step": 530
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4921165011571337,
+ "learning_rate": 0.0008852945094224697,
+ "loss": 2.9149,
+ "step": 531
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.46669093263366496,
+ "learning_rate": 0.0008848205177458795,
+ "loss": 2.907,
+ "step": 532
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.5449088043083928,
+ "learning_rate": 0.0008843456762082518,
+ "loss": 2.9024,
+ "step": 533
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.4735115730561581,
+ "learning_rate": 0.0008838699858582557,
+ "loss": 2.9188,
+ "step": 534
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.499556618409815,
+ "learning_rate": 0.0008833934477464347,
+ "loss": 2.9121,
+ "step": 535
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5592446376142547,
+ "learning_rate": 0.0008829160629252045,
+ "loss": 2.8401,
+ "step": 536
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5053968364268197,
+ "learning_rate": 0.0008824378324488509,
+ "loss": 2.9381,
+ "step": 537
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5803014676195123,
+ "learning_rate": 0.0008819587573735268,
+ "loss": 2.9658,
+ "step": 538
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5655567818068991,
+ "learning_rate": 0.0008814788387572513,
+ "loss": 2.9185,
+ "step": 539
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5394021744156271,
+ "learning_rate": 0.0008809980776599053,
+ "loss": 2.8238,
+ "step": 540
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5006668242505091,
+ "learning_rate": 0.0008805164751432312,
+ "loss": 2.9115,
+ "step": 541
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.549900650742081,
+ "learning_rate": 0.0008800340322708292,
+ "loss": 2.878,
+ "step": 542
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.4434311475917405,
+ "learning_rate": 0.0008795507501081555,
+ "loss": 2.929,
+ "step": 543
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5049435314266008,
+ "learning_rate": 0.0008790666297225196,
+ "loss": 2.8983,
+ "step": 544
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5258503290310728,
+ "learning_rate": 0.0008785816721830829,
+ "loss": 2.8395,
+ "step": 545
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5226439862806438,
+ "learning_rate": 0.0008780958785608546,
+ "loss": 2.9605,
+ "step": 546
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.47502288491554423,
+ "learning_rate": 0.0008776092499286912,
+ "loss": 2.9058,
+ "step": 547
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.49777311686968306,
+ "learning_rate": 0.0008771217873612929,
+ "loss": 2.8013,
+ "step": 548
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5040476517317467,
+ "learning_rate": 0.0008766334919352017,
+ "loss": 2.8891,
+ "step": 549
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5734027619323142,
+ "learning_rate": 0.0008761443647287987,
+ "loss": 2.8978,
+ "step": 550
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5591583484093245,
+ "learning_rate": 0.0008756544068223026,
+ "loss": 2.9268,
+ "step": 551
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5590005958740321,
+ "learning_rate": 0.0008751636192977659,
+ "loss": 2.9581,
+ "step": 552
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.4989723022744546,
+ "learning_rate": 0.0008746720032390737,
+ "loss": 2.8166,
+ "step": 553
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.4524663721859508,
+ "learning_rate": 0.0008741795597319408,
+ "loss": 2.876,
+ "step": 554
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.5340577235221757,
+ "learning_rate": 0.0008736862898639095,
+ "loss": 2.8726,
+ "step": 555
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.46073229195296406,
+ "learning_rate": 0.0008731921947243468,
+ "loss": 2.8697,
+ "step": 556
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5008000922809575,
+ "learning_rate": 0.0008726972754044427,
+ "loss": 2.9267,
+ "step": 557
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.4447115743712347,
+ "learning_rate": 0.0008722015329972069,
+ "loss": 2.8616,
+ "step": 558
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.45839536662578106,
+ "learning_rate": 0.0008717049685974672,
+ "loss": 2.8847,
+ "step": 559
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5138574329331402,
+ "learning_rate": 0.0008712075833018665,
+ "loss": 2.927,
+ "step": 560
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.4617875992822275,
+ "learning_rate": 0.0008707093782088608,
+ "loss": 2.8355,
+ "step": 561
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.4768214484564255,
+ "learning_rate": 0.0008702103544187167,
+ "loss": 2.9338,
+ "step": 562
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5092198530538106,
+ "learning_rate": 0.0008697105130335085,
+ "loss": 2.8922,
+ "step": 563
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.4909867955599156,
+ "learning_rate": 0.0008692098551571164,
+ "loss": 2.8805,
+ "step": 564
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.48400549737164544,
+ "learning_rate": 0.0008687083818952235,
+ "loss": 2.844,
+ "step": 565
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5312485094846079,
+ "learning_rate": 0.0008682060943553143,
+ "loss": 2.8628,
+ "step": 566
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5284169290059281,
+ "learning_rate": 0.0008677029936466707,
+ "loss": 2.8693,
+ "step": 567
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5596873208407769,
+ "learning_rate": 0.0008671990808803711,
+ "loss": 2.8628,
+ "step": 568
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5127726821195686,
+ "learning_rate": 0.0008666943571692871,
+ "loss": 2.8931,
+ "step": 569
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5412302489200196,
+ "learning_rate": 0.0008661888236280813,
+ "loss": 2.8495,
+ "step": 570
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.5648066434836707,
+ "learning_rate": 0.0008656824813732045,
+ "loss": 2.8212,
+ "step": 571
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.530080954397923,
+ "learning_rate": 0.000865175331522894,
+ "loss": 2.9589,
+ "step": 572
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.49007327673633844,
+ "learning_rate": 0.0008646673751971703,
+ "loss": 2.9213,
+ "step": 573
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.47956842689479867,
+ "learning_rate": 0.000864158613517835,
+ "loss": 2.8258,
+ "step": 574
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.563080037087868,
+ "learning_rate": 0.0008636490476084681,
+ "loss": 2.8938,
+ "step": 575
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.48614569752706366,
+ "learning_rate": 0.0008631386785944264,
+ "loss": 2.8999,
+ "step": 576
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.47949260744852895,
+ "learning_rate": 0.0008626275076028397,
+ "loss": 2.9086,
+ "step": 577
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5452426340004343,
+ "learning_rate": 0.0008621155357626091,
+ "loss": 2.7912,
+ "step": 578
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5467956749933328,
+ "learning_rate": 0.0008616027642044042,
+ "loss": 2.8611,
+ "step": 579
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5134651071394092,
+ "learning_rate": 0.000861089194060661,
+ "loss": 2.8905,
+ "step": 580
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.472677589381949,
+ "learning_rate": 0.000860574826465579,
+ "loss": 2.7825,
+ "step": 581
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.48018765852806505,
+ "learning_rate": 0.0008600596625551191,
+ "loss": 2.8662,
+ "step": 582
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5128764288735578,
+ "learning_rate": 0.0008595437034670006,
+ "loss": 2.8177,
+ "step": 583
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4966513345718583,
+ "learning_rate": 0.0008590269503406985,
+ "loss": 2.9176,
+ "step": 584
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5082739810555716,
+ "learning_rate": 0.0008585094043174423,
+ "loss": 2.8508,
+ "step": 585
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.48652094508387944,
+ "learning_rate": 0.0008579910665402118,
+ "loss": 2.8479,
+ "step": 586
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.48352095303386156,
+ "learning_rate": 0.000857471938153736,
+ "loss": 2.8137,
+ "step": 587
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.46689576961775836,
+ "learning_rate": 0.0008569520203044892,
+ "loss": 2.8206,
+ "step": 588
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5249930996254872,
+ "learning_rate": 0.0008564313141406901,
+ "loss": 2.8354,
+ "step": 589
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5101748353606734,
+ "learning_rate": 0.0008559098208122973,
+ "loss": 2.8479,
+ "step": 590
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4737361503886194,
+ "learning_rate": 0.0008553875414710089,
+ "loss": 2.8718,
+ "step": 591
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4695780598746661,
+ "learning_rate": 0.0008548644772702579,
+ "loss": 2.834,
+ "step": 592
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5442563661083804,
+ "learning_rate": 0.0008543406293652116,
+ "loss": 2.8823,
+ "step": 593
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.4769846167337409,
+ "learning_rate": 0.0008538159989127671,
+ "loss": 2.8325,
+ "step": 594
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.48928408239321125,
+ "learning_rate": 0.0008532905870715505,
+ "loss": 2.9471,
+ "step": 595
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5755923681979764,
+ "learning_rate": 0.0008527643950019131,
+ "loss": 2.9084,
+ "step": 596
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5150173270935033,
+ "learning_rate": 0.0008522374238659296,
+ "loss": 2.9108,
+ "step": 597
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.49089687158658574,
+ "learning_rate": 0.0008517096748273951,
+ "loss": 2.8406,
+ "step": 598
+ },
+ {
+ "epoch": 0.27,
+ "grad_norm": 0.5384211751976695,
+ "learning_rate": 0.0008511811490518227,
+ "loss": 2.93,
+ "step": 599
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4549451269314346,
+ "learning_rate": 0.0008506518477064405,
+ "loss": 2.7794,
+ "step": 600
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4935542960309496,
+ "learning_rate": 0.0008501217719601903,
+ "loss": 2.8384,
+ "step": 601
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.47367721409710756,
+ "learning_rate": 0.0008495909229837233,
+ "loss": 2.8738,
+ "step": 602
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4822635411013927,
+ "learning_rate": 0.000849059301949399,
+ "loss": 2.8853,
+ "step": 603
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5211203299307834,
+ "learning_rate": 0.0008485269100312812,
+ "loss": 2.8763,
+ "step": 604
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5093788703853647,
+ "learning_rate": 0.0008479937484051368,
+ "loss": 2.9627,
+ "step": 605
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.49379817177711127,
+ "learning_rate": 0.0008474598182484323,
+ "loss": 2.9097,
+ "step": 606
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5055896442532648,
+ "learning_rate": 0.0008469251207403317,
+ "loss": 2.8566,
+ "step": 607
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.48998652758761935,
+ "learning_rate": 0.0008463896570616934,
+ "loss": 2.8092,
+ "step": 608
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4643446399451955,
+ "learning_rate": 0.0008458534283950678,
+ "loss": 2.9923,
+ "step": 609
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.47750788765136326,
+ "learning_rate": 0.0008453164359246952,
+ "loss": 2.8214,
+ "step": 610
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5387199911323801,
+ "learning_rate": 0.0008447786808365022,
+ "loss": 2.8333,
+ "step": 611
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.537020119273398,
+ "learning_rate": 0.0008442401643181,
+ "loss": 2.8287,
+ "step": 612
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.45996197632489566,
+ "learning_rate": 0.0008437008875587811,
+ "loss": 2.8405,
+ "step": 613
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.46585158868278714,
+ "learning_rate": 0.0008431608517495171,
+ "loss": 2.8358,
+ "step": 614
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5324745662763111,
+ "learning_rate": 0.0008426200580829561,
+ "loss": 2.9404,
+ "step": 615
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4897854216836938,
+ "learning_rate": 0.0008420785077534195,
+ "loss": 2.7592,
+ "step": 616
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5143061610832548,
+ "learning_rate": 0.0008415362019569001,
+ "loss": 2.8299,
+ "step": 617
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.4901690572264902,
+ "learning_rate": 0.0008409931418910591,
+ "loss": 2.782,
+ "step": 618
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5710877923272081,
+ "learning_rate": 0.0008404493287552232,
+ "loss": 2.9038,
+ "step": 619
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.5387796073193574,
+ "learning_rate": 0.0008399047637503825,
+ "loss": 2.8469,
+ "step": 620
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.46356672629268736,
+ "learning_rate": 0.0008393594480791875,
+ "loss": 2.8451,
+ "step": 621
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4857247723265462,
+ "learning_rate": 0.0008388133829459463,
+ "loss": 2.8477,
+ "step": 622
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5028032907022912,
+ "learning_rate": 0.0008382665695566227,
+ "loss": 2.8071,
+ "step": 623
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5352764699826724,
+ "learning_rate": 0.0008377190091188324,
+ "loss": 2.8124,
+ "step": 624
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5154671314728527,
+ "learning_rate": 0.0008371707028418413,
+ "loss": 2.8065,
+ "step": 625
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5525779227319001,
+ "learning_rate": 0.0008366216519365621,
+ "loss": 2.81,
+ "step": 626
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4999272063558071,
+ "learning_rate": 0.0008360718576155525,
+ "loss": 2.9294,
+ "step": 627
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.538437972979967,
+ "learning_rate": 0.0008355213210930118,
+ "loss": 2.802,
+ "step": 628
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4779442710284314,
+ "learning_rate": 0.0008349700435847778,
+ "loss": 2.9181,
+ "step": 629
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5112276129707731,
+ "learning_rate": 0.0008344180263083256,
+ "loss": 2.8876,
+ "step": 630
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5121208689408047,
+ "learning_rate": 0.000833865270482764,
+ "loss": 2.9277,
+ "step": 631
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5073415586581523,
+ "learning_rate": 0.0008333117773288324,
+ "loss": 2.9644,
+ "step": 632
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.516536914907916,
+ "learning_rate": 0.0008327575480688985,
+ "loss": 2.907,
+ "step": 633
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5365473704448307,
+ "learning_rate": 0.000832202583926956,
+ "loss": 2.8206,
+ "step": 634
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5366448186943982,
+ "learning_rate": 0.0008316468861286217,
+ "loss": 2.83,
+ "step": 635
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5228990718651447,
+ "learning_rate": 0.0008310904559011323,
+ "loss": 2.8893,
+ "step": 636
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.46544827337040146,
+ "learning_rate": 0.0008305332944733419,
+ "loss": 2.7929,
+ "step": 637
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5332647947062249,
+ "learning_rate": 0.0008299754030757202,
+ "loss": 2.8962,
+ "step": 638
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.5373153750438453,
+ "learning_rate": 0.0008294167829403481,
+ "loss": 2.7954,
+ "step": 639
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.513076059645814,
+ "learning_rate": 0.0008288574353009164,
+ "loss": 2.8701,
+ "step": 640
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.47984399848374937,
+ "learning_rate": 0.0008282973613927225,
+ "loss": 2.7395,
+ "step": 641
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4847363450816728,
+ "learning_rate": 0.0008277365624526675,
+ "loss": 2.8574,
+ "step": 642
+ },
+ {
+ "epoch": 0.29,
+ "grad_norm": 0.4910899907700678,
+ "learning_rate": 0.0008271750397192541,
+ "loss": 2.8263,
+ "step": 643
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5443766929577677,
+ "learning_rate": 0.0008266127944325832,
+ "loss": 2.8328,
+ "step": 644
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5039112413589106,
+ "learning_rate": 0.0008260498278343513,
+ "loss": 2.915,
+ "step": 645
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.45117555991793873,
+ "learning_rate": 0.0008254861411678485,
+ "loss": 2.802,
+ "step": 646
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4620612469446592,
+ "learning_rate": 0.0008249217356779544,
+ "loss": 2.7939,
+ "step": 647
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5213350459237406,
+ "learning_rate": 0.0008243566126111363,
+ "loss": 2.8363,
+ "step": 648
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.46127740901909686,
+ "learning_rate": 0.0008237907732154466,
+ "loss": 2.776,
+ "step": 649
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5195423408551074,
+ "learning_rate": 0.0008232242187405194,
+ "loss": 2.8347,
+ "step": 650
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4597719384843304,
+ "learning_rate": 0.000822656950437568,
+ "loss": 2.7871,
+ "step": 651
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4744744191905495,
+ "learning_rate": 0.0008220889695593823,
+ "loss": 2.8932,
+ "step": 652
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4592569922712311,
+ "learning_rate": 0.0008215202773603259,
+ "loss": 2.8186,
+ "step": 653
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4888379498693485,
+ "learning_rate": 0.0008209508750963328,
+ "loss": 2.8816,
+ "step": 654
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4608427644037051,
+ "learning_rate": 0.0008203807640249062,
+ "loss": 2.8588,
+ "step": 655
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5427147262691617,
+ "learning_rate": 0.0008198099454051136,
+ "loss": 2.8434,
+ "step": 656
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5064096594473994,
+ "learning_rate": 0.0008192384204975857,
+ "loss": 2.8643,
+ "step": 657
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4837547195674473,
+ "learning_rate": 0.000818666190564513,
+ "loss": 2.8047,
+ "step": 658
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.44277891757582555,
+ "learning_rate": 0.0008180932568696426,
+ "loss": 2.852,
+ "step": 659
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.52358325472259,
+ "learning_rate": 0.0008175196206782764,
+ "loss": 2.8352,
+ "step": 660
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.4997652840704003,
+ "learning_rate": 0.0008169452832572675,
+ "loss": 2.8574,
+ "step": 661
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.46535861244240406,
+ "learning_rate": 0.0008163702458750173,
+ "loss": 2.7891,
+ "step": 662
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5844228916475839,
+ "learning_rate": 0.0008157945098014734,
+ "loss": 2.8538,
+ "step": 663
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.5340395984470981,
+ "learning_rate": 0.0008152180763081267,
+ "loss": 2.8357,
+ "step": 664
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.4773983930271618,
+ "learning_rate": 0.0008146409466680076,
+ "loss": 2.8222,
+ "step": 665
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5307602408186971,
+ "learning_rate": 0.0008140631221556845,
+ "loss": 2.8009,
+ "step": 666
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.43860393901215583,
+ "learning_rate": 0.0008134846040472599,
+ "loss": 2.8659,
+ "step": 667
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5107311724742075,
+ "learning_rate": 0.0008129053936203688,
+ "loss": 2.9087,
+ "step": 668
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5191816808666726,
+ "learning_rate": 0.0008123254921541745,
+ "loss": 2.8502,
+ "step": 669
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.51020382429763,
+ "learning_rate": 0.0008117449009293668,
+ "loss": 2.8447,
+ "step": 670
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.4703843692795738,
+ "learning_rate": 0.0008111636212281586,
+ "loss": 2.8351,
+ "step": 671
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.46621448944448807,
+ "learning_rate": 0.0008105816543342833,
+ "loss": 2.7883,
+ "step": 672
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.4301977863025668,
+ "learning_rate": 0.0008099990015329919,
+ "loss": 2.8082,
+ "step": 673
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5173946426936311,
+ "learning_rate": 0.0008094156641110504,
+ "loss": 2.7677,
+ "step": 674
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5981599780929721,
+ "learning_rate": 0.0008088316433567369,
+ "loss": 2.8339,
+ "step": 675
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.6234006489255799,
+ "learning_rate": 0.0008082469405598378,
+ "loss": 2.9027,
+ "step": 676
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5224969823883701,
+ "learning_rate": 0.0008076615570116468,
+ "loss": 2.8669,
+ "step": 677
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.6091701591643708,
+ "learning_rate": 0.0008070754940049603,
+ "loss": 2.7997,
+ "step": 678
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5965877845222998,
+ "learning_rate": 0.0008064887528340756,
+ "loss": 2.878,
+ "step": 679
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.4939727269291862,
+ "learning_rate": 0.0008059013347947874,
+ "loss": 2.7794,
+ "step": 680
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.532980097448278,
+ "learning_rate": 0.0008053132411843857,
+ "loss": 2.7978,
+ "step": 681
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.556732847287261,
+ "learning_rate": 0.0008047244733016521,
+ "loss": 2.8827,
+ "step": 682
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5175738146243685,
+ "learning_rate": 0.0008041350324468573,
+ "loss": 2.8155,
+ "step": 683
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5526779343389316,
+ "learning_rate": 0.0008035449199217583,
+ "loss": 2.7721,
+ "step": 684
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5096027320085115,
+ "learning_rate": 0.0008029541370295957,
+ "loss": 2.877,
+ "step": 685
+ },
+ {
+ "epoch": 0.31,
+ "grad_norm": 0.5215402888330627,
+ "learning_rate": 0.0008023626850750903,
+ "loss": 2.8386,
+ "step": 686
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5357029520261977,
+ "learning_rate": 0.0008017705653644406,
+ "loss": 2.8333,
+ "step": 687
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5481223895987873,
+ "learning_rate": 0.0008011777792053195,
+ "loss": 2.821,
+ "step": 688
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.541773595807566,
+ "learning_rate": 0.0008005843279068725,
+ "loss": 2.7868,
+ "step": 689
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5333916070138732,
+ "learning_rate": 0.000799990212779713,
+ "loss": 2.76,
+ "step": 690
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.6896986380917894,
+ "learning_rate": 0.0007993954351359214,
+ "loss": 2.8998,
+ "step": 691
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5138560292942024,
+ "learning_rate": 0.0007987999962890406,
+ "loss": 2.7831,
+ "step": 692
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5006192365144864,
+ "learning_rate": 0.0007982038975540742,
+ "loss": 2.8001,
+ "step": 693
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5706754312956506,
+ "learning_rate": 0.0007976071402474826,
+ "loss": 2.9346,
+ "step": 694
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5819806935647719,
+ "learning_rate": 0.0007970097256871811,
+ "loss": 2.8146,
+ "step": 695
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5298625281963546,
+ "learning_rate": 0.0007964116551925364,
+ "loss": 2.8184,
+ "step": 696
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.4808085412662244,
+ "learning_rate": 0.0007958129300843637,
+ "loss": 2.7149,
+ "step": 697
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5279065705275813,
+ "learning_rate": 0.0007952135516849239,
+ "loss": 2.818,
+ "step": 698
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.49558916945973513,
+ "learning_rate": 0.0007946135213179207,
+ "loss": 2.784,
+ "step": 699
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5285076010156654,
+ "learning_rate": 0.0007940128403084977,
+ "loss": 2.8013,
+ "step": 700
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.49178833068753275,
+ "learning_rate": 0.0007934115099832355,
+ "loss": 2.766,
+ "step": 701
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.45489547648663475,
+ "learning_rate": 0.0007928095316701483,
+ "loss": 2.7464,
+ "step": 702
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5672774198786119,
+ "learning_rate": 0.0007922069066986819,
+ "loss": 2.8402,
+ "step": 703
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.5559238096112596,
+ "learning_rate": 0.0007916036363997097,
+ "loss": 2.8086,
+ "step": 704
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.49014582917174837,
+ "learning_rate": 0.0007909997221055308,
+ "loss": 2.8316,
+ "step": 705
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.44636188694556145,
+ "learning_rate": 0.0007903951651498658,
+ "loss": 2.7963,
+ "step": 706
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.47986388320100326,
+ "learning_rate": 0.0007897899668678557,
+ "loss": 2.8437,
+ "step": 707
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.4959767047311583,
+ "learning_rate": 0.0007891841285960566,
+ "loss": 2.819,
+ "step": 708
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.49856630474601016,
+ "learning_rate": 0.0007885776516724388,
+ "loss": 2.7593,
+ "step": 709
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.46537743342048815,
+ "learning_rate": 0.0007879705374363831,
+ "loss": 2.8627,
+ "step": 710
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.46719387680375635,
+ "learning_rate": 0.000787362787228677,
+ "loss": 2.8295,
+ "step": 711
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4771797067030745,
+ "learning_rate": 0.0007867544023915134,
+ "loss": 2.7843,
+ "step": 712
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5077335914469979,
+ "learning_rate": 0.0007861453842684861,
+ "loss": 2.837,
+ "step": 713
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4859065777537034,
+ "learning_rate": 0.0007855357342045882,
+ "loss": 2.9189,
+ "step": 714
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.47599378592576,
+ "learning_rate": 0.0007849254535462074,
+ "loss": 2.729,
+ "step": 715
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4792105033293628,
+ "learning_rate": 0.0007843145436411252,
+ "loss": 2.7473,
+ "step": 716
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5094603109719453,
+ "learning_rate": 0.0007837030058385117,
+ "loss": 2.7893,
+ "step": 717
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5159291742558351,
+ "learning_rate": 0.0007830908414889246,
+ "loss": 2.8468,
+ "step": 718
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5226777221205205,
+ "learning_rate": 0.0007824780519443046,
+ "loss": 2.8741,
+ "step": 719
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.48165745332515575,
+ "learning_rate": 0.0007818646385579735,
+ "loss": 2.8219,
+ "step": 720
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.492731216786435,
+ "learning_rate": 0.0007812506026846307,
+ "loss": 2.8265,
+ "step": 721
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5212723481310344,
+ "learning_rate": 0.0007806359456803504,
+ "loss": 2.8288,
+ "step": 722
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5168373653618235,
+ "learning_rate": 0.0007800206689025785,
+ "loss": 2.8805,
+ "step": 723
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.50933421230669,
+ "learning_rate": 0.0007794047737101297,
+ "loss": 2.7754,
+ "step": 724
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.516459366923144,
+ "learning_rate": 0.0007787882614631843,
+ "loss": 2.8378,
+ "step": 725
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4814820725937623,
+ "learning_rate": 0.0007781711335232856,
+ "loss": 2.7943,
+ "step": 726
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.4875185583230406,
+ "learning_rate": 0.0007775533912533363,
+ "loss": 2.7584,
+ "step": 727
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.48607259538361763,
+ "learning_rate": 0.0007769350360175962,
+ "loss": 2.7943,
+ "step": 728
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.5750716834562372,
+ "learning_rate": 0.0007763160691816784,
+ "loss": 2.7651,
+ "step": 729
+ },
+ {
+ "epoch": 0.33,
+ "grad_norm": 0.48445857712796325,
+ "learning_rate": 0.000775696492112547,
+ "loss": 2.8346,
+ "step": 730
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4924724269823932,
+ "learning_rate": 0.0007750763061785137,
+ "loss": 2.6975,
+ "step": 731
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5184944074242422,
+ "learning_rate": 0.000774455512749235,
+ "loss": 2.7624,
+ "step": 732
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.6235652530453863,
+ "learning_rate": 0.0007738341131957085,
+ "loss": 2.7976,
+ "step": 733
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.46422224574631493,
+ "learning_rate": 0.000773212108890271,
+ "loss": 2.785,
+ "step": 734
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5318321963574243,
+ "learning_rate": 0.0007725895012065947,
+ "loss": 2.812,
+ "step": 735
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5907337544025071,
+ "learning_rate": 0.0007719662915196844,
+ "loss": 2.7859,
+ "step": 736
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5053319064383238,
+ "learning_rate": 0.0007713424812058736,
+ "loss": 2.8105,
+ "step": 737
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5016101292225251,
+ "learning_rate": 0.0007707180716428237,
+ "loss": 2.8497,
+ "step": 738
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5635667122980256,
+ "learning_rate": 0.0007700930642095184,
+ "loss": 2.8426,
+ "step": 739
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5347229459146409,
+ "learning_rate": 0.0007694674602862621,
+ "loss": 2.7543,
+ "step": 740
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.49840886650055877,
+ "learning_rate": 0.0007688412612546769,
+ "loss": 2.809,
+ "step": 741
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.49061312191402545,
+ "learning_rate": 0.0007682144684976983,
+ "loss": 2.7986,
+ "step": 742
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4796430732364091,
+ "learning_rate": 0.0007675870833995739,
+ "loss": 2.7881,
+ "step": 743
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5085767803752446,
+ "learning_rate": 0.0007669591073458592,
+ "loss": 2.8191,
+ "step": 744
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4960853465685693,
+ "learning_rate": 0.0007663305417234146,
+ "loss": 2.7734,
+ "step": 745
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4725389933963003,
+ "learning_rate": 0.0007657013879204022,
+ "loss": 2.7513,
+ "step": 746
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5086340202810876,
+ "learning_rate": 0.0007650716473262842,
+ "loss": 2.8695,
+ "step": 747
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4913173591153497,
+ "learning_rate": 0.0007644413213318177,
+ "loss": 2.7421,
+ "step": 748
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5117193247039085,
+ "learning_rate": 0.0007638104113290531,
+ "loss": 2.9339,
+ "step": 749
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.5502113295847995,
+ "learning_rate": 0.0007631789187113303,
+ "loss": 2.8683,
+ "step": 750
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.4831331978157491,
+ "learning_rate": 0.000762546844873276,
+ "loss": 2.7499,
+ "step": 751
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.468882911032173,
+ "learning_rate": 0.0007619141912108007,
+ "loss": 2.7906,
+ "step": 752
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.4899884078767972,
+ "learning_rate": 0.000761280959121095,
+ "loss": 2.8205,
+ "step": 753
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.570115786694176,
+ "learning_rate": 0.0007606471500026273,
+ "loss": 2.893,
+ "step": 754
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5258819361717191,
+ "learning_rate": 0.0007600127652551401,
+ "loss": 2.8404,
+ "step": 755
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5330489775108016,
+ "learning_rate": 0.0007593778062796472,
+ "loss": 2.8236,
+ "step": 756
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5157248399249127,
+ "learning_rate": 0.000758742274478431,
+ "loss": 2.774,
+ "step": 757
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5030996059317102,
+ "learning_rate": 0.0007581061712550381,
+ "loss": 2.7986,
+ "step": 758
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5510672987815763,
+ "learning_rate": 0.0007574694980142779,
+ "loss": 2.8002,
+ "step": 759
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.4892304524594696,
+ "learning_rate": 0.0007568322561622183,
+ "loss": 2.7387,
+ "step": 760
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.43620102638706626,
+ "learning_rate": 0.0007561944471061826,
+ "loss": 2.8067,
+ "step": 761
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5249551128598866,
+ "learning_rate": 0.0007555560722547475,
+ "loss": 2.8052,
+ "step": 762
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5314704328210782,
+ "learning_rate": 0.0007549171330177387,
+ "loss": 2.7803,
+ "step": 763
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5083259243551354,
+ "learning_rate": 0.0007542776308062285,
+ "loss": 2.8439,
+ "step": 764
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.49486921119685484,
+ "learning_rate": 0.0007536375670325325,
+ "loss": 2.8783,
+ "step": 765
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.4948632029244516,
+ "learning_rate": 0.0007529969431102063,
+ "loss": 2.8038,
+ "step": 766
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5613854379150421,
+ "learning_rate": 0.000752355760454043,
+ "loss": 2.7952,
+ "step": 767
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.48408993039962017,
+ "learning_rate": 0.0007517140204800693,
+ "loss": 2.8285,
+ "step": 768
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5613325063670752,
+ "learning_rate": 0.0007510717246055425,
+ "loss": 2.8986,
+ "step": 769
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5057024413578123,
+ "learning_rate": 0.0007504288742489482,
+ "loss": 2.9194,
+ "step": 770
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.5182327909106127,
+ "learning_rate": 0.0007497854708299963,
+ "loss": 2.7443,
+ "step": 771
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.42623461117590616,
+ "learning_rate": 0.0007491415157696178,
+ "loss": 2.7663,
+ "step": 772
+ },
+ {
+ "epoch": 0.35,
+ "grad_norm": 0.4616439913926983,
+ "learning_rate": 0.0007484970104899623,
+ "loss": 2.758,
+ "step": 773
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5216346476601743,
+ "learning_rate": 0.0007478519564143945,
+ "loss": 2.7734,
+ "step": 774
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5263024721605336,
+ "learning_rate": 0.000747206354967491,
+ "loss": 2.8226,
+ "step": 775
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.49211891503665545,
+ "learning_rate": 0.0007465602075750373,
+ "loss": 2.8352,
+ "step": 776
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4640334355314008,
+ "learning_rate": 0.0007459135156640247,
+ "loss": 2.8302,
+ "step": 777
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4916355653630351,
+ "learning_rate": 0.0007452662806626468,
+ "loss": 2.9638,
+ "step": 778
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5682294532098234,
+ "learning_rate": 0.0007446185040002967,
+ "loss": 2.7034,
+ "step": 779
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5083832030962198,
+ "learning_rate": 0.0007439701871075642,
+ "loss": 2.8328,
+ "step": 780
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5496820839619448,
+ "learning_rate": 0.0007433213314162313,
+ "loss": 2.8676,
+ "step": 781
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4942563061066859,
+ "learning_rate": 0.0007426719383592705,
+ "loss": 2.7837,
+ "step": 782
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4696144837532974,
+ "learning_rate": 0.000742022009370841,
+ "loss": 2.7719,
+ "step": 783
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5210832302973146,
+ "learning_rate": 0.0007413715458862855,
+ "loss": 2.7627,
+ "step": 784
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4761344198378059,
+ "learning_rate": 0.0007407205493421272,
+ "loss": 2.806,
+ "step": 785
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5221206952994478,
+ "learning_rate": 0.0007400690211760661,
+ "loss": 2.7719,
+ "step": 786
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.48563540014368656,
+ "learning_rate": 0.0007394169628269771,
+ "loss": 2.7514,
+ "step": 787
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.48413737878074503,
+ "learning_rate": 0.0007387643757349051,
+ "loss": 2.7659,
+ "step": 788
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5118877841303338,
+ "learning_rate": 0.0007381112613410635,
+ "loss": 2.947,
+ "step": 789
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5316034920848375,
+ "learning_rate": 0.0007374576210878298,
+ "loss": 2.8874,
+ "step": 790
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.4883297464158908,
+ "learning_rate": 0.0007368034564187425,
+ "loss": 2.8233,
+ "step": 791
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.513939458135119,
+ "learning_rate": 0.0007361487687784989,
+ "loss": 2.7089,
+ "step": 792
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5785424945432845,
+ "learning_rate": 0.0007354935596129513,
+ "loss": 2.8671,
+ "step": 793
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.48353704239395495,
+ "learning_rate": 0.000734837830369103,
+ "loss": 2.6387,
+ "step": 794
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.5005110059498641,
+ "learning_rate": 0.0007341815824951066,
+ "loss": 2.8208,
+ "step": 795
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.46936386034848254,
+ "learning_rate": 0.0007335248174402597,
+ "loss": 2.8644,
+ "step": 796
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.46979639946920604,
+ "learning_rate": 0.0007328675366550023,
+ "loss": 2.8314,
+ "step": 797
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5113553778805662,
+ "learning_rate": 0.0007322097415909134,
+ "loss": 2.8458,
+ "step": 798
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4583682893078192,
+ "learning_rate": 0.0007315514337007071,
+ "loss": 2.7059,
+ "step": 799
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.511531165607695,
+ "learning_rate": 0.0007308926144382312,
+ "loss": 2.7719,
+ "step": 800
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5340364889597687,
+ "learning_rate": 0.0007302332852584619,
+ "loss": 2.7535,
+ "step": 801
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5270216020215914,
+ "learning_rate": 0.0007295734476175018,
+ "loss": 2.8255,
+ "step": 802
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4695692919781961,
+ "learning_rate": 0.0007289131029725768,
+ "loss": 2.7913,
+ "step": 803
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.48772634714980395,
+ "learning_rate": 0.0007282522527820319,
+ "loss": 2.7327,
+ "step": 804
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.4908532542810394,
+ "learning_rate": 0.000727590898505329,
+ "loss": 2.759,
+ "step": 805
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5392196973957384,
+ "learning_rate": 0.0007269290416030429,
+ "loss": 2.7883,
+ "step": 806
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5229476344661595,
+ "learning_rate": 0.000726266683536859,
+ "loss": 2.7925,
+ "step": 807
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.46506825598686913,
+ "learning_rate": 0.0007256038257695687,
+ "loss": 2.7616,
+ "step": 808
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5105977210525918,
+ "learning_rate": 0.0007249404697650678,
+ "loss": 2.7546,
+ "step": 809
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5221766178000213,
+ "learning_rate": 0.0007242766169883518,
+ "loss": 2.7515,
+ "step": 810
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5520794117426239,
+ "learning_rate": 0.0007236122689055138,
+ "loss": 2.8096,
+ "step": 811
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5449218556525243,
+ "learning_rate": 0.0007229474269837401,
+ "loss": 2.8004,
+ "step": 812
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.526682352936027,
+ "learning_rate": 0.0007222820926913085,
+ "loss": 2.7215,
+ "step": 813
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.580858902393497,
+ "learning_rate": 0.0007216162674975833,
+ "loss": 2.8152,
+ "step": 814
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5564683137969968,
+ "learning_rate": 0.0007209499528730138,
+ "loss": 2.7984,
+ "step": 815
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.5172723636230036,
+ "learning_rate": 0.0007202831502891294,
+ "loss": 2.8307,
+ "step": 816
+ },
+ {
+ "epoch": 0.37,
+ "grad_norm": 0.44820234752374694,
+ "learning_rate": 0.0007196158612185375,
+ "loss": 2.6601,
+ "step": 817
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5291399878434985,
+ "learning_rate": 0.0007189480871349201,
+ "loss": 2.7593,
+ "step": 818
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.48846075033892566,
+ "learning_rate": 0.0007182798295130299,
+ "loss": 2.7638,
+ "step": 819
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5031815366441419,
+ "learning_rate": 0.0007176110898286878,
+ "loss": 2.8129,
+ "step": 820
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5315462339416552,
+ "learning_rate": 0.0007169418695587791,
+ "loss": 2.8152,
+ "step": 821
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5302833561195777,
+ "learning_rate": 0.0007162721701812506,
+ "loss": 2.8827,
+ "step": 822
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.45982473679298547,
+ "learning_rate": 0.0007156019931751072,
+ "loss": 2.7817,
+ "step": 823
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5074921998828663,
+ "learning_rate": 0.0007149313400204082,
+ "loss": 2.6997,
+ "step": 824
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5159064353389416,
+ "learning_rate": 0.0007142602121982653,
+ "loss": 2.7705,
+ "step": 825
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5180972511567747,
+ "learning_rate": 0.0007135886111908379,
+ "loss": 2.8564,
+ "step": 826
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.49085156382430956,
+ "learning_rate": 0.0007129165384813303,
+ "loss": 2.802,
+ "step": 827
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4730921860189482,
+ "learning_rate": 0.0007122439955539888,
+ "loss": 2.7001,
+ "step": 828
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5292831592244239,
+ "learning_rate": 0.0007115709838940983,
+ "loss": 2.7195,
+ "step": 829
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5413440021833602,
+ "learning_rate": 0.0007108975049879785,
+ "loss": 2.8672,
+ "step": 830
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4740637709041407,
+ "learning_rate": 0.0007102235603229814,
+ "loss": 2.7726,
+ "step": 831
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4788801287316272,
+ "learning_rate": 0.000709549151387487,
+ "loss": 2.7526,
+ "step": 832
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.4772345087746463,
+ "learning_rate": 0.0007088742796709013,
+ "loss": 2.7133,
+ "step": 833
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.47261956559665125,
+ "learning_rate": 0.000708198946663652,
+ "loss": 2.7813,
+ "step": 834
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5610903768467415,
+ "learning_rate": 0.0007075231538571856,
+ "loss": 2.8754,
+ "step": 835
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.48776587880542527,
+ "learning_rate": 0.0007068469027439641,
+ "loss": 2.8114,
+ "step": 836
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.49431864674560283,
+ "learning_rate": 0.0007061701948174613,
+ "loss": 2.8075,
+ "step": 837
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.48851198234487625,
+ "learning_rate": 0.0007054930315721606,
+ "loss": 2.8488,
+ "step": 838
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.5604073171311356,
+ "learning_rate": 0.0007048154145035501,
+ "loss": 2.8551,
+ "step": 839
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5194267496909427,
+ "learning_rate": 0.0007041373451081207,
+ "loss": 2.7335,
+ "step": 840
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.4783931437057692,
+ "learning_rate": 0.0007034588248833621,
+ "loss": 2.8199,
+ "step": 841
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.6279616789683025,
+ "learning_rate": 0.0007027798553277595,
+ "loss": 2.722,
+ "step": 842
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5679575025331579,
+ "learning_rate": 0.0007021004379407909,
+ "loss": 2.7367,
+ "step": 843
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5842541136276735,
+ "learning_rate": 0.0007014205742229227,
+ "loss": 2.7953,
+ "step": 844
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5240874226653945,
+ "learning_rate": 0.0007007402656756072,
+ "loss": 2.8248,
+ "step": 845
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.4993236489277078,
+ "learning_rate": 0.0007000595138012797,
+ "loss": 2.7417,
+ "step": 846
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5514996901209408,
+ "learning_rate": 0.0006993783201033535,
+ "loss": 2.7423,
+ "step": 847
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5344575439985406,
+ "learning_rate": 0.0006986966860862182,
+ "loss": 2.6965,
+ "step": 848
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5739598688379087,
+ "learning_rate": 0.000698014613255236,
+ "loss": 2.7276,
+ "step": 849
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5219557469078394,
+ "learning_rate": 0.0006973321031167382,
+ "loss": 2.7379,
+ "step": 850
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5638436367299189,
+ "learning_rate": 0.0006966491571780216,
+ "loss": 2.8022,
+ "step": 851
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5493939763772003,
+ "learning_rate": 0.0006959657769473453,
+ "loss": 2.7537,
+ "step": 852
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5205268381254381,
+ "learning_rate": 0.000695281963933928,
+ "loss": 2.7773,
+ "step": 853
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5078383199467512,
+ "learning_rate": 0.0006945977196479438,
+ "loss": 2.7682,
+ "step": 854
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5014949245052864,
+ "learning_rate": 0.0006939130456005196,
+ "loss": 2.759,
+ "step": 855
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5029223070598458,
+ "learning_rate": 0.0006932279433037311,
+ "loss": 2.806,
+ "step": 856
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.4709328628183458,
+ "learning_rate": 0.0006925424142705997,
+ "loss": 2.6671,
+ "step": 857
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5076559891877385,
+ "learning_rate": 0.0006918564600150896,
+ "loss": 2.7781,
+ "step": 858
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.479546724105253,
+ "learning_rate": 0.0006911700820521042,
+ "loss": 2.7367,
+ "step": 859
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5092651438817656,
+ "learning_rate": 0.0006904832818974818,
+ "loss": 2.8519,
+ "step": 860
+ },
+ {
+ "epoch": 0.39,
+ "grad_norm": 0.5618203714233453,
+ "learning_rate": 0.0006897960610679939,
+ "loss": 2.7845,
+ "step": 861
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.48106458305246025,
+ "learning_rate": 0.0006891084210813407,
+ "loss": 2.8059,
+ "step": 862
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.48366679677231933,
+ "learning_rate": 0.0006884203634561483,
+ "loss": 2.7889,
+ "step": 863
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5492583624585266,
+ "learning_rate": 0.0006877318897119651,
+ "loss": 2.7834,
+ "step": 864
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5054741274256389,
+ "learning_rate": 0.0006870430013692579,
+ "loss": 2.8286,
+ "step": 865
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.48903380592737233,
+ "learning_rate": 0.0006863536999494101,
+ "loss": 2.78,
+ "step": 866
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.4987610224213302,
+ "learning_rate": 0.0006856639869747167,
+ "loss": 2.799,
+ "step": 867
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5381791610275435,
+ "learning_rate": 0.0006849738639683818,
+ "loss": 2.7685,
+ "step": 868
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.49956599991520023,
+ "learning_rate": 0.000684283332454515,
+ "loss": 2.7864,
+ "step": 869
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.466848144196578,
+ "learning_rate": 0.0006835923939581281,
+ "loss": 2.7065,
+ "step": 870
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.48405128109984186,
+ "learning_rate": 0.0006829010500051318,
+ "loss": 2.7659,
+ "step": 871
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5353280000892553,
+ "learning_rate": 0.0006822093021223321,
+ "loss": 2.7429,
+ "step": 872
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.524535401854029,
+ "learning_rate": 0.0006815171518374268,
+ "loss": 2.8012,
+ "step": 873
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5331439117843774,
+ "learning_rate": 0.0006808246006790031,
+ "loss": 2.7336,
+ "step": 874
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5187697764424821,
+ "learning_rate": 0.0006801316501765329,
+ "loss": 2.7901,
+ "step": 875
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5115009666948729,
+ "learning_rate": 0.0006794383018603704,
+ "loss": 2.7998,
+ "step": 876
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5265446854041065,
+ "learning_rate": 0.0006787445572617481,
+ "loss": 2.809,
+ "step": 877
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5165138048260781,
+ "learning_rate": 0.0006780504179127734,
+ "loss": 2.7829,
+ "step": 878
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5388697163620849,
+ "learning_rate": 0.0006773558853464265,
+ "loss": 2.7535,
+ "step": 879
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.49606269040249723,
+ "learning_rate": 0.000676660961096555,
+ "loss": 2.8486,
+ "step": 880
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.48285254073520323,
+ "learning_rate": 0.000675965646697872,
+ "loss": 2.8458,
+ "step": 881
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5037317036941886,
+ "learning_rate": 0.0006752699436859519,
+ "loss": 2.6753,
+ "step": 882
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.5331738557233788,
+ "learning_rate": 0.0006745738535972279,
+ "loss": 2.7633,
+ "step": 883
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5059399351796149,
+ "learning_rate": 0.0006738773779689874,
+ "loss": 2.8242,
+ "step": 884
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.49336863688376975,
+ "learning_rate": 0.0006731805183393696,
+ "loss": 2.6581,
+ "step": 885
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5338208080333934,
+ "learning_rate": 0.0006724832762473618,
+ "loss": 2.7471,
+ "step": 886
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.512212145987671,
+ "learning_rate": 0.0006717856532327956,
+ "loss": 2.7511,
+ "step": 887
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5089148809429237,
+ "learning_rate": 0.0006710876508363444,
+ "loss": 2.7457,
+ "step": 888
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5354632741946636,
+ "learning_rate": 0.0006703892705995189,
+ "loss": 2.6854,
+ "step": 889
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5304678838742364,
+ "learning_rate": 0.0006696905140646647,
+ "loss": 2.7535,
+ "step": 890
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.533702012090405,
+ "learning_rate": 0.0006689913827749581,
+ "loss": 2.8708,
+ "step": 891
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5368945292993025,
+ "learning_rate": 0.0006682918782744032,
+ "loss": 2.6945,
+ "step": 892
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5129913038785266,
+ "learning_rate": 0.0006675920021078282,
+ "loss": 2.7662,
+ "step": 893
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5239537318193066,
+ "learning_rate": 0.0006668917558208823,
+ "loss": 2.751,
+ "step": 894
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.49404334394218646,
+ "learning_rate": 0.0006661911409600321,
+ "loss": 2.744,
+ "step": 895
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5279168802712604,
+ "learning_rate": 0.0006654901590725577,
+ "loss": 2.7897,
+ "step": 896
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.47740648051606127,
+ "learning_rate": 0.0006647888117065507,
+ "loss": 2.776,
+ "step": 897
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.49574907735898144,
+ "learning_rate": 0.0006640871004109086,
+ "loss": 2.7985,
+ "step": 898
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.48851802670010686,
+ "learning_rate": 0.000663385026735334,
+ "loss": 2.7733,
+ "step": 899
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.4901231714809981,
+ "learning_rate": 0.0006626825922303285,
+ "loss": 2.7551,
+ "step": 900
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5340245179409802,
+ "learning_rate": 0.0006619797984471915,
+ "loss": 2.8051,
+ "step": 901
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5176702974142926,
+ "learning_rate": 0.0006612766469380158,
+ "loss": 2.6531,
+ "step": 902
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.515304315905886,
+ "learning_rate": 0.0006605731392556833,
+ "loss": 2.7889,
+ "step": 903
+ },
+ {
+ "epoch": 0.41,
+ "grad_norm": 0.5508704003690635,
+ "learning_rate": 0.0006598692769538637,
+ "loss": 2.784,
+ "step": 904
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4822902267852383,
+ "learning_rate": 0.0006591650615870091,
+ "loss": 2.7872,
+ "step": 905
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4859115343419366,
+ "learning_rate": 0.0006584604947103514,
+ "loss": 2.7676,
+ "step": 906
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5085655835784982,
+ "learning_rate": 0.0006577555778798993,
+ "loss": 2.7351,
+ "step": 907
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4860661074427596,
+ "learning_rate": 0.0006570503126524336,
+ "loss": 2.7096,
+ "step": 908
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4699952016210587,
+ "learning_rate": 0.0006563447005855054,
+ "loss": 2.7034,
+ "step": 909
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5060565648840971,
+ "learning_rate": 0.000655638743237431,
+ "loss": 2.7263,
+ "step": 910
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.47594623453155094,
+ "learning_rate": 0.0006549324421672894,
+ "loss": 2.7143,
+ "step": 911
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.51332013498744,
+ "learning_rate": 0.0006542257989349194,
+ "loss": 2.7109,
+ "step": 912
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.49432391712052376,
+ "learning_rate": 0.0006535188151009142,
+ "loss": 2.7343,
+ "step": 913
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5386518205007791,
+ "learning_rate": 0.0006528114922266204,
+ "loss": 2.7886,
+ "step": 914
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5499047415726279,
+ "learning_rate": 0.0006521038318741327,
+ "loss": 2.7817,
+ "step": 915
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5244798311024138,
+ "learning_rate": 0.0006513958356062912,
+ "loss": 2.8404,
+ "step": 916
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5413925971366917,
+ "learning_rate": 0.0006506875049866781,
+ "loss": 2.6724,
+ "step": 917
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5134299712873807,
+ "learning_rate": 0.0006499788415796137,
+ "loss": 2.7241,
+ "step": 918
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.4569887968329291,
+ "learning_rate": 0.0006492698469501532,
+ "loss": 2.7116,
+ "step": 919
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5224365872653551,
+ "learning_rate": 0.0006485605226640837,
+ "loss": 2.7662,
+ "step": 920
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5322607243526167,
+ "learning_rate": 0.00064785087028792,
+ "loss": 2.7222,
+ "step": 921
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.49290865595176075,
+ "learning_rate": 0.0006471408913889019,
+ "loss": 2.8255,
+ "step": 922
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5333278998203554,
+ "learning_rate": 0.0006464305875349892,
+ "loss": 2.7839,
+ "step": 923
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5121321016797032,
+ "learning_rate": 0.000645719960294861,
+ "loss": 2.7508,
+ "step": 924
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.48885744251733626,
+ "learning_rate": 0.0006450090112379092,
+ "loss": 2.7846,
+ "step": 925
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.5120561873544219,
+ "learning_rate": 0.0006442977419342371,
+ "loss": 2.7736,
+ "step": 926
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5306422198088276,
+ "learning_rate": 0.000643586153954655,
+ "loss": 2.7874,
+ "step": 927
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.565391511598094,
+ "learning_rate": 0.0006428742488706772,
+ "loss": 2.7837,
+ "step": 928
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5100710447487015,
+ "learning_rate": 0.0006421620282545182,
+ "loss": 2.7277,
+ "step": 929
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5068331248190641,
+ "learning_rate": 0.0006414494936790892,
+ "loss": 2.7277,
+ "step": 930
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5268686984396896,
+ "learning_rate": 0.0006407366467179951,
+ "loss": 2.8227,
+ "step": 931
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.6331680104171693,
+ "learning_rate": 0.0006400234889455301,
+ "loss": 2.7951,
+ "step": 932
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5286557193661298,
+ "learning_rate": 0.0006393100219366755,
+ "loss": 2.8292,
+ "step": 933
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.47977126524528535,
+ "learning_rate": 0.0006385962472670953,
+ "loss": 2.8411,
+ "step": 934
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5306774229454788,
+ "learning_rate": 0.0006378821665131328,
+ "loss": 2.8441,
+ "step": 935
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5531360143509272,
+ "learning_rate": 0.0006371677812518072,
+ "loss": 2.7508,
+ "step": 936
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.49015902487161006,
+ "learning_rate": 0.0006364530930608107,
+ "loss": 2.767,
+ "step": 937
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.48197842791490547,
+ "learning_rate": 0.0006357381035185038,
+ "loss": 2.7212,
+ "step": 938
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4960997386760046,
+ "learning_rate": 0.0006350228142039131,
+ "loss": 2.756,
+ "step": 939
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4889530258340255,
+ "learning_rate": 0.000634307226696727,
+ "loss": 2.783,
+ "step": 940
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5057503592741923,
+ "learning_rate": 0.0006335913425772926,
+ "loss": 2.7917,
+ "step": 941
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4943948350428404,
+ "learning_rate": 0.0006328751634266117,
+ "loss": 2.6598,
+ "step": 942
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.46633811615065784,
+ "learning_rate": 0.0006321586908263382,
+ "loss": 2.8788,
+ "step": 943
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5027357980409337,
+ "learning_rate": 0.0006314419263587732,
+ "loss": 2.8161,
+ "step": 944
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4780403947059295,
+ "learning_rate": 0.0006307248716068637,
+ "loss": 2.7568,
+ "step": 945
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.5354898944258344,
+ "learning_rate": 0.0006300075281541964,
+ "loss": 2.782,
+ "step": 946
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4825693139234744,
+ "learning_rate": 0.0006292898975849966,
+ "loss": 2.6409,
+ "step": 947
+ },
+ {
+ "epoch": 0.43,
+ "grad_norm": 0.4953320693853845,
+ "learning_rate": 0.000628571981484123,
+ "loss": 2.7598,
+ "step": 948
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.49583074441018976,
+ "learning_rate": 0.0006278537814370654,
+ "loss": 2.731,
+ "step": 949
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.506943671479962,
+ "learning_rate": 0.0006271352990299406,
+ "loss": 2.7261,
+ "step": 950
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.49590862978012457,
+ "learning_rate": 0.0006264165358494885,
+ "loss": 2.6804,
+ "step": 951
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4836761407691934,
+ "learning_rate": 0.0006256974934830694,
+ "loss": 2.7867,
+ "step": 952
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.49074157533365576,
+ "learning_rate": 0.0006249781735186606,
+ "loss": 2.7114,
+ "step": 953
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.484774695388245,
+ "learning_rate": 0.0006242585775448518,
+ "loss": 2.7948,
+ "step": 954
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4941148175026909,
+ "learning_rate": 0.0006235387071508427,
+ "loss": 2.692,
+ "step": 955
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4847084319877268,
+ "learning_rate": 0.0006228185639264384,
+ "loss": 2.7059,
+ "step": 956
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5071978236179299,
+ "learning_rate": 0.0006220981494620475,
+ "loss": 2.7394,
+ "step": 957
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5052658765389426,
+ "learning_rate": 0.000621377465348677,
+ "loss": 2.7638,
+ "step": 958
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5064093323515364,
+ "learning_rate": 0.0006206565131779293,
+ "loss": 2.7689,
+ "step": 959
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5266116971099526,
+ "learning_rate": 0.0006199352945419994,
+ "loss": 2.7573,
+ "step": 960
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.52258713353477,
+ "learning_rate": 0.00061921381103367,
+ "loss": 2.7787,
+ "step": 961
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.492513899615317,
+ "learning_rate": 0.0006184920642463094,
+ "loss": 2.7568,
+ "step": 962
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4940183946813311,
+ "learning_rate": 0.0006177700557738672,
+ "loss": 2.7518,
+ "step": 963
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.48677685476444593,
+ "learning_rate": 0.0006170477872108706,
+ "loss": 2.6772,
+ "step": 964
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4939143055623651,
+ "learning_rate": 0.0006163252601524216,
+ "loss": 2.7732,
+ "step": 965
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5095451104144231,
+ "learning_rate": 0.0006156024761941925,
+ "loss": 2.679,
+ "step": 966
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.513846128660273,
+ "learning_rate": 0.000614879436932424,
+ "loss": 2.7193,
+ "step": 967
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.4724434249408676,
+ "learning_rate": 0.0006141561439639196,
+ "loss": 2.7792,
+ "step": 968
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.48717747250736454,
+ "learning_rate": 0.0006134325988860433,
+ "loss": 2.794,
+ "step": 969
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.5029007019676133,
+ "learning_rate": 0.0006127088032967165,
+ "loss": 2.7591,
+ "step": 970
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4787567617208102,
+ "learning_rate": 0.0006119847587944131,
+ "loss": 2.784,
+ "step": 971
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4639459141945223,
+ "learning_rate": 0.0006112604669781572,
+ "loss": 2.7821,
+ "step": 972
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4803397577434123,
+ "learning_rate": 0.0006105359294475188,
+ "loss": 2.7296,
+ "step": 973
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5274444754035275,
+ "learning_rate": 0.0006098111478026107,
+ "loss": 2.7894,
+ "step": 974
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.49882630644473697,
+ "learning_rate": 0.0006090861236440848,
+ "loss": 2.7505,
+ "step": 975
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5118656383712906,
+ "learning_rate": 0.0006083608585731282,
+ "loss": 2.7583,
+ "step": 976
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5382357072396842,
+ "learning_rate": 0.0006076353541914609,
+ "loss": 2.7801,
+ "step": 977
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.46786372948033095,
+ "learning_rate": 0.0006069096121013307,
+ "loss": 2.7018,
+ "step": 978
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5176454688066614,
+ "learning_rate": 0.0006061836339055105,
+ "loss": 2.7784,
+ "step": 979
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5218061053688355,
+ "learning_rate": 0.0006054574212072948,
+ "loss": 2.7018,
+ "step": 980
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4683031931534088,
+ "learning_rate": 0.0006047309756104958,
+ "loss": 2.7693,
+ "step": 981
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.48629138149265483,
+ "learning_rate": 0.00060400429871944,
+ "loss": 2.6775,
+ "step": 982
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.557362827835192,
+ "learning_rate": 0.0006032773921389654,
+ "loss": 2.7742,
+ "step": 983
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5254113038041371,
+ "learning_rate": 0.0006025502574744162,
+ "loss": 2.78,
+ "step": 984
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5059265191460307,
+ "learning_rate": 0.000601822896331641,
+ "loss": 2.7791,
+ "step": 985
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.47105009173924467,
+ "learning_rate": 0.0006010953103169883,
+ "loss": 2.6798,
+ "step": 986
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5561196580147597,
+ "learning_rate": 0.0006003675010373034,
+ "loss": 2.8296,
+ "step": 987
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5165045770334074,
+ "learning_rate": 0.0005996394700999246,
+ "loss": 2.9029,
+ "step": 988
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5101993421533687,
+ "learning_rate": 0.0005989112191126794,
+ "loss": 2.7209,
+ "step": 989
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5177171252845812,
+ "learning_rate": 0.0005981827496838822,
+ "loss": 2.7911,
+ "step": 990
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.4724719435665852,
+ "learning_rate": 0.0005974540634223286,
+ "loss": 2.786,
+ "step": 991
+ },
+ {
+ "epoch": 0.45,
+ "grad_norm": 0.5191707403352941,
+ "learning_rate": 0.0005967251619372939,
+ "loss": 2.6762,
+ "step": 992
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5349647184014108,
+ "learning_rate": 0.0005959960468385284,
+ "loss": 2.6603,
+ "step": 993
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5276285975099707,
+ "learning_rate": 0.0005952667197362542,
+ "loss": 2.742,
+ "step": 994
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.48761272423611945,
+ "learning_rate": 0.0005945371822411621,
+ "loss": 2.7236,
+ "step": 995
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.4951388158867274,
+ "learning_rate": 0.0005938074359644063,
+ "loss": 2.7032,
+ "step": 996
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5179825452776126,
+ "learning_rate": 0.0005930774825176034,
+ "loss": 2.7732,
+ "step": 997
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5261671815856594,
+ "learning_rate": 0.0005923473235128268,
+ "loss": 2.6949,
+ "step": 998
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5454529618310321,
+ "learning_rate": 0.0005916169605626042,
+ "loss": 2.7585,
+ "step": 999
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5171200520944219,
+ "learning_rate": 0.0005908863952799134,
+ "loss": 2.7531,
+ "step": 1000
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5108482656003888,
+ "learning_rate": 0.0005901556292781793,
+ "loss": 2.6341,
+ "step": 1001
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5133915239424481,
+ "learning_rate": 0.0005894246641712698,
+ "loss": 2.7207,
+ "step": 1002
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5178007832270196,
+ "learning_rate": 0.0005886935015734931,
+ "loss": 2.6931,
+ "step": 1003
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5261630751402555,
+ "learning_rate": 0.0005879621430995928,
+ "loss": 2.7794,
+ "step": 1004
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5800813607505337,
+ "learning_rate": 0.0005872305903647455,
+ "loss": 2.7248,
+ "step": 1005
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.47443508986314176,
+ "learning_rate": 0.0005864988449845569,
+ "loss": 2.6529,
+ "step": 1006
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5070692657788186,
+ "learning_rate": 0.0005857669085750578,
+ "loss": 2.698,
+ "step": 1007
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.49710768733498933,
+ "learning_rate": 0.0005850347827527013,
+ "loss": 2.8145,
+ "step": 1008
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.48794295035729135,
+ "learning_rate": 0.0005843024691343584,
+ "loss": 2.7522,
+ "step": 1009
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5372183075893122,
+ "learning_rate": 0.000583569969337315,
+ "loss": 2.6309,
+ "step": 1010
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5148698851559652,
+ "learning_rate": 0.0005828372849792686,
+ "loss": 2.7922,
+ "step": 1011
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5253495243002605,
+ "learning_rate": 0.0005821044176783234,
+ "loss": 2.6397,
+ "step": 1012
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.5615607894519717,
+ "learning_rate": 0.0005813713690529886,
+ "loss": 2.8245,
+ "step": 1013
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5584846261092475,
+ "learning_rate": 0.0005806381407221729,
+ "loss": 2.7152,
+ "step": 1014
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.6014166606226927,
+ "learning_rate": 0.0005799047343051826,
+ "loss": 2.7203,
+ "step": 1015
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.525573548062308,
+ "learning_rate": 0.0005791711514217171,
+ "loss": 2.6897,
+ "step": 1016
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5312153749005265,
+ "learning_rate": 0.0005784373936918654,
+ "loss": 2.809,
+ "step": 1017
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5143882775165995,
+ "learning_rate": 0.0005777034627361025,
+ "loss": 2.662,
+ "step": 1018
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5359573245878332,
+ "learning_rate": 0.0005769693601752864,
+ "loss": 2.6911,
+ "step": 1019
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5109889887406951,
+ "learning_rate": 0.0005762350876306537,
+ "loss": 2.7018,
+ "step": 1020
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5172131414778152,
+ "learning_rate": 0.0005755006467238168,
+ "loss": 2.6924,
+ "step": 1021
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5485499278181196,
+ "learning_rate": 0.0005747660390767593,
+ "loss": 2.8091,
+ "step": 1022
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.48054642686647003,
+ "learning_rate": 0.0005740312663118338,
+ "loss": 2.6736,
+ "step": 1023
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.521044664523183,
+ "learning_rate": 0.0005732963300517568,
+ "loss": 2.7532,
+ "step": 1024
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5361701837440671,
+ "learning_rate": 0.0005725612319196064,
+ "loss": 2.8393,
+ "step": 1025
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.49084106343502676,
+ "learning_rate": 0.000571825973538818,
+ "loss": 2.7183,
+ "step": 1026
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5375516542212618,
+ "learning_rate": 0.0005710905565331811,
+ "loss": 2.7335,
+ "step": 1027
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5053029384847482,
+ "learning_rate": 0.0005703549825268353,
+ "loss": 2.7108,
+ "step": 1028
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.519682072044458,
+ "learning_rate": 0.0005696192531442667,
+ "loss": 2.7367,
+ "step": 1029
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4757472288509061,
+ "learning_rate": 0.000568883370010305,
+ "loss": 2.7173,
+ "step": 1030
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5392330537381186,
+ "learning_rate": 0.0005681473347501192,
+ "loss": 2.7214,
+ "step": 1031
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.5315838547724138,
+ "learning_rate": 0.0005674111489892144,
+ "loss": 2.7917,
+ "step": 1032
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.448318881403438,
+ "learning_rate": 0.0005666748143534282,
+ "loss": 2.6982,
+ "step": 1033
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4972833521635573,
+ "learning_rate": 0.0005659383324689266,
+ "loss": 2.8145,
+ "step": 1034
+ },
+ {
+ "epoch": 0.47,
+ "grad_norm": 0.4916731453327063,
+ "learning_rate": 0.0005652017049622007,
+ "loss": 2.7421,
+ "step": 1035
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5171637772665034,
+ "learning_rate": 0.0005644649334600641,
+ "loss": 2.7596,
+ "step": 1036
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.542883455271621,
+ "learning_rate": 0.0005637280195896474,
+ "loss": 2.7189,
+ "step": 1037
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5282758428090761,
+ "learning_rate": 0.0005629909649783961,
+ "loss": 2.6737,
+ "step": 1038
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5089528080633915,
+ "learning_rate": 0.0005622537712540664,
+ "loss": 2.6951,
+ "step": 1039
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5243099019740807,
+ "learning_rate": 0.0005615164400447218,
+ "loss": 2.7161,
+ "step": 1040
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.509150957834291,
+ "learning_rate": 0.0005607789729787294,
+ "loss": 2.7112,
+ "step": 1041
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5631580259985867,
+ "learning_rate": 0.0005600413716847564,
+ "loss": 2.7171,
+ "step": 1042
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5872546894665344,
+ "learning_rate": 0.000559303637791766,
+ "loss": 2.7637,
+ "step": 1043
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.45929386151150653,
+ "learning_rate": 0.0005585657729290151,
+ "loss": 2.6886,
+ "step": 1044
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.44537439497508813,
+ "learning_rate": 0.000557827778726049,
+ "loss": 2.7289,
+ "step": 1045
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5381913357471398,
+ "learning_rate": 0.0005570896568126993,
+ "loss": 2.8449,
+ "step": 1046
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5504937001251007,
+ "learning_rate": 0.0005563514088190788,
+ "loss": 2.7537,
+ "step": 1047
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5667706482242284,
+ "learning_rate": 0.0005556130363755798,
+ "loss": 2.7856,
+ "step": 1048
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5138881491458973,
+ "learning_rate": 0.0005548745411128688,
+ "loss": 2.7844,
+ "step": 1049
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5078894393261245,
+ "learning_rate": 0.0005541359246618835,
+ "loss": 2.6914,
+ "step": 1050
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5473828440912689,
+ "learning_rate": 0.0005533971886538293,
+ "loss": 2.7733,
+ "step": 1051
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5122145383119249,
+ "learning_rate": 0.000552658334720176,
+ "loss": 2.7788,
+ "step": 1052
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5094814064577552,
+ "learning_rate": 0.0005519193644926535,
+ "loss": 2.7697,
+ "step": 1053
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5311239360105956,
+ "learning_rate": 0.0005511802796032485,
+ "loss": 2.6993,
+ "step": 1054
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5056310115445127,
+ "learning_rate": 0.0005504410816842009,
+ "loss": 2.7289,
+ "step": 1055
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.4949936441053868,
+ "learning_rate": 0.0005497017723680009,
+ "loss": 2.7502,
+ "step": 1056
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.5454563029138563,
+ "learning_rate": 0.0005489623532873836,
+ "loss": 2.7846,
+ "step": 1057
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5354216455781943,
+ "learning_rate": 0.0005482228260753273,
+ "loss": 2.7601,
+ "step": 1058
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.48572818637266924,
+ "learning_rate": 0.0005474831923650488,
+ "loss": 2.7872,
+ "step": 1059
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5263026450656437,
+ "learning_rate": 0.00054674345379,
+ "loss": 2.6232,
+ "step": 1060
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5000018369139888,
+ "learning_rate": 0.000546003611983865,
+ "loss": 2.6328,
+ "step": 1061
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5556285101513226,
+ "learning_rate": 0.0005452636685805552,
+ "loss": 2.7592,
+ "step": 1062
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.45809746005774116,
+ "learning_rate": 0.0005445236252142066,
+ "loss": 2.755,
+ "step": 1063
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4946177166899814,
+ "learning_rate": 0.000543783483519176,
+ "loss": 2.7223,
+ "step": 1064
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5428958050953088,
+ "learning_rate": 0.0005430432451300374,
+ "loss": 2.728,
+ "step": 1065
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4984862220837191,
+ "learning_rate": 0.0005423029116815781,
+ "loss": 2.6564,
+ "step": 1066
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5202225535340897,
+ "learning_rate": 0.0005415624848087959,
+ "loss": 2.6913,
+ "step": 1067
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4840020784650056,
+ "learning_rate": 0.000540821966146894,
+ "loss": 2.7645,
+ "step": 1068
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.537218186589317,
+ "learning_rate": 0.0005400813573312793,
+ "loss": 2.6545,
+ "step": 1069
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.515707831667649,
+ "learning_rate": 0.0005393406599975572,
+ "loss": 2.749,
+ "step": 1070
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5549236832823138,
+ "learning_rate": 0.0005385998757815287,
+ "loss": 2.7562,
+ "step": 1071
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5343596595593862,
+ "learning_rate": 0.0005378590063191867,
+ "loss": 2.6751,
+ "step": 1072
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.4858988524039984,
+ "learning_rate": 0.0005371180532467124,
+ "loss": 2.6543,
+ "step": 1073
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5207563675106729,
+ "learning_rate": 0.000536377018200472,
+ "loss": 2.7075,
+ "step": 1074
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5016061501970053,
+ "learning_rate": 0.0005356359028170118,
+ "loss": 2.7058,
+ "step": 1075
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5389093829141083,
+ "learning_rate": 0.0005348947087330564,
+ "loss": 2.6559,
+ "step": 1076
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.500205939366958,
+ "learning_rate": 0.0005341534375855037,
+ "loss": 2.7282,
+ "step": 1077
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.5106159799557759,
+ "learning_rate": 0.0005334120910114222,
+ "loss": 2.6785,
+ "step": 1078
+ },
+ {
+ "epoch": 0.49,
+ "grad_norm": 0.504326286873783,
+ "learning_rate": 0.0005326706706480467,
+ "loss": 2.6254,
+ "step": 1079
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5253973148032602,
+ "learning_rate": 0.0005319291781327749,
+ "loss": 2.7623,
+ "step": 1080
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5000257783006888,
+ "learning_rate": 0.0005311876151031642,
+ "loss": 2.6727,
+ "step": 1081
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5172388003338478,
+ "learning_rate": 0.0005304459831969274,
+ "loss": 2.7316,
+ "step": 1082
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5201798177167883,
+ "learning_rate": 0.0005297042840519294,
+ "loss": 2.7758,
+ "step": 1083
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5309435318399293,
+ "learning_rate": 0.0005289625193061838,
+ "loss": 2.7189,
+ "step": 1084
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.527008962765747,
+ "learning_rate": 0.0005282206905978489,
+ "loss": 2.7294,
+ "step": 1085
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5050458147191232,
+ "learning_rate": 0.0005274787995652246,
+ "loss": 2.6612,
+ "step": 1086
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5502926357148964,
+ "learning_rate": 0.000526736847846748,
+ "loss": 2.7704,
+ "step": 1087
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.490455214150219,
+ "learning_rate": 0.0005259948370809901,
+ "loss": 2.7215,
+ "step": 1088
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5347934406932487,
+ "learning_rate": 0.0005252527689066533,
+ "loss": 2.775,
+ "step": 1089
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5154514383005036,
+ "learning_rate": 0.0005245106449625654,
+ "loss": 2.7741,
+ "step": 1090
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.49788471680941243,
+ "learning_rate": 0.0005237684668876785,
+ "loss": 2.6886,
+ "step": 1091
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5622040490861864,
+ "learning_rate": 0.0005230262363210637,
+ "loss": 2.7918,
+ "step": 1092
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5033855334705318,
+ "learning_rate": 0.0005222839549019079,
+ "loss": 2.7837,
+ "step": 1093
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.48777337194218956,
+ "learning_rate": 0.0005215416242695108,
+ "loss": 2.712,
+ "step": 1094
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5174186221509732,
+ "learning_rate": 0.0005207992460632804,
+ "loss": 2.8,
+ "step": 1095
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5142965927082324,
+ "learning_rate": 0.0005200568219227299,
+ "loss": 2.7445,
+ "step": 1096
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5322244992730195,
+ "learning_rate": 0.000519314353487474,
+ "loss": 2.7588,
+ "step": 1097
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.48693592767653204,
+ "learning_rate": 0.0005185718423972251,
+ "loss": 2.6827,
+ "step": 1098
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.5046692545615626,
+ "learning_rate": 0.0005178292902917898,
+ "loss": 2.7289,
+ "step": 1099
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.47978454683003274,
+ "learning_rate": 0.0005170866988110656,
+ "loss": 2.7013,
+ "step": 1100
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4788540630749357,
+ "learning_rate": 0.0005163440695950362,
+ "loss": 2.6882,
+ "step": 1101
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4612893735815785,
+ "learning_rate": 0.0005156014042837695,
+ "loss": 2.617,
+ "step": 1102
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.4898027943481741,
+ "learning_rate": 0.0005148587045174128,
+ "loss": 2.7188,
+ "step": 1103
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5748413551545054,
+ "learning_rate": 0.0005141159719361891,
+ "loss": 2.6455,
+ "step": 1104
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5042890521101759,
+ "learning_rate": 0.0005133732081803945,
+ "loss": 2.7683,
+ "step": 1105
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.467371098882922,
+ "learning_rate": 0.0005126304148903936,
+ "loss": 2.7084,
+ "step": 1106
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.49812638741410353,
+ "learning_rate": 0.0005118875937066161,
+ "loss": 2.6525,
+ "step": 1107
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5431472257240424,
+ "learning_rate": 0.0005111447462695537,
+ "loss": 2.6888,
+ "step": 1108
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.522739308694978,
+ "learning_rate": 0.0005104018742197557,
+ "loss": 2.7076,
+ "step": 1109
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5522937972499815,
+ "learning_rate": 0.0005096589791978261,
+ "loss": 2.7346,
+ "step": 1110
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5031886324169182,
+ "learning_rate": 0.0005089160628444192,
+ "loss": 2.7199,
+ "step": 1111
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5084434276140318,
+ "learning_rate": 0.0005081731268002371,
+ "loss": 2.7622,
+ "step": 1112
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.511271211085511,
+ "learning_rate": 0.0005074301727060243,
+ "loss": 2.7239,
+ "step": 1113
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.544946721292002,
+ "learning_rate": 0.0005066872022025663,
+ "loss": 2.7618,
+ "step": 1114
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5619258028846829,
+ "learning_rate": 0.0005059442169306844,
+ "loss": 2.6509,
+ "step": 1115
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5295161880183932,
+ "learning_rate": 0.0005052012185312321,
+ "loss": 2.7507,
+ "step": 1116
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5068540600601226,
+ "learning_rate": 0.0005044582086450925,
+ "loss": 2.7624,
+ "step": 1117
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5268615789640267,
+ "learning_rate": 0.0005037151889131737,
+ "loss": 2.6579,
+ "step": 1118
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5153348593645998,
+ "learning_rate": 0.0005029721609764059,
+ "loss": 2.7871,
+ "step": 1119
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5100418837135368,
+ "learning_rate": 0.000502229126475737,
+ "loss": 2.6463,
+ "step": 1120
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5443509651515888,
+ "learning_rate": 0.0005014860870521293,
+ "loss": 2.6746,
+ "step": 1121
+ },
+ {
+ "epoch": 0.51,
+ "grad_norm": 0.5419846571794396,
+ "learning_rate": 0.0005007430443465569,
+ "loss": 2.7415,
+ "step": 1122
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5175078596247342,
+ "learning_rate": 0.0005,
+ "loss": 2.6247,
+ "step": 1123
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5179433140743496,
+ "learning_rate": 0.0004992569556534432,
+ "loss": 2.6846,
+ "step": 1124
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4946276392672176,
+ "learning_rate": 0.0004985139129478707,
+ "loss": 2.6575,
+ "step": 1125
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.48893495857371877,
+ "learning_rate": 0.0004977708735242633,
+ "loss": 2.7387,
+ "step": 1126
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4955502765030083,
+ "learning_rate": 0.0004970278390235942,
+ "loss": 2.7535,
+ "step": 1127
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5168723682375538,
+ "learning_rate": 0.0004962848110868262,
+ "loss": 2.8069,
+ "step": 1128
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5428503966348986,
+ "learning_rate": 0.0004955417913549074,
+ "loss": 2.7472,
+ "step": 1129
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5306641995533713,
+ "learning_rate": 0.0004947987814687679,
+ "loss": 2.6359,
+ "step": 1130
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.48015854367251193,
+ "learning_rate": 0.0004940557830693157,
+ "loss": 2.6359,
+ "step": 1131
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.48039658976659516,
+ "learning_rate": 0.0004933127977974338,
+ "loss": 2.6904,
+ "step": 1132
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.4853648270056312,
+ "learning_rate": 0.0004925698272939757,
+ "loss": 2.6597,
+ "step": 1133
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5279285976556198,
+ "learning_rate": 0.0004918268731997632,
+ "loss": 2.7264,
+ "step": 1134
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5161431000228389,
+ "learning_rate": 0.0004910839371555809,
+ "loss": 2.7227,
+ "step": 1135
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.49553525141278476,
+ "learning_rate": 0.0004903410208021739,
+ "loss": 2.6899,
+ "step": 1136
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5059000734394524,
+ "learning_rate": 0.0004895981257802443,
+ "loss": 2.7795,
+ "step": 1137
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5350253735217294,
+ "learning_rate": 0.0004888552537304463,
+ "loss": 2.7082,
+ "step": 1138
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5019712060738322,
+ "learning_rate": 0.00048811240629338394,
+ "loss": 2.6921,
+ "step": 1139
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.523206090444028,
+ "learning_rate": 0.00048736958510960663,
+ "loss": 2.6801,
+ "step": 1140
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5164272224817171,
+ "learning_rate": 0.00048662679181960564,
+ "loss": 2.738,
+ "step": 1141
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5830602270625335,
+ "learning_rate": 0.00048588402806381094,
+ "loss": 2.7607,
+ "step": 1142
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.5298383467699622,
+ "learning_rate": 0.0004851412954825874,
+ "loss": 2.717,
+ "step": 1143
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.512246394627895,
+ "learning_rate": 0.00048439859571623034,
+ "loss": 2.6335,
+ "step": 1144
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5011379483814702,
+ "learning_rate": 0.00048365593040496373,
+ "loss": 2.6826,
+ "step": 1145
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5281686013861873,
+ "learning_rate": 0.00048291330118893443,
+ "loss": 2.7771,
+ "step": 1146
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5102472914604643,
+ "learning_rate": 0.0004821707097082102,
+ "loss": 2.6045,
+ "step": 1147
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5091022957421559,
+ "learning_rate": 0.0004814281576027749,
+ "loss": 2.8107,
+ "step": 1148
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5303007055945778,
+ "learning_rate": 0.000480685646512526,
+ "loss": 2.7708,
+ "step": 1149
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.49490887220412944,
+ "learning_rate": 0.00047994317807727025,
+ "loss": 2.6633,
+ "step": 1150
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.513011380391448,
+ "learning_rate": 0.00047920075393671974,
+ "loss": 2.7299,
+ "step": 1151
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5059021296300894,
+ "learning_rate": 0.0004784583757304893,
+ "loss": 2.679,
+ "step": 1152
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.48525591061607676,
+ "learning_rate": 0.00047771604509809214,
+ "loss": 2.6396,
+ "step": 1153
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5659968954298682,
+ "learning_rate": 0.0004769737636789364,
+ "loss": 2.7725,
+ "step": 1154
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5279178618523368,
+ "learning_rate": 0.00047623153311232157,
+ "loss": 2.7942,
+ "step": 1155
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5146934688183075,
+ "learning_rate": 0.0004754893550374346,
+ "loss": 2.6455,
+ "step": 1156
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5003048276789507,
+ "learning_rate": 0.00047474723109334685,
+ "loss": 2.7766,
+ "step": 1157
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5297617102579024,
+ "learning_rate": 0.00047400516291900993,
+ "loss": 2.6946,
+ "step": 1158
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5477355727573853,
+ "learning_rate": 0.0004732631521532522,
+ "loss": 2.7106,
+ "step": 1159
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.545802547383211,
+ "learning_rate": 0.0004725212004347755,
+ "loss": 2.7187,
+ "step": 1160
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5231349493200539,
+ "learning_rate": 0.00047177930940215095,
+ "loss": 2.5814,
+ "step": 1161
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5099141050656486,
+ "learning_rate": 0.00047103748069381624,
+ "loss": 2.6898,
+ "step": 1162
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4930609535375999,
+ "learning_rate": 0.0004702957159480707,
+ "loss": 2.5817,
+ "step": 1163
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5110460153161239,
+ "learning_rate": 0.00046955401680307267,
+ "loss": 2.7144,
+ "step": 1164
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.5264240278823245,
+ "learning_rate": 0.0004688123848968359,
+ "loss": 2.6491,
+ "step": 1165
+ },
+ {
+ "epoch": 0.53,
+ "grad_norm": 0.4861388795181152,
+ "learning_rate": 0.00046807082186722516,
+ "loss": 2.7457,
+ "step": 1166
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.538320758121501,
+ "learning_rate": 0.0004673293293519535,
+ "loss": 2.7267,
+ "step": 1167
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5141531601284485,
+ "learning_rate": 0.00046658790898857806,
+ "loss": 2.6588,
+ "step": 1168
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5393189514744423,
+ "learning_rate": 0.0004658465624144963,
+ "loss": 2.6439,
+ "step": 1169
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5075406485654212,
+ "learning_rate": 0.0004651052912669438,
+ "loss": 2.6696,
+ "step": 1170
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5084751938532551,
+ "learning_rate": 0.0004643640971829883,
+ "loss": 2.6247,
+ "step": 1171
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5521555175386781,
+ "learning_rate": 0.0004636229817995281,
+ "loss": 2.7402,
+ "step": 1172
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4957649974272555,
+ "learning_rate": 0.0004628819467532876,
+ "loss": 2.6356,
+ "step": 1173
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5433136295282748,
+ "learning_rate": 0.00046214099368081335,
+ "loss": 2.6873,
+ "step": 1174
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.4912787340676148,
+ "learning_rate": 0.0004614001242184714,
+ "loss": 2.6935,
+ "step": 1175
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5217062443217886,
+ "learning_rate": 0.000460659340002443,
+ "loss": 2.7332,
+ "step": 1176
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5442638613752073,
+ "learning_rate": 0.00045991864266872073,
+ "loss": 2.7246,
+ "step": 1177
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5493579234487449,
+ "learning_rate": 0.00045917803385310595,
+ "loss": 2.6759,
+ "step": 1178
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5076552213401087,
+ "learning_rate": 0.00045843751519120417,
+ "loss": 2.6131,
+ "step": 1179
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5152874355491169,
+ "learning_rate": 0.00045769708831842193,
+ "loss": 2.6546,
+ "step": 1180
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5248816597544856,
+ "learning_rate": 0.00045695675486996266,
+ "loss": 2.7054,
+ "step": 1181
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5289329890099478,
+ "learning_rate": 0.00045621651648082405,
+ "loss": 2.777,
+ "step": 1182
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5524923456607232,
+ "learning_rate": 0.00045547637478579356,
+ "loss": 2.7582,
+ "step": 1183
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5140880299388174,
+ "learning_rate": 0.0004547363314194449,
+ "loss": 2.7499,
+ "step": 1184
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5260468960845509,
+ "learning_rate": 0.000453996388016135,
+ "loss": 2.7139,
+ "step": 1185
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5697662426989528,
+ "learning_rate": 0.0004532565462099999,
+ "loss": 2.7058,
+ "step": 1186
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5446586048220051,
+ "learning_rate": 0.0004525168076349513,
+ "loss": 2.7671,
+ "step": 1187
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.5154600575004109,
+ "learning_rate": 0.0004517771739246729,
+ "loss": 2.7239,
+ "step": 1188
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5092276381940357,
+ "learning_rate": 0.0004510376467126165,
+ "loss": 2.6469,
+ "step": 1189
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5340423602240948,
+ "learning_rate": 0.0004502982276319992,
+ "loss": 2.7083,
+ "step": 1190
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5021036478189213,
+ "learning_rate": 0.0004495589183157991,
+ "loss": 2.7102,
+ "step": 1191
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5278837389171002,
+ "learning_rate": 0.0004488197203967517,
+ "loss": 2.6987,
+ "step": 1192
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5239167410416333,
+ "learning_rate": 0.0004480806355073467,
+ "loss": 2.7199,
+ "step": 1193
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5537995592297151,
+ "learning_rate": 0.000447341665279824,
+ "loss": 2.6851,
+ "step": 1194
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4983249639717384,
+ "learning_rate": 0.0004466028113461708,
+ "loss": 2.6985,
+ "step": 1195
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4929675781049322,
+ "learning_rate": 0.0004458640753381167,
+ "loss": 2.6313,
+ "step": 1196
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5089935520278261,
+ "learning_rate": 0.0004451254588871313,
+ "loss": 2.733,
+ "step": 1197
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5451378729112433,
+ "learning_rate": 0.0004443869636244203,
+ "loss": 2.7469,
+ "step": 1198
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5320232607590252,
+ "learning_rate": 0.0004436485911809212,
+ "loss": 2.6481,
+ "step": 1199
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5317670289041627,
+ "learning_rate": 0.00044291034318730087,
+ "loss": 2.7457,
+ "step": 1200
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5156506956502293,
+ "learning_rate": 0.0004421722212739511,
+ "loss": 2.7613,
+ "step": 1201
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5272163387861835,
+ "learning_rate": 0.0004414342270709848,
+ "loss": 2.7265,
+ "step": 1202
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5275098425681611,
+ "learning_rate": 0.00044069636220823397,
+ "loss": 2.5964,
+ "step": 1203
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5240106078741427,
+ "learning_rate": 0.0004399586283152437,
+ "loss": 2.7432,
+ "step": 1204
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.49511084326793237,
+ "learning_rate": 0.0004392210270212706,
+ "loss": 2.6584,
+ "step": 1205
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5208526920810025,
+ "learning_rate": 0.00043848355995527825,
+ "loss": 2.7275,
+ "step": 1206
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.5191648086883246,
+ "learning_rate": 0.00043774622874593374,
+ "loss": 2.7275,
+ "step": 1207
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.4976784460103935,
+ "learning_rate": 0.000437009035021604,
+ "loss": 2.749,
+ "step": 1208
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.48846678466292937,
+ "learning_rate": 0.00043627198041035274,
+ "loss": 2.6654,
+ "step": 1209
+ },
+ {
+ "epoch": 0.55,
+ "grad_norm": 0.48387609147398325,
+ "learning_rate": 0.00043553506653993597,
+ "loss": 2.6456,
+ "step": 1210
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.530094160468025,
+ "learning_rate": 0.0004347982950377992,
+ "loss": 2.7032,
+ "step": 1211
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5136700628059662,
+ "learning_rate": 0.0004340616675310735,
+ "loss": 2.73,
+ "step": 1212
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5234312619642245,
+ "learning_rate": 0.00043332518564657193,
+ "loss": 2.7077,
+ "step": 1213
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5549987779837912,
+ "learning_rate": 0.0004325888510107856,
+ "loss": 2.7254,
+ "step": 1214
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.536956217722958,
+ "learning_rate": 0.0004318526652498809,
+ "loss": 2.6648,
+ "step": 1215
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5266642997036883,
+ "learning_rate": 0.00043111662998969523,
+ "loss": 2.6836,
+ "step": 1216
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5205419535470811,
+ "learning_rate": 0.0004303807468557335,
+ "loss": 2.7394,
+ "step": 1217
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5162685973116814,
+ "learning_rate": 0.0004296450174731648,
+ "loss": 2.6234,
+ "step": 1218
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.47005263283225,
+ "learning_rate": 0.0004289094434668188,
+ "loss": 2.7119,
+ "step": 1219
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4856691255220598,
+ "learning_rate": 0.00042817402646118185,
+ "loss": 2.5816,
+ "step": 1220
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5175805949037477,
+ "learning_rate": 0.0004274387680803936,
+ "loss": 2.7353,
+ "step": 1221
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5211854619477375,
+ "learning_rate": 0.00042670366994824327,
+ "loss": 2.7057,
+ "step": 1222
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5238526713577384,
+ "learning_rate": 0.0004259687336881663,
+ "loss": 2.7197,
+ "step": 1223
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5130864930776108,
+ "learning_rate": 0.0004252339609232408,
+ "loss": 2.7056,
+ "step": 1224
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4709359184046822,
+ "learning_rate": 0.0004244993532761834,
+ "loss": 2.6671,
+ "step": 1225
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5006742495645969,
+ "learning_rate": 0.00042376491236934634,
+ "loss": 2.6794,
+ "step": 1226
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5306017315906422,
+ "learning_rate": 0.0004230306398247136,
+ "loss": 2.7454,
+ "step": 1227
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.4902486378456207,
+ "learning_rate": 0.0004222965372638976,
+ "loss": 2.7657,
+ "step": 1228
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.48884495835320135,
+ "learning_rate": 0.0004215626063081348,
+ "loss": 2.7078,
+ "step": 1229
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5369922979048567,
+ "learning_rate": 0.000420828848578283,
+ "loss": 2.6114,
+ "step": 1230
+ },
+ {
+ "epoch": 0.56,
+ "grad_norm": 0.5651909198412906,
+ "learning_rate": 0.0004200952656948175,
+ "loss": 2.705,
+ "step": 1231
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5091114658115851,
+ "learning_rate": 0.0004193618592778272,
+ "loss": 2.763,
+ "step": 1232
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5391752109391068,
+ "learning_rate": 0.0004186286309470116,
+ "loss": 2.7214,
+ "step": 1233
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5266451505884581,
+ "learning_rate": 0.0004178955823216767,
+ "loss": 2.6838,
+ "step": 1234
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5079695365655955,
+ "learning_rate": 0.00041716271502073137,
+ "loss": 2.6945,
+ "step": 1235
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5115712255812047,
+ "learning_rate": 0.000416430030662685,
+ "loss": 2.6174,
+ "step": 1236
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.47719038433776967,
+ "learning_rate": 0.00041569753086564173,
+ "loss": 2.615,
+ "step": 1237
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.48504822455139973,
+ "learning_rate": 0.0004149652172472988,
+ "loss": 2.6736,
+ "step": 1238
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5027945111144037,
+ "learning_rate": 0.00041423309142494234,
+ "loss": 2.7765,
+ "step": 1239
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5087264453689427,
+ "learning_rate": 0.0004135011550154433,
+ "loss": 2.6747,
+ "step": 1240
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.4963248329607529,
+ "learning_rate": 0.0004127694096352546,
+ "loss": 2.6672,
+ "step": 1241
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.49245570308527503,
+ "learning_rate": 0.00041203785690040743,
+ "loss": 2.6622,
+ "step": 1242
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.511324616946125,
+ "learning_rate": 0.00041130649842650694,
+ "loss": 2.7645,
+ "step": 1243
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.506264914707819,
+ "learning_rate": 0.00041057533582873016,
+ "loss": 2.5778,
+ "step": 1244
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5247516709137736,
+ "learning_rate": 0.0004098443707218208,
+ "loss": 2.7934,
+ "step": 1245
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5121379808649518,
+ "learning_rate": 0.00040911360472008673,
+ "loss": 2.6048,
+ "step": 1246
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5213086279626764,
+ "learning_rate": 0.0004083830394373959,
+ "loss": 2.5874,
+ "step": 1247
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5002438181372962,
+ "learning_rate": 0.00040765267648717324,
+ "loss": 2.6725,
+ "step": 1248
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5331152391687023,
+ "learning_rate": 0.00040692251748239677,
+ "loss": 2.7529,
+ "step": 1249
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.4922541965967873,
+ "learning_rate": 0.00040619256403559383,
+ "loss": 2.6033,
+ "step": 1250
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5153929595123157,
+ "learning_rate": 0.000405462817758838,
+ "loss": 2.718,
+ "step": 1251
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.5362437889613196,
+ "learning_rate": 0.0004047332802637457,
+ "loss": 2.6772,
+ "step": 1252
+ },
+ {
+ "epoch": 0.57,
+ "grad_norm": 0.537617974282814,
+ "learning_rate": 0.00040400395316147157,
+ "loss": 2.6029,
+ "step": 1253
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5302414105894713,
+ "learning_rate": 0.00040327483806270627,
+ "loss": 2.7248,
+ "step": 1254
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5411024000098807,
+ "learning_rate": 0.0004025459365776715,
+ "loss": 2.7704,
+ "step": 1255
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5108937202580804,
+ "learning_rate": 0.00040181725031611794,
+ "loss": 2.6939,
+ "step": 1256
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5417170269937848,
+ "learning_rate": 0.0004010887808873206,
+ "loss": 2.5715,
+ "step": 1257
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5265344253058065,
+ "learning_rate": 0.00040036052990007553,
+ "loss": 2.7045,
+ "step": 1258
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.49242870611440315,
+ "learning_rate": 0.0003996324989626967,
+ "loss": 2.7156,
+ "step": 1259
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5262843821454762,
+ "learning_rate": 0.00039890468968301166,
+ "loss": 2.6944,
+ "step": 1260
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5062871500981689,
+ "learning_rate": 0.0003981771036683591,
+ "loss": 2.688,
+ "step": 1261
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5613320881888487,
+ "learning_rate": 0.00039744974252558385,
+ "loss": 2.6829,
+ "step": 1262
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.528478583718188,
+ "learning_rate": 0.00039672260786103463,
+ "loss": 2.6247,
+ "step": 1263
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.4864969756943371,
+ "learning_rate": 0.00039599570128055994,
+ "loss": 2.6176,
+ "step": 1264
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5663985879252396,
+ "learning_rate": 0.0003952690243895044,
+ "loss": 2.8156,
+ "step": 1265
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5275365590680847,
+ "learning_rate": 0.0003945425787927054,
+ "loss": 2.7165,
+ "step": 1266
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5493667599593538,
+ "learning_rate": 0.00039381636609448975,
+ "loss": 2.6462,
+ "step": 1267
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5611665351308049,
+ "learning_rate": 0.0003930903878986693,
+ "loss": 2.6689,
+ "step": 1268
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5404360719910868,
+ "learning_rate": 0.00039236464580853916,
+ "loss": 2.6527,
+ "step": 1269
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.505174874369067,
+ "learning_rate": 0.0003916391414268718,
+ "loss": 2.6826,
+ "step": 1270
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5639900241376554,
+ "learning_rate": 0.00039091387635591536,
+ "loss": 2.7014,
+ "step": 1271
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.49449134587345966,
+ "learning_rate": 0.0003901888521973894,
+ "loss": 2.5968,
+ "step": 1272
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5143663600475958,
+ "learning_rate": 0.0003894640705524813,
+ "loss": 2.684,
+ "step": 1273
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.48680340727192584,
+ "learning_rate": 0.00038873953302184284,
+ "loss": 2.5858,
+ "step": 1274
+ },
+ {
+ "epoch": 0.58,
+ "grad_norm": 0.5056531725024779,
+ "learning_rate": 0.000388015241205587,
+ "loss": 2.668,
+ "step": 1275
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.522206055277458,
+ "learning_rate": 0.00038729119670328355,
+ "loss": 2.6914,
+ "step": 1276
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4971525583490198,
+ "learning_rate": 0.00038656740111395665,
+ "loss": 2.6147,
+ "step": 1277
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.47069233433337143,
+ "learning_rate": 0.00038584385603608053,
+ "loss": 2.6734,
+ "step": 1278
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.48432379552169946,
+ "learning_rate": 0.00038512056306757615,
+ "loss": 2.5903,
+ "step": 1279
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.506206348808886,
+ "learning_rate": 0.0003843975238058075,
+ "loss": 2.6175,
+ "step": 1280
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.545245898521085,
+ "learning_rate": 0.00038367473984757863,
+ "loss": 2.6534,
+ "step": 1281
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.49604806389875744,
+ "learning_rate": 0.0003829522127891296,
+ "loss": 2.6956,
+ "step": 1282
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5158270886662941,
+ "learning_rate": 0.0003822299442261329,
+ "loss": 2.6417,
+ "step": 1283
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5059214299850134,
+ "learning_rate": 0.00038150793575369063,
+ "loss": 2.675,
+ "step": 1284
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5224810009016285,
+ "learning_rate": 0.0003807861889663299,
+ "loss": 2.7552,
+ "step": 1285
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5195794970194385,
+ "learning_rate": 0.0003800647054580006,
+ "loss": 2.6491,
+ "step": 1286
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5148188704483899,
+ "learning_rate": 0.00037934348682207064,
+ "loss": 2.695,
+ "step": 1287
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5278647166587669,
+ "learning_rate": 0.00037862253465132306,
+ "loss": 2.6972,
+ "step": 1288
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4892436913639856,
+ "learning_rate": 0.00037790185053795245,
+ "loss": 2.7493,
+ "step": 1289
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5352080394436993,
+ "learning_rate": 0.0003771814360735616,
+ "loss": 2.7055,
+ "step": 1290
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5373346606741812,
+ "learning_rate": 0.00037646129284915755,
+ "loss": 2.6753,
+ "step": 1291
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5249264201475462,
+ "learning_rate": 0.00037574142245514825,
+ "loss": 2.7573,
+ "step": 1292
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.48280529815600787,
+ "learning_rate": 0.0003750218264813393,
+ "loss": 2.7079,
+ "step": 1293
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5196471508521048,
+ "learning_rate": 0.0003743025065169305,
+ "loss": 2.6802,
+ "step": 1294
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.4964173378444257,
+ "learning_rate": 0.0003735834641505116,
+ "loss": 2.6752,
+ "step": 1295
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5162613483990264,
+ "learning_rate": 0.00037286470097005954,
+ "loss": 2.7268,
+ "step": 1296
+ },
+ {
+ "epoch": 0.59,
+ "grad_norm": 0.5018651588395292,
+ "learning_rate": 0.0003721462185629347,
+ "loss": 2.7103,
+ "step": 1297
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5507834983005782,
+ "learning_rate": 0.00037142801851587707,
+ "loss": 2.6567,
+ "step": 1298
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5154756264521153,
+ "learning_rate": 0.00037071010241500357,
+ "loss": 2.7171,
+ "step": 1299
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5029247817534002,
+ "learning_rate": 0.00036999247184580383,
+ "loss": 2.6644,
+ "step": 1300
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.50994069274581,
+ "learning_rate": 0.00036927512839313636,
+ "loss": 2.6381,
+ "step": 1301
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.536276373105891,
+ "learning_rate": 0.0003685580736412268,
+ "loss": 2.67,
+ "step": 1302
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5475948271066223,
+ "learning_rate": 0.000367841309173662,
+ "loss": 2.5782,
+ "step": 1303
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.49993086294162664,
+ "learning_rate": 0.0003671248365733883,
+ "loss": 2.6694,
+ "step": 1304
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5278557948829261,
+ "learning_rate": 0.0003664086574227075,
+ "loss": 2.5884,
+ "step": 1305
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5540280963053137,
+ "learning_rate": 0.000365692773303273,
+ "loss": 2.6333,
+ "step": 1306
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5372922526398834,
+ "learning_rate": 0.00036497718579608696,
+ "loss": 2.6449,
+ "step": 1307
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5214245722065235,
+ "learning_rate": 0.0003642618964814964,
+ "loss": 2.6123,
+ "step": 1308
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5352511013531179,
+ "learning_rate": 0.00036354690693918946,
+ "loss": 2.6486,
+ "step": 1309
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5453198872038278,
+ "learning_rate": 0.00036283221874819284,
+ "loss": 2.7267,
+ "step": 1310
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5472038402482459,
+ "learning_rate": 0.0003621178334868672,
+ "loss": 2.6638,
+ "step": 1311
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5259870423086725,
+ "learning_rate": 0.00036140375273290476,
+ "loss": 2.7166,
+ "step": 1312
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5231931408709464,
+ "learning_rate": 0.0003606899780633245,
+ "loss": 2.6994,
+ "step": 1313
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5223921064242383,
+ "learning_rate": 0.0003599765110544699,
+ "loss": 2.722,
+ "step": 1314
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5271834978796934,
+ "learning_rate": 0.0003592633532820052,
+ "loss": 2.681,
+ "step": 1315
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5082674225390232,
+ "learning_rate": 0.0003585505063209109,
+ "loss": 2.6802,
+ "step": 1316
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.4986074496910536,
+ "learning_rate": 0.00035783797174548194,
+ "loss": 2.6523,
+ "step": 1317
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5051620073045062,
+ "learning_rate": 0.00035712575112932277,
+ "loss": 2.6989,
+ "step": 1318
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 0.5160093602678967,
+ "learning_rate": 0.000356413846045345,
+ "loss": 2.6912,
+ "step": 1319
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5213383514155379,
+ "learning_rate": 0.000355702258065763,
+ "loss": 2.584,
+ "step": 1320
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5340990832988344,
+ "learning_rate": 0.0003549909887620909,
+ "loss": 2.6896,
+ "step": 1321
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5067360663370191,
+ "learning_rate": 0.00035428003970513914,
+ "loss": 2.6335,
+ "step": 1322
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5079787276852579,
+ "learning_rate": 0.00035356941246501085,
+ "loss": 2.6712,
+ "step": 1323
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.485577544885249,
+ "learning_rate": 0.0003528591086110984,
+ "loss": 2.6764,
+ "step": 1324
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5382166800900163,
+ "learning_rate": 0.00035214912971208,
+ "loss": 2.7166,
+ "step": 1325
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5438082787138756,
+ "learning_rate": 0.0003514394773359163,
+ "loss": 2.6632,
+ "step": 1326
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5175385265452299,
+ "learning_rate": 0.0003507301530498469,
+ "loss": 2.7192,
+ "step": 1327
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5447481287357926,
+ "learning_rate": 0.00035002115842038646,
+ "loss": 2.734,
+ "step": 1328
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.536057808840742,
+ "learning_rate": 0.00034931249501332195,
+ "loss": 2.7175,
+ "step": 1329
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5099243388229495,
+ "learning_rate": 0.00034860416439370885,
+ "loss": 2.7495,
+ "step": 1330
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5319499433274613,
+ "learning_rate": 0.0003478961681258674,
+ "loss": 2.7375,
+ "step": 1331
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5339495623948575,
+ "learning_rate": 0.0003471885077733796,
+ "loss": 2.6261,
+ "step": 1332
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5046021856215106,
+ "learning_rate": 0.0003464811848990859,
+ "loss": 2.7132,
+ "step": 1333
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5003942937815565,
+ "learning_rate": 0.00034577420106508063,
+ "loss": 2.6119,
+ "step": 1334
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5299897735958715,
+ "learning_rate": 0.0003450675578327105,
+ "loss": 2.7272,
+ "step": 1335
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5343157362738072,
+ "learning_rate": 0.000344361256762569,
+ "loss": 2.6606,
+ "step": 1336
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5244329880484088,
+ "learning_rate": 0.00034365529941449456,
+ "loss": 2.7595,
+ "step": 1337
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.49156570465653665,
+ "learning_rate": 0.0003429496873475664,
+ "loss": 2.6735,
+ "step": 1338
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.4999738343605983,
+ "learning_rate": 0.0003422444221201009,
+ "loss": 2.6461,
+ "step": 1339
+ },
+ {
+ "epoch": 0.61,
+ "grad_norm": 0.5233411077451025,
+ "learning_rate": 0.0003415395052896487,
+ "loss": 2.6833,
+ "step": 1340
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5219460272096302,
+ "learning_rate": 0.0003408349384129912,
+ "loss": 2.6299,
+ "step": 1341
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.6077928523510566,
+ "learning_rate": 0.00034013072304613643,
+ "loss": 2.7924,
+ "step": 1342
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5146843252289197,
+ "learning_rate": 0.00033942686074431674,
+ "loss": 2.7318,
+ "step": 1343
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.522018439418992,
+ "learning_rate": 0.0003387233530619843,
+ "loss": 2.6913,
+ "step": 1344
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5101831099857319,
+ "learning_rate": 0.0003380202015528084,
+ "loss": 2.5946,
+ "step": 1345
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.46660384265215094,
+ "learning_rate": 0.0003373174077696715,
+ "loss": 2.6945,
+ "step": 1346
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.516312427764236,
+ "learning_rate": 0.0003366149732646661,
+ "loss": 2.6382,
+ "step": 1347
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5095199457046187,
+ "learning_rate": 0.00033591289958909143,
+ "loss": 2.6688,
+ "step": 1348
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.4885245746572849,
+ "learning_rate": 0.00033521118829344954,
+ "loss": 2.5742,
+ "step": 1349
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.521895943365929,
+ "learning_rate": 0.0003345098409274423,
+ "loss": 2.7726,
+ "step": 1350
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5113189581344126,
+ "learning_rate": 0.00033380885903996796,
+ "loss": 2.6056,
+ "step": 1351
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5521376184530942,
+ "learning_rate": 0.00033310824417911766,
+ "loss": 2.6662,
+ "step": 1352
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.526853057352678,
+ "learning_rate": 0.00033240799789217184,
+ "loss": 2.7882,
+ "step": 1353
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5413493777020469,
+ "learning_rate": 0.00033170812172559694,
+ "loss": 2.7082,
+ "step": 1354
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5165695197844109,
+ "learning_rate": 0.000331008617225042,
+ "loss": 2.6602,
+ "step": 1355
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5193692697977721,
+ "learning_rate": 0.0003303094859353355,
+ "loss": 2.6117,
+ "step": 1356
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5255952757460676,
+ "learning_rate": 0.0003296107294004812,
+ "loss": 2.6354,
+ "step": 1357
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5433918967408917,
+ "learning_rate": 0.0003289123491636559,
+ "loss": 2.5422,
+ "step": 1358
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5341109023350894,
+ "learning_rate": 0.00032821434676720443,
+ "loss": 2.5804,
+ "step": 1359
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.529402305940971,
+ "learning_rate": 0.00032751672375263836,
+ "loss": 2.6891,
+ "step": 1360
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5016619793424085,
+ "learning_rate": 0.0003268194816606305,
+ "loss": 2.6179,
+ "step": 1361
+ },
+ {
+ "epoch": 0.62,
+ "grad_norm": 0.5657375052996718,
+ "learning_rate": 0.00032612262203101267,
+ "loss": 2.6414,
+ "step": 1362
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5422066378923954,
+ "learning_rate": 0.00032542614640277225,
+ "loss": 2.6076,
+ "step": 1363
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5287488249105601,
+ "learning_rate": 0.0003247300563140481,
+ "loss": 2.6615,
+ "step": 1364
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5819683575763082,
+ "learning_rate": 0.00032403435330212807,
+ "loss": 2.7259,
+ "step": 1365
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5475986305843664,
+ "learning_rate": 0.00032333903890344515,
+ "loss": 2.686,
+ "step": 1366
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4935288695787895,
+ "learning_rate": 0.00032264411465357333,
+ "loss": 2.5674,
+ "step": 1367
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5262052352606102,
+ "learning_rate": 0.00032194958208722654,
+ "loss": 2.6771,
+ "step": 1368
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5445183733396111,
+ "learning_rate": 0.00032125544273825204,
+ "loss": 2.7843,
+ "step": 1369
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5238394369182566,
+ "learning_rate": 0.0003205616981396297,
+ "loss": 2.6502,
+ "step": 1370
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4977683537022086,
+ "learning_rate": 0.00031986834982346713,
+ "loss": 2.7124,
+ "step": 1371
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4874506234474769,
+ "learning_rate": 0.00031917539932099694,
+ "loss": 2.6795,
+ "step": 1372
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5023626411399686,
+ "learning_rate": 0.00031848284816257336,
+ "loss": 2.7011,
+ "step": 1373
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5426472166234935,
+ "learning_rate": 0.0003177906978776682,
+ "loss": 2.6814,
+ "step": 1374
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5642880016533615,
+ "learning_rate": 0.0003170989499948683,
+ "loss": 2.7464,
+ "step": 1375
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5157116294003709,
+ "learning_rate": 0.0003164076060418719,
+ "loss": 2.6379,
+ "step": 1376
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5186909042468566,
+ "learning_rate": 0.000315716667545485,
+ "loss": 2.6998,
+ "step": 1377
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4952085663608333,
+ "learning_rate": 0.00031502613603161836,
+ "loss": 2.6487,
+ "step": 1378
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5850830319130214,
+ "learning_rate": 0.00031433601302528335,
+ "loss": 2.6798,
+ "step": 1379
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5492048047397665,
+ "learning_rate": 0.00031364630005058995,
+ "loss": 2.627,
+ "step": 1380
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5335190534299099,
+ "learning_rate": 0.0003129569986307422,
+ "loss": 2.6604,
+ "step": 1381
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5066734226947467,
+ "learning_rate": 0.00031226811028803515,
+ "loss": 2.6743,
+ "step": 1382
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.5138738632470816,
+ "learning_rate": 0.00031157963654385173,
+ "loss": 2.638,
+ "step": 1383
+ },
+ {
+ "epoch": 0.63,
+ "grad_norm": 0.4956355925211338,
+ "learning_rate": 0.0003108915789186592,
+ "loss": 2.7389,
+ "step": 1384
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5208069789695808,
+ "learning_rate": 0.00031020393893200604,
+ "loss": 2.7045,
+ "step": 1385
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5316353965257978,
+ "learning_rate": 0.00030951671810251823,
+ "loss": 2.7065,
+ "step": 1386
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5260349868498675,
+ "learning_rate": 0.0003088299179478959,
+ "loss": 2.6821,
+ "step": 1387
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5042055701627023,
+ "learning_rate": 0.0003081435399849104,
+ "loss": 2.6329,
+ "step": 1388
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.564684890477677,
+ "learning_rate": 0.0003074575857294004,
+ "loss": 2.7557,
+ "step": 1389
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5103677357724797,
+ "learning_rate": 0.0003067720566962691,
+ "loss": 2.6885,
+ "step": 1390
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5740174553771336,
+ "learning_rate": 0.0003060869543994806,
+ "loss": 2.6182,
+ "step": 1391
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5738561979214507,
+ "learning_rate": 0.0003054022803520562,
+ "loss": 2.6612,
+ "step": 1392
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5372126198016173,
+ "learning_rate": 0.0003047180360660721,
+ "loss": 2.6364,
+ "step": 1393
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.501160716823567,
+ "learning_rate": 0.00030403422305265475,
+ "loss": 2.6712,
+ "step": 1394
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5114866334109534,
+ "learning_rate": 0.0003033508428219785,
+ "loss": 2.6873,
+ "step": 1395
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5232681840541096,
+ "learning_rate": 0.00030266789688326184,
+ "loss": 2.5691,
+ "step": 1396
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5393814668125796,
+ "learning_rate": 0.00030198538674476393,
+ "loss": 2.6997,
+ "step": 1397
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5967989026102972,
+ "learning_rate": 0.00030130331391378185,
+ "loss": 2.7028,
+ "step": 1398
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5600477146014537,
+ "learning_rate": 0.0003006216798966468,
+ "loss": 2.763,
+ "step": 1399
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5654385397756314,
+ "learning_rate": 0.00029994048619872034,
+ "loss": 2.693,
+ "step": 1400
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5299226380316983,
+ "learning_rate": 0.0002992597343243927,
+ "loss": 2.6608,
+ "step": 1401
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5332431168796034,
+ "learning_rate": 0.0002985794257770773,
+ "loss": 2.6744,
+ "step": 1402
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5599907413920047,
+ "learning_rate": 0.0002978995620592092,
+ "loss": 2.7762,
+ "step": 1403
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5259219784993198,
+ "learning_rate": 0.0002972201446722405,
+ "loss": 2.6704,
+ "step": 1404
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5289110190907368,
+ "learning_rate": 0.00029654117511663803,
+ "loss": 2.6691,
+ "step": 1405
+ },
+ {
+ "epoch": 0.64,
+ "grad_norm": 0.5666789751206317,
+ "learning_rate": 0.0002958626548918795,
+ "loss": 2.6319,
+ "step": 1406
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.537924097368643,
+ "learning_rate": 0.00029518458549645014,
+ "loss": 2.6829,
+ "step": 1407
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5250651072830079,
+ "learning_rate": 0.00029450696842783954,
+ "loss": 2.5617,
+ "step": 1408
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5413987082913225,
+ "learning_rate": 0.00029382980518253865,
+ "loss": 2.6939,
+ "step": 1409
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5198850495320728,
+ "learning_rate": 0.00029315309725603595,
+ "loss": 2.7114,
+ "step": 1410
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5205726163557534,
+ "learning_rate": 0.00029247684614281446,
+ "loss": 2.6288,
+ "step": 1411
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5066031300184763,
+ "learning_rate": 0.0002918010533363481,
+ "loss": 2.6293,
+ "step": 1412
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5052016588684003,
+ "learning_rate": 0.0002911257203290987,
+ "loss": 2.6302,
+ "step": 1413
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4830621053248146,
+ "learning_rate": 0.00029045084861251314,
+ "loss": 2.6059,
+ "step": 1414
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5105135117228916,
+ "learning_rate": 0.00028977643967701897,
+ "loss": 2.7091,
+ "step": 1415
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5181454402780247,
+ "learning_rate": 0.00028910249501202156,
+ "loss": 2.6829,
+ "step": 1416
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.49973317100856485,
+ "learning_rate": 0.00028842901610590165,
+ "loss": 2.5842,
+ "step": 1417
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5073894080741461,
+ "learning_rate": 0.00028775600444601123,
+ "loss": 2.6679,
+ "step": 1418
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5437132557995807,
+ "learning_rate": 0.00028708346151866973,
+ "loss": 2.6164,
+ "step": 1419
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.4985011262072265,
+ "learning_rate": 0.0002864113888091622,
+ "loss": 2.6482,
+ "step": 1420
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.526944767280008,
+ "learning_rate": 0.0002857397878017348,
+ "loss": 2.6875,
+ "step": 1421
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5203838861769262,
+ "learning_rate": 0.00028506865997959173,
+ "loss": 2.6779,
+ "step": 1422
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5102656749515754,
+ "learning_rate": 0.000284398006824893,
+ "loss": 2.5647,
+ "step": 1423
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.520395819567321,
+ "learning_rate": 0.00028372782981874963,
+ "loss": 2.6876,
+ "step": 1424
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.49829621726395185,
+ "learning_rate": 0.00028305813044122096,
+ "loss": 2.6435,
+ "step": 1425
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5440936122894126,
+ "learning_rate": 0.0002823889101713122,
+ "loss": 2.6147,
+ "step": 1426
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5764556440051147,
+ "learning_rate": 0.0002817201704869701,
+ "loss": 2.7322,
+ "step": 1427
+ },
+ {
+ "epoch": 0.65,
+ "grad_norm": 0.5362159401223764,
+ "learning_rate": 0.00028105191286508,
+ "loss": 2.6486,
+ "step": 1428
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5466499533987904,
+ "learning_rate": 0.00028038413878146245,
+ "loss": 2.7046,
+ "step": 1429
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5132498804391473,
+ "learning_rate": 0.00027971684971087073,
+ "loss": 2.6387,
+ "step": 1430
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5150065927763824,
+ "learning_rate": 0.00027905004712698643,
+ "loss": 2.5806,
+ "step": 1431
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5060318307730233,
+ "learning_rate": 0.0002783837325024167,
+ "loss": 2.6609,
+ "step": 1432
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5175130911221689,
+ "learning_rate": 0.00027771790730869153,
+ "loss": 2.6201,
+ "step": 1433
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5127763109143173,
+ "learning_rate": 0.0002770525730162599,
+ "loss": 2.6918,
+ "step": 1434
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5101713640524365,
+ "learning_rate": 0.00027638773109448645,
+ "loss": 2.6363,
+ "step": 1435
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4977353453449889,
+ "learning_rate": 0.00027572338301164824,
+ "loss": 2.6749,
+ "step": 1436
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5152001930798664,
+ "learning_rate": 0.0002750595302349324,
+ "loss": 2.7359,
+ "step": 1437
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5418196502338989,
+ "learning_rate": 0.00027439617423043145,
+ "loss": 2.6405,
+ "step": 1438
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5202765834346041,
+ "learning_rate": 0.00027373331646314114,
+ "loss": 2.7226,
+ "step": 1439
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5397980586715199,
+ "learning_rate": 0.0002730709583969572,
+ "loss": 2.6664,
+ "step": 1440
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5043474054028523,
+ "learning_rate": 0.0002724091014946711,
+ "loss": 2.5438,
+ "step": 1441
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.4946802793438189,
+ "learning_rate": 0.00027174774721796824,
+ "loss": 2.6366,
+ "step": 1442
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5417840765906496,
+ "learning_rate": 0.0002710868970274232,
+ "loss": 2.6567,
+ "step": 1443
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5489253642537957,
+ "learning_rate": 0.0002704265523824982,
+ "loss": 2.59,
+ "step": 1444
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5803818513629694,
+ "learning_rate": 0.00026976671474153826,
+ "loss": 2.5948,
+ "step": 1445
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5517370509149084,
+ "learning_rate": 0.00026910738556176886,
+ "loss": 2.7432,
+ "step": 1446
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5135361314804905,
+ "learning_rate": 0.0002684485662992929,
+ "loss": 2.6115,
+ "step": 1447
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5337181086537189,
+ "learning_rate": 0.0002677902584090869,
+ "loss": 2.5381,
+ "step": 1448
+ },
+ {
+ "epoch": 0.66,
+ "grad_norm": 0.5610967536723442,
+ "learning_rate": 0.00026713246334499774,
+ "loss": 2.696,
+ "step": 1449
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.564343060737695,
+ "learning_rate": 0.00026647518255974023,
+ "loss": 2.5733,
+ "step": 1450
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5527781937344152,
+ "learning_rate": 0.0002658184175048934,
+ "loss": 2.6563,
+ "step": 1451
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5421097229014369,
+ "learning_rate": 0.00026516216963089694,
+ "loss": 2.6844,
+ "step": 1452
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5471746344692654,
+ "learning_rate": 0.0002645064403870488,
+ "loss": 2.6965,
+ "step": 1453
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5376147689259011,
+ "learning_rate": 0.0002638512312215011,
+ "loss": 2.6935,
+ "step": 1454
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5189339406840692,
+ "learning_rate": 0.0002631965435812575,
+ "loss": 2.6773,
+ "step": 1455
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5518657545246427,
+ "learning_rate": 0.00026254237891217046,
+ "loss": 2.7785,
+ "step": 1456
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5298639828296662,
+ "learning_rate": 0.0002618887386589367,
+ "loss": 2.6014,
+ "step": 1457
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.510199465396997,
+ "learning_rate": 0.0002612356242650949,
+ "loss": 2.6573,
+ "step": 1458
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5036444576831944,
+ "learning_rate": 0.0002605830371730229,
+ "loss": 2.6739,
+ "step": 1459
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5542983995400107,
+ "learning_rate": 0.0002599309788239339,
+ "loss": 2.6603,
+ "step": 1460
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5092107088269644,
+ "learning_rate": 0.00025927945065787306,
+ "loss": 2.6108,
+ "step": 1461
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5064272689216358,
+ "learning_rate": 0.0002586284541137145,
+ "loss": 2.6153,
+ "step": 1462
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5180283883834162,
+ "learning_rate": 0.00025797799062915905,
+ "loss": 2.5744,
+ "step": 1463
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5055843608166704,
+ "learning_rate": 0.00025732806164072966,
+ "loss": 2.6848,
+ "step": 1464
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5185821371538296,
+ "learning_rate": 0.00025667866858376874,
+ "loss": 2.5959,
+ "step": 1465
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5176633431516448,
+ "learning_rate": 0.0002560298128924358,
+ "loss": 2.6282,
+ "step": 1466
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5273843104219313,
+ "learning_rate": 0.0002553814959997032,
+ "loss": 2.7327,
+ "step": 1467
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5711054615423432,
+ "learning_rate": 0.00025473371933735334,
+ "loss": 2.6961,
+ "step": 1468
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5383704906387032,
+ "learning_rate": 0.00025408648433597534,
+ "loss": 2.6674,
+ "step": 1469
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5427733271080686,
+ "learning_rate": 0.00025343979242496283,
+ "loss": 2.6195,
+ "step": 1470
+ },
+ {
+ "epoch": 0.67,
+ "grad_norm": 0.5365929833764801,
+ "learning_rate": 0.00025279364503250925,
+ "loss": 2.6737,
+ "step": 1471
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5115049533560345,
+ "learning_rate": 0.0002521480435856056,
+ "loss": 2.6393,
+ "step": 1472
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5193419047301462,
+ "learning_rate": 0.0002515029895100378,
+ "loss": 2.5748,
+ "step": 1473
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5349163495852879,
+ "learning_rate": 0.0002508584842303822,
+ "loss": 2.664,
+ "step": 1474
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5231080571453198,
+ "learning_rate": 0.0002502145291700038,
+ "loss": 2.5552,
+ "step": 1475
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5680389782159602,
+ "learning_rate": 0.0002495711257510517,
+ "loss": 2.7241,
+ "step": 1476
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5002819969270994,
+ "learning_rate": 0.0002489282753944575,
+ "loss": 2.5844,
+ "step": 1477
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5412066763071609,
+ "learning_rate": 0.00024828597951993093,
+ "loss": 2.6433,
+ "step": 1478
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5676842560351678,
+ "learning_rate": 0.00024764423954595706,
+ "loss": 2.6848,
+ "step": 1479
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5906613736610669,
+ "learning_rate": 0.0002470030568897938,
+ "loss": 2.6737,
+ "step": 1480
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5673379624383489,
+ "learning_rate": 0.00024636243296746773,
+ "loss": 2.6745,
+ "step": 1481
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5206353450569106,
+ "learning_rate": 0.0002457223691937716,
+ "loss": 2.6633,
+ "step": 1482
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5160386604266843,
+ "learning_rate": 0.0002450828669822613,
+ "loss": 2.6439,
+ "step": 1483
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5533087453045891,
+ "learning_rate": 0.00024444392774525253,
+ "loss": 2.7156,
+ "step": 1484
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5465306666046615,
+ "learning_rate": 0.00024380555289381733,
+ "loss": 2.5933,
+ "step": 1485
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.542473365369578,
+ "learning_rate": 0.00024316774383778184,
+ "loss": 2.6647,
+ "step": 1486
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5237270013241679,
+ "learning_rate": 0.0002425305019857222,
+ "loss": 2.6483,
+ "step": 1487
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5193715169276047,
+ "learning_rate": 0.00024189382874496184,
+ "loss": 2.6638,
+ "step": 1488
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5897079369245194,
+ "learning_rate": 0.00024125772552156916,
+ "loss": 2.7208,
+ "step": 1489
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5139682715458571,
+ "learning_rate": 0.00024062219372035292,
+ "loss": 2.6786,
+ "step": 1490
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5179543000300554,
+ "learning_rate": 0.00023998723474486007,
+ "loss": 2.6111,
+ "step": 1491
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5347407856165681,
+ "learning_rate": 0.00023935284999737272,
+ "loss": 2.6616,
+ "step": 1492
+ },
+ {
+ "epoch": 0.68,
+ "grad_norm": 0.5272279684299376,
+ "learning_rate": 0.00023871904087890505,
+ "loss": 2.7029,
+ "step": 1493
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5234490480223335,
+ "learning_rate": 0.00023808580878919945,
+ "loss": 2.6136,
+ "step": 1494
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5582945697087572,
+ "learning_rate": 0.00023745315512672398,
+ "loss": 2.7117,
+ "step": 1495
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5231832324951284,
+ "learning_rate": 0.0002368210812886698,
+ "loss": 2.6618,
+ "step": 1496
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5616947979265463,
+ "learning_rate": 0.0002361895886709471,
+ "loss": 2.6816,
+ "step": 1497
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5185055761386351,
+ "learning_rate": 0.0002355586786681823,
+ "loss": 2.608,
+ "step": 1498
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.539317601724139,
+ "learning_rate": 0.00023492835267371575,
+ "loss": 2.6858,
+ "step": 1499
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5345901343877524,
+ "learning_rate": 0.0002342986120795978,
+ "loss": 2.6481,
+ "step": 1500
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5122342904163094,
+ "learning_rate": 0.0002336694582765857,
+ "loss": 2.5651,
+ "step": 1501
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5196213534016761,
+ "learning_rate": 0.00023304089265414085,
+ "loss": 2.6116,
+ "step": 1502
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5155030891507187,
+ "learning_rate": 0.00023241291660042613,
+ "loss": 2.6156,
+ "step": 1503
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5729630278348291,
+ "learning_rate": 0.00023178553150230186,
+ "loss": 2.7175,
+ "step": 1504
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5594060944325331,
+ "learning_rate": 0.00023115873874532324,
+ "loss": 2.713,
+ "step": 1505
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5277105104636358,
+ "learning_rate": 0.00023053253971373796,
+ "loss": 2.654,
+ "step": 1506
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5162148271146483,
+ "learning_rate": 0.00022990693579048166,
+ "loss": 2.5876,
+ "step": 1507
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5300984993351018,
+ "learning_rate": 0.00022928192835717644,
+ "loss": 2.602,
+ "step": 1508
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.513353442934914,
+ "learning_rate": 0.00022865751879412634,
+ "loss": 2.676,
+ "step": 1509
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5097239768653745,
+ "learning_rate": 0.00022803370848031585,
+ "loss": 2.6202,
+ "step": 1510
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5456174738564598,
+ "learning_rate": 0.00022741049879340542,
+ "loss": 2.6358,
+ "step": 1511
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5484806040785072,
+ "learning_rate": 0.00022678789110972897,
+ "loss": 2.5856,
+ "step": 1512
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5546216675903627,
+ "learning_rate": 0.00022616588680429155,
+ "loss": 2.6514,
+ "step": 1513
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.5617218968324993,
+ "learning_rate": 0.00022554448725076526,
+ "loss": 2.7548,
+ "step": 1514
+ },
+ {
+ "epoch": 0.69,
+ "grad_norm": 0.569613187708911,
+ "learning_rate": 0.0002249236938214863,
+ "loss": 2.5341,
+ "step": 1515
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5501106307012626,
+ "learning_rate": 0.00022430350788745296,
+ "loss": 2.5603,
+ "step": 1516
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5390219955294513,
+ "learning_rate": 0.00022368393081832166,
+ "loss": 2.69,
+ "step": 1517
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5897811537094955,
+ "learning_rate": 0.00022306496398240383,
+ "loss": 2.6767,
+ "step": 1518
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5347395112834396,
+ "learning_rate": 0.00022244660874666373,
+ "loss": 2.5479,
+ "step": 1519
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.532099024339146,
+ "learning_rate": 0.00022182886647671452,
+ "loss": 2.6148,
+ "step": 1520
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5197008526818121,
+ "learning_rate": 0.0002212117385368157,
+ "loss": 2.5834,
+ "step": 1521
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.525993286414334,
+ "learning_rate": 0.00022059522628987038,
+ "loss": 2.7812,
+ "step": 1522
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5283498413141905,
+ "learning_rate": 0.00021997933109742162,
+ "loss": 2.6849,
+ "step": 1523
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5050910637464837,
+ "learning_rate": 0.00021936405431964969,
+ "loss": 2.5724,
+ "step": 1524
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5250402825442426,
+ "learning_rate": 0.00021874939731536926,
+ "loss": 2.6456,
+ "step": 1525
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5262360172728192,
+ "learning_rate": 0.00021813536144202656,
+ "loss": 2.6649,
+ "step": 1526
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5386503618407155,
+ "learning_rate": 0.00021752194805569553,
+ "loss": 2.6524,
+ "step": 1527
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5217466893978039,
+ "learning_rate": 0.0002169091585110754,
+ "loss": 2.6671,
+ "step": 1528
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5400829363920241,
+ "learning_rate": 0.00021629699416148828,
+ "loss": 2.6783,
+ "step": 1529
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5294514013487223,
+ "learning_rate": 0.000215685456358875,
+ "loss": 2.6293,
+ "step": 1530
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5694555506648116,
+ "learning_rate": 0.00021507454645379258,
+ "loss": 2.5874,
+ "step": 1531
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5265427964792164,
+ "learning_rate": 0.00021446426579541184,
+ "loss": 2.6416,
+ "step": 1532
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5149494502222681,
+ "learning_rate": 0.00021385461573151387,
+ "loss": 2.6493,
+ "step": 1533
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5832936251131656,
+ "learning_rate": 0.00021324559760848677,
+ "loss": 2.7218,
+ "step": 1534
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5657731206716266,
+ "learning_rate": 0.00021263721277132303,
+ "loss": 2.5902,
+ "step": 1535
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5242313701067509,
+ "learning_rate": 0.0002120294625636171,
+ "loss": 2.5691,
+ "step": 1536
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 0.5433520498868684,
+ "learning_rate": 0.0002114223483275613,
+ "loss": 2.6662,
+ "step": 1537
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5541520795803548,
+ "learning_rate": 0.0002108158714039435,
+ "loss": 2.5679,
+ "step": 1538
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5920854297181153,
+ "learning_rate": 0.00021021003313214455,
+ "loss": 2.6702,
+ "step": 1539
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5346900369551251,
+ "learning_rate": 0.00020960483485013432,
+ "loss": 2.6517,
+ "step": 1540
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5643621486573619,
+ "learning_rate": 0.0002090002778944694,
+ "loss": 2.6152,
+ "step": 1541
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.567044693530176,
+ "learning_rate": 0.00020839636360029025,
+ "loss": 2.6198,
+ "step": 1542
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.577927786948691,
+ "learning_rate": 0.00020779309330131818,
+ "loss": 2.5774,
+ "step": 1543
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5719137133497179,
+ "learning_rate": 0.00020719046832985184,
+ "loss": 2.8123,
+ "step": 1544
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5180931132175836,
+ "learning_rate": 0.0002065884900167646,
+ "loss": 2.6041,
+ "step": 1545
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5224195494034202,
+ "learning_rate": 0.0002059871596915024,
+ "loss": 2.6042,
+ "step": 1546
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5288820415224451,
+ "learning_rate": 0.0002053864786820795,
+ "loss": 2.7324,
+ "step": 1547
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.48909132475895206,
+ "learning_rate": 0.00020478644831507627,
+ "loss": 2.6326,
+ "step": 1548
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5264829559344815,
+ "learning_rate": 0.00020418706991563634,
+ "loss": 2.6185,
+ "step": 1549
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5184649457276234,
+ "learning_rate": 0.00020358834480746363,
+ "loss": 2.5696,
+ "step": 1550
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.4890895726634323,
+ "learning_rate": 0.0002029902743128188,
+ "loss": 2.5935,
+ "step": 1551
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5073768980879371,
+ "learning_rate": 0.0002023928597525174,
+ "loss": 2.6385,
+ "step": 1552
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5276495283221572,
+ "learning_rate": 0.00020179610244592595,
+ "loss": 2.7057,
+ "step": 1553
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5165094433644738,
+ "learning_rate": 0.00020120000371095937,
+ "loss": 2.6762,
+ "step": 1554
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.518793887619729,
+ "learning_rate": 0.0002006045648640787,
+ "loss": 2.7091,
+ "step": 1555
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5246544971119942,
+ "learning_rate": 0.00020000978722028713,
+ "loss": 2.6711,
+ "step": 1556
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5215130139384636,
+ "learning_rate": 0.00019941567209312767,
+ "loss": 2.5976,
+ "step": 1557
+ },
+ {
+ "epoch": 0.71,
+ "grad_norm": 0.5463917375285926,
+ "learning_rate": 0.00019882222079468036,
+ "loss": 2.6617,
+ "step": 1558
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.543868774564057,
+ "learning_rate": 0.0001982294346355595,
+ "loss": 2.7018,
+ "step": 1559
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5378778964625525,
+ "learning_rate": 0.00019763731492490976,
+ "loss": 2.7008,
+ "step": 1560
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5423401893488466,
+ "learning_rate": 0.00019704586297040422,
+ "loss": 2.6772,
+ "step": 1561
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5332443738305217,
+ "learning_rate": 0.0001964550800782417,
+ "loss": 2.666,
+ "step": 1562
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5444301665790526,
+ "learning_rate": 0.00019586496755314288,
+ "loss": 2.6345,
+ "step": 1563
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.513558461810192,
+ "learning_rate": 0.00019527552669834798,
+ "loss": 2.6318,
+ "step": 1564
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.49982186312761545,
+ "learning_rate": 0.0001946867588156142,
+ "loss": 2.5932,
+ "step": 1565
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5055236621814083,
+ "learning_rate": 0.00019409866520521258,
+ "loss": 2.5897,
+ "step": 1566
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.516065270746231,
+ "learning_rate": 0.00019351124716592455,
+ "loss": 2.7165,
+ "step": 1567
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5277570504491967,
+ "learning_rate": 0.0001929245059950397,
+ "loss": 2.6549,
+ "step": 1568
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5171019803008386,
+ "learning_rate": 0.0001923384429883533,
+ "loss": 2.6035,
+ "step": 1569
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5262507808011542,
+ "learning_rate": 0.00019175305944016237,
+ "loss": 2.6037,
+ "step": 1570
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5153470443332753,
+ "learning_rate": 0.00019116835664326326,
+ "loss": 2.6733,
+ "step": 1571
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.523730142526998,
+ "learning_rate": 0.0001905843358889497,
+ "loss": 2.5972,
+ "step": 1572
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.4923266765926923,
+ "learning_rate": 0.00019000099846700836,
+ "loss": 2.5601,
+ "step": 1573
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5457293917465317,
+ "learning_rate": 0.00018941834566571692,
+ "loss": 2.6832,
+ "step": 1574
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5068475300401649,
+ "learning_rate": 0.00018883637877184145,
+ "loss": 2.6852,
+ "step": 1575
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5483264256408378,
+ "learning_rate": 0.00018825509907063325,
+ "loss": 2.6164,
+ "step": 1576
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5400015884979669,
+ "learning_rate": 0.00018767450784582557,
+ "loss": 2.679,
+ "step": 1577
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.5462849398792158,
+ "learning_rate": 0.00018709460637963122,
+ "loss": 2.6862,
+ "step": 1578
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.535656024635467,
+ "learning_rate": 0.00018651539595274013,
+ "loss": 2.6039,
+ "step": 1579
+ },
+ {
+ "epoch": 0.72,
+ "grad_norm": 0.545470749810151,
+ "learning_rate": 0.00018593687784431578,
+ "loss": 2.5927,
+ "step": 1580
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5654794153265718,
+ "learning_rate": 0.00018535905333199248,
+ "loss": 2.6115,
+ "step": 1581
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5442121995362399,
+ "learning_rate": 0.0001847819236918733,
+ "loss": 2.654,
+ "step": 1582
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5361455297166935,
+ "learning_rate": 0.00018420549019852655,
+ "loss": 2.6771,
+ "step": 1583
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.49199618672352247,
+ "learning_rate": 0.00018362975412498266,
+ "loss": 2.5546,
+ "step": 1584
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5379377343184183,
+ "learning_rate": 0.00018305471674273261,
+ "loss": 2.645,
+ "step": 1585
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5514526754304316,
+ "learning_rate": 0.0001824803793217237,
+ "loss": 2.6903,
+ "step": 1586
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5092193066921264,
+ "learning_rate": 0.00018190674313035737,
+ "loss": 2.5942,
+ "step": 1587
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.4963657698046657,
+ "learning_rate": 0.00018133380943548716,
+ "loss": 2.6365,
+ "step": 1588
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5385601164738788,
+ "learning_rate": 0.00018076157950241452,
+ "loss": 2.6713,
+ "step": 1589
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5116368126754929,
+ "learning_rate": 0.00018019005459488652,
+ "loss": 2.5941,
+ "step": 1590
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5150258847864664,
+ "learning_rate": 0.00017961923597509388,
+ "loss": 2.5683,
+ "step": 1591
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5031381095040823,
+ "learning_rate": 0.00017904912490366722,
+ "loss": 2.6515,
+ "step": 1592
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5371636510106691,
+ "learning_rate": 0.00017847972263967433,
+ "loss": 2.6688,
+ "step": 1593
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5366368195190238,
+ "learning_rate": 0.0001779110304406177,
+ "loss": 2.4204,
+ "step": 1594
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5265619086921874,
+ "learning_rate": 0.000177343049562432,
+ "loss": 2.7382,
+ "step": 1595
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5466934167400589,
+ "learning_rate": 0.0001767757812594807,
+ "loss": 2.6928,
+ "step": 1596
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5968912123524217,
+ "learning_rate": 0.0001762092267845534,
+ "loss": 2.6484,
+ "step": 1597
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5364204205129771,
+ "learning_rate": 0.00017564338738886365,
+ "loss": 2.6145,
+ "step": 1598
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5332747017716918,
+ "learning_rate": 0.0001750782643220457,
+ "loss": 2.676,
+ "step": 1599
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5646977329788221,
+ "learning_rate": 0.00017451385883215166,
+ "loss": 2.6644,
+ "step": 1600
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5213871071379184,
+ "learning_rate": 0.00017395017216564863,
+ "loss": 2.6017,
+ "step": 1601
+ },
+ {
+ "epoch": 0.73,
+ "grad_norm": 0.5007839433276626,
+ "learning_rate": 0.00017338720556741687,
+ "loss": 2.6291,
+ "step": 1602
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5590048493859461,
+ "learning_rate": 0.00017282496028074606,
+ "loss": 2.6167,
+ "step": 1603
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5066745325111831,
+ "learning_rate": 0.00017226343754733254,
+ "loss": 2.6141,
+ "step": 1604
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5422014666870855,
+ "learning_rate": 0.00017170263860727769,
+ "loss": 2.7176,
+ "step": 1605
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5425212472023727,
+ "learning_rate": 0.0001711425646990838,
+ "loss": 2.6119,
+ "step": 1606
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5272280508735294,
+ "learning_rate": 0.00017058321705965202,
+ "loss": 2.6717,
+ "step": 1607
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5336839049554646,
+ "learning_rate": 0.0001700245969242798,
+ "loss": 2.6205,
+ "step": 1608
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5482261027882128,
+ "learning_rate": 0.00016946670552665804,
+ "loss": 2.6019,
+ "step": 1609
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5472390313948575,
+ "learning_rate": 0.00016890954409886795,
+ "loss": 2.7677,
+ "step": 1610
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5642024499297316,
+ "learning_rate": 0.00016835311387137836,
+ "loss": 2.5689,
+ "step": 1611
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5748762267918953,
+ "learning_rate": 0.0001677974160730441,
+ "loss": 2.7319,
+ "step": 1612
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5274177155566764,
+ "learning_rate": 0.00016724245193110176,
+ "loss": 2.5966,
+ "step": 1613
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5503209534240237,
+ "learning_rate": 0.00016668822267116784,
+ "loss": 2.6217,
+ "step": 1614
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5626151078416814,
+ "learning_rate": 0.00016613472951723597,
+ "loss": 2.6272,
+ "step": 1615
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.55445741179899,
+ "learning_rate": 0.00016558197369167434,
+ "loss": 2.6675,
+ "step": 1616
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5619264941914996,
+ "learning_rate": 0.00016502995641522216,
+ "loss": 2.6902,
+ "step": 1617
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5110522581709123,
+ "learning_rate": 0.00016447867890698843,
+ "loss": 2.5981,
+ "step": 1618
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5405115968834316,
+ "learning_rate": 0.00016392814238444753,
+ "loss": 2.6154,
+ "step": 1619
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5362386395477635,
+ "learning_rate": 0.00016337834806343782,
+ "loss": 2.6145,
+ "step": 1620
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5562726061697222,
+ "learning_rate": 0.0001628292971581588,
+ "loss": 2.6752,
+ "step": 1621
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5138901371850961,
+ "learning_rate": 0.00016228099088116772,
+ "loss": 2.6545,
+ "step": 1622
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5240736483676336,
+ "learning_rate": 0.00016173343044337734,
+ "loss": 2.6586,
+ "step": 1623
+ },
+ {
+ "epoch": 0.74,
+ "grad_norm": 0.5322491046086423,
+ "learning_rate": 0.00016118661705405356,
+ "loss": 2.6537,
+ "step": 1624
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5244359554006681,
+ "learning_rate": 0.00016064055192081255,
+ "loss": 2.5796,
+ "step": 1625
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5680219325603981,
+ "learning_rate": 0.00016009523624961757,
+ "loss": 2.6471,
+ "step": 1626
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5740940996864791,
+ "learning_rate": 0.00015955067124477678,
+ "loss": 2.6028,
+ "step": 1627
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.6023190021050099,
+ "learning_rate": 0.000159006858108941,
+ "loss": 2.6125,
+ "step": 1628
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5830735261622197,
+ "learning_rate": 0.00015846379804310002,
+ "loss": 2.6765,
+ "step": 1629
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5913668041474787,
+ "learning_rate": 0.00015792149224658054,
+ "loss": 2.6482,
+ "step": 1630
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5261665924200206,
+ "learning_rate": 0.00015737994191704385,
+ "loss": 2.6442,
+ "step": 1631
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5298900785738464,
+ "learning_rate": 0.0001568391482504829,
+ "loss": 2.6429,
+ "step": 1632
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5719526933494885,
+ "learning_rate": 0.00015629911244121903,
+ "loss": 2.5693,
+ "step": 1633
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5222692889586663,
+ "learning_rate": 0.0001557598356819,
+ "loss": 2.6491,
+ "step": 1634
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5867055605998461,
+ "learning_rate": 0.00015522131916349786,
+ "loss": 2.7019,
+ "step": 1635
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5294334833962776,
+ "learning_rate": 0.00015468356407530493,
+ "loss": 2.7075,
+ "step": 1636
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5359405824538117,
+ "learning_rate": 0.00015414657160493217,
+ "loss": 2.7096,
+ "step": 1637
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5490111826525037,
+ "learning_rate": 0.00015361034293830673,
+ "loss": 2.5614,
+ "step": 1638
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5137240563860516,
+ "learning_rate": 0.00015307487925966844,
+ "loss": 2.5864,
+ "step": 1639
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5281239830520816,
+ "learning_rate": 0.00015254018175156776,
+ "loss": 2.5691,
+ "step": 1640
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5407841898908815,
+ "learning_rate": 0.0001520062515948632,
+ "loss": 2.645,
+ "step": 1641
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5712122303596,
+ "learning_rate": 0.0001514730899687189,
+ "loss": 2.6661,
+ "step": 1642
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5419987346279507,
+ "learning_rate": 0.00015094069805060122,
+ "loss": 2.5803,
+ "step": 1643
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5503105457045007,
+ "learning_rate": 0.00015040907701627666,
+ "loss": 2.7235,
+ "step": 1644
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5680547403546333,
+ "learning_rate": 0.00014987822803980976,
+ "loss": 2.6256,
+ "step": 1645
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.5701688762099579,
+ "learning_rate": 0.00014934815229355965,
+ "loss": 2.6727,
+ "step": 1646
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.563042871960405,
+ "learning_rate": 0.00014881885094817748,
+ "loss": 2.683,
+ "step": 1647
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5815451375342733,
+ "learning_rate": 0.00014829032517260488,
+ "loss": 2.6041,
+ "step": 1648
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5671998478554637,
+ "learning_rate": 0.0001477625761340704,
+ "loss": 2.6868,
+ "step": 1649
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.545143664786361,
+ "learning_rate": 0.0001472356049980868,
+ "loss": 2.5912,
+ "step": 1650
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5517594197395428,
+ "learning_rate": 0.00014670941292844954,
+ "loss": 2.6507,
+ "step": 1651
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5344167945944867,
+ "learning_rate": 0.00014618400108723295,
+ "loss": 2.5732,
+ "step": 1652
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5684446666618682,
+ "learning_rate": 0.00014565937063478862,
+ "loss": 2.6814,
+ "step": 1653
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5359949038414973,
+ "learning_rate": 0.00014513552272974207,
+ "loss": 2.6375,
+ "step": 1654
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5476790225417992,
+ "learning_rate": 0.0001446124585289913,
+ "loss": 2.6441,
+ "step": 1655
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.538946989560625,
+ "learning_rate": 0.00014409017918770266,
+ "loss": 2.7112,
+ "step": 1656
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5597086298018595,
+ "learning_rate": 0.00014356868585930994,
+ "loss": 2.6678,
+ "step": 1657
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5441589345574198,
+ "learning_rate": 0.00014304797969551077,
+ "loss": 2.5752,
+ "step": 1658
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5686067425585911,
+ "learning_rate": 0.00014252806184626417,
+ "loss": 2.6465,
+ "step": 1659
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5466099142902209,
+ "learning_rate": 0.00014200893345978817,
+ "loss": 2.6135,
+ "step": 1660
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5518992032124671,
+ "learning_rate": 0.00014149059568255778,
+ "loss": 2.647,
+ "step": 1661
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5572770672745526,
+ "learning_rate": 0.00014097304965930157,
+ "loss": 2.5774,
+ "step": 1662
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5276082705791915,
+ "learning_rate": 0.00014045629653299953,
+ "loss": 2.5275,
+ "step": 1663
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5146435268229343,
+ "learning_rate": 0.00013994033744488076,
+ "loss": 2.6819,
+ "step": 1664
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5213038004858518,
+ "learning_rate": 0.00013942517353442092,
+ "loss": 2.6667,
+ "step": 1665
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5226528277689608,
+ "learning_rate": 0.0001389108059393391,
+ "loss": 2.6502,
+ "step": 1666
+ },
+ {
+ "epoch": 0.76,
+ "grad_norm": 0.5412570142869617,
+ "learning_rate": 0.00013839723579559581,
+ "loss": 2.6226,
+ "step": 1667
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5344368429361485,
+ "learning_rate": 0.00013788446423739103,
+ "loss": 2.5949,
+ "step": 1668
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5451804398916766,
+ "learning_rate": 0.00013737249239716042,
+ "loss": 2.6875,
+ "step": 1669
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5491917614718567,
+ "learning_rate": 0.00013686132140557355,
+ "loss": 2.6121,
+ "step": 1670
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.570343125937439,
+ "learning_rate": 0.00013635095239153188,
+ "loss": 2.5425,
+ "step": 1671
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5175849049558212,
+ "learning_rate": 0.00013584138648216527,
+ "loss": 2.5849,
+ "step": 1672
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5425256919294746,
+ "learning_rate": 0.0001353326248028298,
+ "loss": 2.6148,
+ "step": 1673
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5463945037322986,
+ "learning_rate": 0.00013482466847710594,
+ "loss": 2.5646,
+ "step": 1674
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5876310619474768,
+ "learning_rate": 0.00013431751862679554,
+ "loss": 2.6538,
+ "step": 1675
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5908732469594689,
+ "learning_rate": 0.00013381117637191887,
+ "loss": 2.6319,
+ "step": 1676
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5535024471658077,
+ "learning_rate": 0.00013330564283071293,
+ "loss": 2.6389,
+ "step": 1677
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5643002589392547,
+ "learning_rate": 0.000132800919119629,
+ "loss": 2.5598,
+ "step": 1678
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5585889490701113,
+ "learning_rate": 0.00013229700635332948,
+ "loss": 2.6606,
+ "step": 1679
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5707670251319692,
+ "learning_rate": 0.00013179390564468585,
+ "loss": 2.6027,
+ "step": 1680
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5366146986324742,
+ "learning_rate": 0.00013129161810477641,
+ "loss": 2.6122,
+ "step": 1681
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5239223265856179,
+ "learning_rate": 0.0001307901448428837,
+ "loss": 2.5756,
+ "step": 1682
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5243083793090889,
+ "learning_rate": 0.0001302894869664916,
+ "loss": 2.5856,
+ "step": 1683
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5397573897008999,
+ "learning_rate": 0.00012978964558128336,
+ "loss": 2.619,
+ "step": 1684
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5284857990679738,
+ "learning_rate": 0.00012929062179113925,
+ "loss": 2.6086,
+ "step": 1685
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5664310993763689,
+ "learning_rate": 0.00012879241669813368,
+ "loss": 2.6429,
+ "step": 1686
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5426754049447348,
+ "learning_rate": 0.00012829503140253295,
+ "loss": 2.7112,
+ "step": 1687
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5348538821526615,
+ "learning_rate": 0.0001277984670027933,
+ "loss": 2.6067,
+ "step": 1688
+ },
+ {
+ "epoch": 0.77,
+ "grad_norm": 0.5569987614416546,
+ "learning_rate": 0.00012730272459555737,
+ "loss": 2.6706,
+ "step": 1689
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5237205982167247,
+ "learning_rate": 0.00012680780527565312,
+ "loss": 2.5507,
+ "step": 1690
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5409291958263605,
+ "learning_rate": 0.0001263137101360905,
+ "loss": 2.6481,
+ "step": 1691
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5234761335877628,
+ "learning_rate": 0.00012582044026805922,
+ "loss": 2.6164,
+ "step": 1692
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5373827268209918,
+ "learning_rate": 0.00012532799676092627,
+ "loss": 2.5859,
+ "step": 1693
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.532880211848324,
+ "learning_rate": 0.00012483638070223414,
+ "loss": 2.6099,
+ "step": 1694
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5557804437799018,
+ "learning_rate": 0.00012434559317769752,
+ "loss": 2.515,
+ "step": 1695
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5577247470692999,
+ "learning_rate": 0.0001238556352712012,
+ "loss": 2.6751,
+ "step": 1696
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5665113806272568,
+ "learning_rate": 0.00012336650806479827,
+ "loss": 2.6865,
+ "step": 1697
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5628909625696256,
+ "learning_rate": 0.00012287821263870708,
+ "loss": 2.6627,
+ "step": 1698
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5382150662864119,
+ "learning_rate": 0.00012239075007130885,
+ "loss": 2.6005,
+ "step": 1699
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.52754821709818,
+ "learning_rate": 0.00012190412143914536,
+ "loss": 2.7571,
+ "step": 1700
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5683479833833636,
+ "learning_rate": 0.0001214183278169172,
+ "loss": 2.6982,
+ "step": 1701
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5125528282902689,
+ "learning_rate": 0.00012093337027748042,
+ "loss": 2.609,
+ "step": 1702
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5451802392484422,
+ "learning_rate": 0.00012044924989184459,
+ "loss": 2.7034,
+ "step": 1703
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5178563483276967,
+ "learning_rate": 0.0001199659677291709,
+ "loss": 2.5884,
+ "step": 1704
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5642323984557994,
+ "learning_rate": 0.00011948352485676895,
+ "loss": 2.6054,
+ "step": 1705
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5654319795204232,
+ "learning_rate": 0.00011900192234009477,
+ "loss": 2.7164,
+ "step": 1706
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5563588698788585,
+ "learning_rate": 0.00011852116124274875,
+ "loss": 2.6106,
+ "step": 1707
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5395462475560304,
+ "learning_rate": 0.00011804124262647314,
+ "loss": 2.619,
+ "step": 1708
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5550487837993278,
+ "learning_rate": 0.00011756216755114929,
+ "loss": 2.546,
+ "step": 1709
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.539296691819172,
+ "learning_rate": 0.00011708393707479548,
+ "loss": 2.643,
+ "step": 1710
+ },
+ {
+ "epoch": 0.78,
+ "grad_norm": 0.5447409826922939,
+ "learning_rate": 0.00011660655225356531,
+ "loss": 2.6808,
+ "step": 1711
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5482008436519805,
+ "learning_rate": 0.0001161300141417444,
+ "loss": 2.7427,
+ "step": 1712
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5444583054887217,
+ "learning_rate": 0.00011565432379174823,
+ "loss": 2.5526,
+ "step": 1713
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5598490028672822,
+ "learning_rate": 0.00011517948225412056,
+ "loss": 2.5821,
+ "step": 1714
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5488518174144517,
+ "learning_rate": 0.00011470549057753032,
+ "loss": 2.6692,
+ "step": 1715
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5448784312560685,
+ "learning_rate": 0.00011423234980876957,
+ "loss": 2.5754,
+ "step": 1716
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5549707676888599,
+ "learning_rate": 0.00011376006099275099,
+ "loss": 2.5817,
+ "step": 1717
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5465738827508374,
+ "learning_rate": 0.00011328862517250609,
+ "loss": 2.6125,
+ "step": 1718
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5740267266945138,
+ "learning_rate": 0.00011281804338918239,
+ "loss": 2.6833,
+ "step": 1719
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5446652038704665,
+ "learning_rate": 0.00011234831668204115,
+ "loss": 2.611,
+ "step": 1720
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5635434129944377,
+ "learning_rate": 0.00011187944608845569,
+ "loss": 2.6382,
+ "step": 1721
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5572752070930089,
+ "learning_rate": 0.00011141143264390801,
+ "loss": 2.675,
+ "step": 1722
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.535967905002558,
+ "learning_rate": 0.0001109442773819877,
+ "loss": 2.6024,
+ "step": 1723
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5399016275544049,
+ "learning_rate": 0.0001104779813343889,
+ "loss": 2.7266,
+ "step": 1724
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5317105462501159,
+ "learning_rate": 0.00011001254553090812,
+ "loss": 2.5172,
+ "step": 1725
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5350168909062053,
+ "learning_rate": 0.00010954797099944186,
+ "loss": 2.6889,
+ "step": 1726
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5446504250382401,
+ "learning_rate": 0.0001090842587659851,
+ "loss": 2.6661,
+ "step": 1727
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5279143446357414,
+ "learning_rate": 0.00010862140985462804,
+ "loss": 2.4785,
+ "step": 1728
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5864000692972191,
+ "learning_rate": 0.00010815942528755418,
+ "loss": 2.6563,
+ "step": 1729
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5470521318311045,
+ "learning_rate": 0.00010769830608503844,
+ "loss": 2.5164,
+ "step": 1730
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5187775723646955,
+ "learning_rate": 0.00010723805326544473,
+ "loss": 2.5848,
+ "step": 1731
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5552772845000137,
+ "learning_rate": 0.00010677866784522316,
+ "loss": 2.5786,
+ "step": 1732
+ },
+ {
+ "epoch": 0.79,
+ "grad_norm": 0.5369081434778854,
+ "learning_rate": 0.00010632015083890839,
+ "loss": 2.6338,
+ "step": 1733
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.62070806256124,
+ "learning_rate": 0.00010586250325911745,
+ "loss": 2.6428,
+ "step": 1734
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5622142693494053,
+ "learning_rate": 0.00010540572611654697,
+ "loss": 2.6593,
+ "step": 1735
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5583690218038259,
+ "learning_rate": 0.00010494982041997126,
+ "loss": 2.6499,
+ "step": 1736
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5292073566770972,
+ "learning_rate": 0.0001044947871762405,
+ "loss": 2.6172,
+ "step": 1737
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5191810033878912,
+ "learning_rate": 0.00010404062739027753,
+ "loss": 2.5655,
+ "step": 1738
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5547799066811584,
+ "learning_rate": 0.00010358734206507641,
+ "loss": 2.603,
+ "step": 1739
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5362438810091777,
+ "learning_rate": 0.00010313493220170017,
+ "loss": 2.5714,
+ "step": 1740
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5899901340667328,
+ "learning_rate": 0.00010268339879927836,
+ "loss": 2.6195,
+ "step": 1741
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5469847307481933,
+ "learning_rate": 0.00010223274285500466,
+ "loss": 2.5791,
+ "step": 1742
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5587432277494334,
+ "learning_rate": 0.00010178296536413495,
+ "loss": 2.6166,
+ "step": 1743
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5488966774348845,
+ "learning_rate": 0.00010133406731998546,
+ "loss": 2.6943,
+ "step": 1744
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5883890367017691,
+ "learning_rate": 0.00010088604971392979,
+ "loss": 2.4534,
+ "step": 1745
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5407202392821733,
+ "learning_rate": 0.0001004389135353972,
+ "loss": 2.6647,
+ "step": 1746
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5506938558142956,
+ "learning_rate": 9.999265977187049e-05,
+ "loss": 2.6668,
+ "step": 1747
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5479614136045822,
+ "learning_rate": 9.95472894088838e-05,
+ "loss": 2.6912,
+ "step": 1748
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5487257574413881,
+ "learning_rate": 9.910280343001993e-05,
+ "loss": 2.5572,
+ "step": 1749
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5564984764111545,
+ "learning_rate": 9.865920281690866e-05,
+ "loss": 2.6374,
+ "step": 1750
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5381469587843284,
+ "learning_rate": 9.821648854922482e-05,
+ "loss": 2.5954,
+ "step": 1751
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5170316744215978,
+ "learning_rate": 9.77746616046854e-05,
+ "loss": 2.5043,
+ "step": 1752
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5518426972528372,
+ "learning_rate": 9.733372295904774e-05,
+ "loss": 2.5819,
+ "step": 1753
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5907148581127032,
+ "learning_rate": 9.68936735861079e-05,
+ "loss": 2.6221,
+ "step": 1754
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.5306908205408962,
+ "learning_rate": 9.645451445769737e-05,
+ "loss": 2.5901,
+ "step": 1755
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5490707915940605,
+ "learning_rate": 9.601624654368196e-05,
+ "loss": 2.6413,
+ "step": 1756
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5714813514791723,
+ "learning_rate": 9.557887081195938e-05,
+ "loss": 2.6247,
+ "step": 1757
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5408811783025709,
+ "learning_rate": 9.514238822845667e-05,
+ "loss": 2.6939,
+ "step": 1758
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5544013801058092,
+ "learning_rate": 9.470679975712837e-05,
+ "loss": 2.544,
+ "step": 1759
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5321985628370466,
+ "learning_rate": 9.427210635995481e-05,
+ "loss": 2.602,
+ "step": 1760
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5451530908732214,
+ "learning_rate": 9.383830899693923e-05,
+ "loss": 2.6225,
+ "step": 1761
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5565953836543832,
+ "learning_rate": 9.340540862610591e-05,
+ "loss": 2.6615,
+ "step": 1762
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5441808450124133,
+ "learning_rate": 9.297340620349854e-05,
+ "loss": 2.7066,
+ "step": 1763
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5344214290666994,
+ "learning_rate": 9.25423026831777e-05,
+ "loss": 2.6769,
+ "step": 1764
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5491051615004859,
+ "learning_rate": 9.211209901721846e-05,
+ "loss": 2.6707,
+ "step": 1765
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.6106753857847599,
+ "learning_rate": 9.168279615570863e-05,
+ "loss": 2.539,
+ "step": 1766
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5344851828333826,
+ "learning_rate": 9.125439504674699e-05,
+ "loss": 2.6032,
+ "step": 1767
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5324494724345998,
+ "learning_rate": 9.082689663644057e-05,
+ "loss": 2.6112,
+ "step": 1768
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5575088929336367,
+ "learning_rate": 9.040030186890264e-05,
+ "loss": 2.7511,
+ "step": 1769
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5239100832178339,
+ "learning_rate": 8.997461168625138e-05,
+ "loss": 2.6463,
+ "step": 1770
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5801588569088706,
+ "learning_rate": 8.954982702860664e-05,
+ "loss": 2.6277,
+ "step": 1771
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5133599350993528,
+ "learning_rate": 8.912594883408865e-05,
+ "loss": 2.7109,
+ "step": 1772
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5682979468664974,
+ "learning_rate": 8.870297803881589e-05,
+ "loss": 2.5859,
+ "step": 1773
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5129385586006339,
+ "learning_rate": 8.828091557690287e-05,
+ "loss": 2.5573,
+ "step": 1774
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.5290786428372344,
+ "learning_rate": 8.785976238045801e-05,
+ "loss": 2.6164,
+ "step": 1775
+ },
+ {
+ "epoch": 0.81,
+ "grad_norm": 0.545305501637547,
+ "learning_rate": 8.743951937958144e-05,
+ "loss": 2.6673,
+ "step": 1776
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5657109058351861,
+ "learning_rate": 8.702018750236357e-05,
+ "loss": 2.6727,
+ "step": 1777
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5582963981092707,
+ "learning_rate": 8.660176767488237e-05,
+ "loss": 2.7218,
+ "step": 1778
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5735216135727991,
+ "learning_rate": 8.618426082120146e-05,
+ "loss": 2.5729,
+ "step": 1779
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5631030619693372,
+ "learning_rate": 8.576766786336854e-05,
+ "loss": 2.5433,
+ "step": 1780
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5550510943306851,
+ "learning_rate": 8.535198972141294e-05,
+ "loss": 2.5463,
+ "step": 1781
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5435231618465673,
+ "learning_rate": 8.493722731334347e-05,
+ "loss": 2.6633,
+ "step": 1782
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5585007276953949,
+ "learning_rate": 8.452338155514644e-05,
+ "loss": 2.6395,
+ "step": 1783
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5485125839745638,
+ "learning_rate": 8.411045336078426e-05,
+ "loss": 2.693,
+ "step": 1784
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5471434269174249,
+ "learning_rate": 8.369844364219264e-05,
+ "loss": 2.5727,
+ "step": 1785
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5410174539665094,
+ "learning_rate": 8.328735330927873e-05,
+ "loss": 2.6142,
+ "step": 1786
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5765358160346983,
+ "learning_rate": 8.287718326991961e-05,
+ "loss": 2.6867,
+ "step": 1787
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5625347776401176,
+ "learning_rate": 8.246793442995954e-05,
+ "loss": 2.6872,
+ "step": 1788
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5423815791449932,
+ "learning_rate": 8.205960769320875e-05,
+ "loss": 2.5215,
+ "step": 1789
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5418964417672332,
+ "learning_rate": 8.165220396144085e-05,
+ "loss": 2.5728,
+ "step": 1790
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5308861677059935,
+ "learning_rate": 8.12457241343909e-05,
+ "loss": 2.6112,
+ "step": 1791
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.559872390520589,
+ "learning_rate": 8.084016910975367e-05,
+ "loss": 2.6295,
+ "step": 1792
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5330425745491147,
+ "learning_rate": 8.043553978318169e-05,
+ "loss": 2.6133,
+ "step": 1793
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5373662749025255,
+ "learning_rate": 8.003183704828281e-05,
+ "loss": 2.5563,
+ "step": 1794
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5697522265009489,
+ "learning_rate": 7.962906179661872e-05,
+ "loss": 2.5703,
+ "step": 1795
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5403239474908315,
+ "learning_rate": 7.922721491770296e-05,
+ "loss": 2.6816,
+ "step": 1796
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5357331282649023,
+ "learning_rate": 7.882629729899832e-05,
+ "loss": 2.5324,
+ "step": 1797
+ },
+ {
+ "epoch": 0.82,
+ "grad_norm": 0.5330425392505703,
+ "learning_rate": 7.842630982591598e-05,
+ "loss": 2.6271,
+ "step": 1798
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5497993099013603,
+ "learning_rate": 7.802725338181232e-05,
+ "loss": 2.6284,
+ "step": 1799
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5785227746536995,
+ "learning_rate": 7.762912884798812e-05,
+ "loss": 2.5593,
+ "step": 1800
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5796617564855405,
+ "learning_rate": 7.723193710368564e-05,
+ "loss": 2.6891,
+ "step": 1801
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5717313633312403,
+ "learning_rate": 7.683567902608729e-05,
+ "loss": 2.6167,
+ "step": 1802
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5544617598187658,
+ "learning_rate": 7.644035549031364e-05,
+ "loss": 2.6502,
+ "step": 1803
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5707450426280389,
+ "learning_rate": 7.604596736942115e-05,
+ "loss": 2.6446,
+ "step": 1804
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5689919735836062,
+ "learning_rate": 7.56525155344004e-05,
+ "loss": 2.6216,
+ "step": 1805
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5481009818940108,
+ "learning_rate": 7.52600008541745e-05,
+ "loss": 2.6349,
+ "step": 1806
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5724154404305976,
+ "learning_rate": 7.486842419559681e-05,
+ "loss": 2.6432,
+ "step": 1807
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5372138222283107,
+ "learning_rate": 7.447778642344898e-05,
+ "loss": 2.5991,
+ "step": 1808
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5609282609839842,
+ "learning_rate": 7.408808840043912e-05,
+ "loss": 2.5021,
+ "step": 1809
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5415748883648057,
+ "learning_rate": 7.369933098720021e-05,
+ "loss": 2.5238,
+ "step": 1810
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.548574122861022,
+ "learning_rate": 7.331151504228767e-05,
+ "loss": 2.5734,
+ "step": 1811
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5273357262431444,
+ "learning_rate": 7.292464142217775e-05,
+ "loss": 2.5457,
+ "step": 1812
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5477377816470547,
+ "learning_rate": 7.25387109812658e-05,
+ "loss": 2.577,
+ "step": 1813
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5149933044746808,
+ "learning_rate": 7.215372457186415e-05,
+ "loss": 2.6433,
+ "step": 1814
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5349448460951758,
+ "learning_rate": 7.176968304420007e-05,
+ "loss": 2.6173,
+ "step": 1815
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5476430810329321,
+ "learning_rate": 7.138658724641417e-05,
+ "loss": 2.6073,
+ "step": 1816
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5496804331090777,
+ "learning_rate": 7.10044380245587e-05,
+ "loss": 2.5704,
+ "step": 1817
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5673331480646342,
+ "learning_rate": 7.062323622259515e-05,
+ "loss": 2.6118,
+ "step": 1818
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5358399014340838,
+ "learning_rate": 7.024298268239265e-05,
+ "loss": 2.6441,
+ "step": 1819
+ },
+ {
+ "epoch": 0.83,
+ "grad_norm": 0.5600576303312883,
+ "learning_rate": 6.986367824372647e-05,
+ "loss": 2.6055,
+ "step": 1820
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5494191886853074,
+ "learning_rate": 6.948532374427541e-05,
+ "loss": 2.4867,
+ "step": 1821
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5658075152185014,
+ "learning_rate": 6.910792001962063e-05,
+ "loss": 2.6615,
+ "step": 1822
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5362673718007406,
+ "learning_rate": 6.873146790324358e-05,
+ "loss": 2.569,
+ "step": 1823
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5592104425968037,
+ "learning_rate": 6.83559682265239e-05,
+ "loss": 2.6465,
+ "step": 1824
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5623609993071005,
+ "learning_rate": 6.798142181873784e-05,
+ "loss": 2.6165,
+ "step": 1825
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5459918493923973,
+ "learning_rate": 6.760782950705662e-05,
+ "loss": 2.5953,
+ "step": 1826
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5943883152641627,
+ "learning_rate": 6.723519211654422e-05,
+ "loss": 2.5188,
+ "step": 1827
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5638920635113399,
+ "learning_rate": 6.686351047015554e-05,
+ "loss": 2.6788,
+ "step": 1828
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5615639471648708,
+ "learning_rate": 6.649278538873515e-05,
+ "loss": 2.6668,
+ "step": 1829
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5088103445168204,
+ "learning_rate": 6.612301769101465e-05,
+ "loss": 2.6292,
+ "step": 1830
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5406730930468409,
+ "learning_rate": 6.575420819361177e-05,
+ "loss": 2.624,
+ "step": 1831
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5388718228721487,
+ "learning_rate": 6.538635771102757e-05,
+ "loss": 2.6235,
+ "step": 1832
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5328894666272541,
+ "learning_rate": 6.501946705564566e-05,
+ "loss": 2.5642,
+ "step": 1833
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5567356052375771,
+ "learning_rate": 6.465353703772959e-05,
+ "loss": 2.6026,
+ "step": 1834
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5582170076448739,
+ "learning_rate": 6.428856846542136e-05,
+ "loss": 2.6433,
+ "step": 1835
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5741568161669568,
+ "learning_rate": 6.392456214473996e-05,
+ "loss": 2.6161,
+ "step": 1836
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5213392174036122,
+ "learning_rate": 6.3561518879579e-05,
+ "loss": 2.6734,
+ "step": 1837
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5406757269685757,
+ "learning_rate": 6.31994394717052e-05,
+ "loss": 2.5142,
+ "step": 1838
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5722243661896202,
+ "learning_rate": 6.283832472075685e-05,
+ "loss": 2.5703,
+ "step": 1839
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5355174723845649,
+ "learning_rate": 6.247817542424178e-05,
+ "loss": 2.6415,
+ "step": 1840
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5626031451509184,
+ "learning_rate": 6.211899237753559e-05,
+ "loss": 2.5507,
+ "step": 1841
+ },
+ {
+ "epoch": 0.84,
+ "grad_norm": 0.5553912574581619,
+ "learning_rate": 6.176077637387984e-05,
+ "loss": 2.5559,
+ "step": 1842
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5343724008924052,
+ "learning_rate": 6.140352820438066e-05,
+ "loss": 2.5922,
+ "step": 1843
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5717148800912805,
+ "learning_rate": 6.104724865800665e-05,
+ "loss": 2.6343,
+ "step": 1844
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5627965263510422,
+ "learning_rate": 6.069193852158711e-05,
+ "loss": 2.6531,
+ "step": 1845
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.532720445355231,
+ "learning_rate": 6.0337598579810584e-05,
+ "loss": 2.6855,
+ "step": 1846
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5594427258109053,
+ "learning_rate": 5.9984229615223096e-05,
+ "loss": 2.562,
+ "step": 1847
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.557453981521758,
+ "learning_rate": 5.963183240822606e-05,
+ "loss": 2.4471,
+ "step": 1848
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5784412960381519,
+ "learning_rate": 5.9280407737074825e-05,
+ "loss": 2.662,
+ "step": 1849
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.529999920497906,
+ "learning_rate": 5.8929956377877125e-05,
+ "loss": 2.4706,
+ "step": 1850
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5279777567586011,
+ "learning_rate": 5.8580479104591075e-05,
+ "loss": 2.6327,
+ "step": 1851
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5303784890895924,
+ "learning_rate": 5.823197668902341e-05,
+ "loss": 2.5787,
+ "step": 1852
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5615200627267017,
+ "learning_rate": 5.78844499008282e-05,
+ "loss": 2.5963,
+ "step": 1853
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5556635131994543,
+ "learning_rate": 5.753789950750454e-05,
+ "loss": 2.6531,
+ "step": 1854
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5365574692874224,
+ "learning_rate": 5.719232627439558e-05,
+ "loss": 2.5386,
+ "step": 1855
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5528271396864795,
+ "learning_rate": 5.6847730964686315e-05,
+ "loss": 2.5724,
+ "step": 1856
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5563348612936558,
+ "learning_rate": 5.650411433940189e-05,
+ "loss": 2.5846,
+ "step": 1857
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5488295613427898,
+ "learning_rate": 5.61614771574061e-05,
+ "loss": 2.5516,
+ "step": 1858
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5711293329632678,
+ "learning_rate": 5.581982017539988e-05,
+ "loss": 2.6656,
+ "step": 1859
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5465930217077909,
+ "learning_rate": 5.5479144147919216e-05,
+ "loss": 2.6724,
+ "step": 1860
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5376997482569861,
+ "learning_rate": 5.51394498273336e-05,
+ "loss": 2.6298,
+ "step": 1861
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.5385324458653116,
+ "learning_rate": 5.480073796384494e-05,
+ "loss": 2.5873,
+ "step": 1862
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.6128694428166196,
+ "learning_rate": 5.446300930548492e-05,
+ "loss": 2.6758,
+ "step": 1863
+ },
+ {
+ "epoch": 0.85,
+ "grad_norm": 0.565728493901671,
+ "learning_rate": 5.412626459811415e-05,
+ "loss": 2.6221,
+ "step": 1864
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.561917637579157,
+ "learning_rate": 5.3790504585419954e-05,
+ "loss": 2.6401,
+ "step": 1865
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5540313083764529,
+ "learning_rate": 5.345573000891541e-05,
+ "loss": 2.5593,
+ "step": 1866
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5744651038959714,
+ "learning_rate": 5.312194160793693e-05,
+ "loss": 2.6149,
+ "step": 1867
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.555728674263838,
+ "learning_rate": 5.278914011964303e-05,
+ "loss": 2.692,
+ "step": 1868
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5747536830564772,
+ "learning_rate": 5.2457326279013006e-05,
+ "loss": 2.7261,
+ "step": 1869
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5680417469566157,
+ "learning_rate": 5.2126500818844514e-05,
+ "loss": 2.5819,
+ "step": 1870
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5653359070758456,
+ "learning_rate": 5.1796664469752566e-05,
+ "loss": 2.6669,
+ "step": 1871
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5576077609092146,
+ "learning_rate": 5.1467817960167975e-05,
+ "loss": 2.4854,
+ "step": 1872
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5575213830188462,
+ "learning_rate": 5.113996201633536e-05,
+ "loss": 2.6286,
+ "step": 1873
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5450600362465774,
+ "learning_rate": 5.0813097362311765e-05,
+ "loss": 2.6339,
+ "step": 1874
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5293144625593531,
+ "learning_rate": 5.048722471996475e-05,
+ "loss": 2.5199,
+ "step": 1875
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5506130282725965,
+ "learning_rate": 5.016234480897158e-05,
+ "loss": 2.6013,
+ "step": 1876
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5515824496578585,
+ "learning_rate": 4.9838458346816664e-05,
+ "loss": 2.5965,
+ "step": 1877
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5638673406490248,
+ "learning_rate": 4.9515566048790485e-05,
+ "loss": 2.7194,
+ "step": 1878
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5597673939948471,
+ "learning_rate": 4.9193668627988074e-05,
+ "loss": 2.6027,
+ "step": 1879
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5555095251873071,
+ "learning_rate": 4.887276679530744e-05,
+ "loss": 2.5756,
+ "step": 1880
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5797999831068436,
+ "learning_rate": 4.855286125944752e-05,
+ "loss": 2.6448,
+ "step": 1881
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.566653169360532,
+ "learning_rate": 4.8233952726907224e-05,
+ "loss": 2.5706,
+ "step": 1882
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.546248178287042,
+ "learning_rate": 4.7916041901983565e-05,
+ "loss": 2.6407,
+ "step": 1883
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.556134752042644,
+ "learning_rate": 4.7599129486770145e-05,
+ "loss": 2.6353,
+ "step": 1884
+ },
+ {
+ "epoch": 0.86,
+ "grad_norm": 0.5395160196183679,
+ "learning_rate": 4.728321618115555e-05,
+ "loss": 2.5826,
+ "step": 1885
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5441132004996001,
+ "learning_rate": 4.696830268282204e-05,
+ "loss": 2.6388,
+ "step": 1886
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5457343814525684,
+ "learning_rate": 4.665438968724361e-05,
+ "loss": 2.5419,
+ "step": 1887
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5559220140590277,
+ "learning_rate": 4.634147788768489e-05,
+ "loss": 2.6599,
+ "step": 1888
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.56038111484417,
+ "learning_rate": 4.6029567975199414e-05,
+ "loss": 2.6585,
+ "step": 1889
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5707031086434,
+ "learning_rate": 4.571866063862795e-05,
+ "loss": 2.5907,
+ "step": 1890
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5778647507623041,
+ "learning_rate": 4.540875656459703e-05,
+ "loss": 2.615,
+ "step": 1891
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5580906433187225,
+ "learning_rate": 4.509985643751785e-05,
+ "loss": 2.554,
+ "step": 1892
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5340167900556103,
+ "learning_rate": 4.479196093958421e-05,
+ "loss": 2.5467,
+ "step": 1893
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5415175921688076,
+ "learning_rate": 4.4485070750771187e-05,
+ "loss": 2.6266,
+ "step": 1894
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5320737601625204,
+ "learning_rate": 4.417918654883363e-05,
+ "loss": 2.6269,
+ "step": 1895
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5484890170694927,
+ "learning_rate": 4.3874309009305e-05,
+ "loss": 2.6587,
+ "step": 1896
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5404961269753865,
+ "learning_rate": 4.357043880549538e-05,
+ "loss": 2.5962,
+ "step": 1897
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5659249952145858,
+ "learning_rate": 4.326757660849012e-05,
+ "loss": 2.5435,
+ "step": 1898
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5832755132000856,
+ "learning_rate": 4.2965723087148635e-05,
+ "loss": 2.6425,
+ "step": 1899
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5612634462750079,
+ "learning_rate": 4.266487890810256e-05,
+ "loss": 2.5898,
+ "step": 1900
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5163529465472395,
+ "learning_rate": 4.2365044735754365e-05,
+ "loss": 2.6262,
+ "step": 1901
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5549367226778276,
+ "learning_rate": 4.2066221232276266e-05,
+ "loss": 2.6214,
+ "step": 1902
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5701982005948943,
+ "learning_rate": 4.176840905760815e-05,
+ "loss": 2.5619,
+ "step": 1903
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5605467505726942,
+ "learning_rate": 4.1471608869456443e-05,
+ "loss": 2.5536,
+ "step": 1904
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5554422965625773,
+ "learning_rate": 4.117582132329284e-05,
+ "loss": 2.584,
+ "step": 1905
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.548234285706048,
+ "learning_rate": 4.088104707235263e-05,
+ "loss": 2.5208,
+ "step": 1906
+ },
+ {
+ "epoch": 0.87,
+ "grad_norm": 0.5736323684411657,
+ "learning_rate": 4.058728676763313e-05,
+ "loss": 2.6048,
+ "step": 1907
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5336417815764188,
+ "learning_rate": 4.0294541057892375e-05,
+ "loss": 2.5608,
+ "step": 1908
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5658236931945755,
+ "learning_rate": 4.000281058964794e-05,
+ "loss": 2.6635,
+ "step": 1909
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5608256544030341,
+ "learning_rate": 3.971209600717507e-05,
+ "loss": 2.5535,
+ "step": 1910
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5748952501024838,
+ "learning_rate": 3.9422397952505465e-05,
+ "loss": 2.7331,
+ "step": 1911
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5749733120053022,
+ "learning_rate": 3.913371706542596e-05,
+ "loss": 2.5722,
+ "step": 1912
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5278295237135808,
+ "learning_rate": 3.884605398347707e-05,
+ "loss": 2.6533,
+ "step": 1913
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5543588962568842,
+ "learning_rate": 3.8559409341951456e-05,
+ "loss": 2.6164,
+ "step": 1914
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5949226489812678,
+ "learning_rate": 3.8273783773892404e-05,
+ "loss": 2.7377,
+ "step": 1915
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5323498199290453,
+ "learning_rate": 3.798917791009293e-05,
+ "loss": 2.6285,
+ "step": 1916
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.563348606963326,
+ "learning_rate": 3.770559237909393e-05,
+ "loss": 2.6438,
+ "step": 1917
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5187195495450032,
+ "learning_rate": 3.742302780718288e-05,
+ "loss": 2.579,
+ "step": 1918
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5488845723888442,
+ "learning_rate": 3.7141484818392635e-05,
+ "loss": 2.6211,
+ "step": 1919
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5841904034758841,
+ "learning_rate": 3.686096403449973e-05,
+ "loss": 2.7047,
+ "step": 1920
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5474220941034659,
+ "learning_rate": 3.658146607502344e-05,
+ "loss": 2.5843,
+ "step": 1921
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5892908011748751,
+ "learning_rate": 3.630299155722411e-05,
+ "loss": 2.7218,
+ "step": 1922
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5284963043794547,
+ "learning_rate": 3.6025541096101676e-05,
+ "loss": 2.6381,
+ "step": 1923
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5540251586144554,
+ "learning_rate": 3.574911530439473e-05,
+ "loss": 2.7206,
+ "step": 1924
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.547543586283262,
+ "learning_rate": 3.5473714792578606e-05,
+ "loss": 2.5744,
+ "step": 1925
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5258221256374013,
+ "learning_rate": 3.519934016886478e-05,
+ "loss": 2.5755,
+ "step": 1926
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5923978559455237,
+ "learning_rate": 3.4925992039198776e-05,
+ "loss": 2.5743,
+ "step": 1927
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5445422820473839,
+ "learning_rate": 3.465367100725908e-05,
+ "loss": 2.6345,
+ "step": 1928
+ },
+ {
+ "epoch": 0.88,
+ "grad_norm": 0.5714715217198404,
+ "learning_rate": 3.438237767445618e-05,
+ "loss": 2.5756,
+ "step": 1929
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5636338269073103,
+ "learning_rate": 3.4112112639930804e-05,
+ "loss": 2.652,
+ "step": 1930
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5569823094512336,
+ "learning_rate": 3.3842876500552564e-05,
+ "loss": 2.6339,
+ "step": 1931
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.549198770775433,
+ "learning_rate": 3.357466985091906e-05,
+ "loss": 2.675,
+ "step": 1932
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5558372522382634,
+ "learning_rate": 3.330749328335414e-05,
+ "loss": 2.6178,
+ "step": 1933
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5618887762122602,
+ "learning_rate": 3.304134738790659e-05,
+ "loss": 2.5464,
+ "step": 1934
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5815484656885673,
+ "learning_rate": 3.277623275234953e-05,
+ "loss": 2.6934,
+ "step": 1935
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5767150180503202,
+ "learning_rate": 3.2512149962177994e-05,
+ "loss": 2.5918,
+ "step": 1936
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5708263055908799,
+ "learning_rate": 3.224909960060851e-05,
+ "loss": 2.658,
+ "step": 1937
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5713859977920954,
+ "learning_rate": 3.198708224857755e-05,
+ "loss": 2.5925,
+ "step": 1938
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5786494780437252,
+ "learning_rate": 3.172609848474023e-05,
+ "loss": 2.6289,
+ "step": 1939
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5412803674694997,
+ "learning_rate": 3.1466148885468895e-05,
+ "loss": 2.601,
+ "step": 1940
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5677518875545632,
+ "learning_rate": 3.120723402485198e-05,
+ "loss": 2.5157,
+ "step": 1941
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5369470045006737,
+ "learning_rate": 3.094935447469294e-05,
+ "loss": 2.48,
+ "step": 1942
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5414999957089642,
+ "learning_rate": 3.069251080450863e-05,
+ "loss": 2.5812,
+ "step": 1943
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5703195241957268,
+ "learning_rate": 3.0436703581528113e-05,
+ "loss": 2.56,
+ "step": 1944
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5308083448069597,
+ "learning_rate": 3.0181933370691694e-05,
+ "loss": 2.5313,
+ "step": 1945
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.6305228273904662,
+ "learning_rate": 2.9928200734649523e-05,
+ "loss": 2.6101,
+ "step": 1946
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5703336386914977,
+ "learning_rate": 2.9675506233760142e-05,
+ "loss": 2.6373,
+ "step": 1947
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5765640918305002,
+ "learning_rate": 2.942385042608925e-05,
+ "loss": 2.6079,
+ "step": 1948
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5598241949294356,
+ "learning_rate": 2.9173233867409054e-05,
+ "loss": 2.545,
+ "step": 1949
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5678715812727344,
+ "learning_rate": 2.892365711119638e-05,
+ "loss": 2.6138,
+ "step": 1950
+ },
+ {
+ "epoch": 0.89,
+ "grad_norm": 0.5786734644760139,
+ "learning_rate": 2.8675120708631596e-05,
+ "loss": 2.7019,
+ "step": 1951
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5540140426315893,
+ "learning_rate": 2.8427625208597764e-05,
+ "loss": 2.6644,
+ "step": 1952
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.553583199893664,
+ "learning_rate": 2.8181171157678874e-05,
+ "loss": 2.5402,
+ "step": 1953
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5575865710103217,
+ "learning_rate": 2.7935759100159053e-05,
+ "loss": 2.664,
+ "step": 1954
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5554717486693093,
+ "learning_rate": 2.7691389578021365e-05,
+ "loss": 2.657,
+ "step": 1955
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5583210849179047,
+ "learning_rate": 2.7448063130946223e-05,
+ "loss": 2.4653,
+ "step": 1956
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5412067199731216,
+ "learning_rate": 2.7205780296310544e-05,
+ "loss": 2.5567,
+ "step": 1957
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.55625528253911,
+ "learning_rate": 2.6964541609186378e-05,
+ "loss": 2.5468,
+ "step": 1958
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5589387151421397,
+ "learning_rate": 2.6724347602340104e-05,
+ "loss": 2.6495,
+ "step": 1959
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5700018897605778,
+ "learning_rate": 2.6485198806230682e-05,
+ "loss": 2.5676,
+ "step": 1960
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5550350043330647,
+ "learning_rate": 2.6247095749008797e-05,
+ "loss": 2.702,
+ "step": 1961
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5553559995117753,
+ "learning_rate": 2.6010038956515826e-05,
+ "loss": 2.5779,
+ "step": 1962
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5586032531347148,
+ "learning_rate": 2.5774028952282423e-05,
+ "loss": 2.6463,
+ "step": 1963
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5819573899832859,
+ "learning_rate": 2.5539066257527277e-05,
+ "loss": 2.6219,
+ "step": 1964
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.570469545623377,
+ "learning_rate": 2.530515139115652e-05,
+ "loss": 2.5564,
+ "step": 1965
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5323936406036233,
+ "learning_rate": 2.5072284869761874e-05,
+ "loss": 2.6222,
+ "step": 1966
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5649571504661642,
+ "learning_rate": 2.4840467207619786e-05,
+ "loss": 2.545,
+ "step": 1967
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5383741778359044,
+ "learning_rate": 2.460969891669068e-05,
+ "loss": 2.7287,
+ "step": 1968
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5776750777006242,
+ "learning_rate": 2.4379980506617272e-05,
+ "loss": 2.6357,
+ "step": 1969
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5383209832443907,
+ "learning_rate": 2.4151312484723464e-05,
+ "loss": 2.6186,
+ "step": 1970
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5515820712853415,
+ "learning_rate": 2.3923695356013798e-05,
+ "loss": 2.5774,
+ "step": 1971
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5515145921446176,
+ "learning_rate": 2.3697129623171833e-05,
+ "loss": 2.654,
+ "step": 1972
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 0.5546555397613108,
+ "learning_rate": 2.3471615786559042e-05,
+ "loss": 2.6185,
+ "step": 1973
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5432639152693998,
+ "learning_rate": 2.3247154344213818e-05,
+ "loss": 2.5488,
+ "step": 1974
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5676140348441109,
+ "learning_rate": 2.3023745791850625e-05,
+ "loss": 2.5906,
+ "step": 1975
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5359552127588788,
+ "learning_rate": 2.2801390622858354e-05,
+ "loss": 2.53,
+ "step": 1976
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5563462736046625,
+ "learning_rate": 2.2580089328299746e-05,
+ "loss": 2.6612,
+ "step": 1977
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5738741312851797,
+ "learning_rate": 2.235984239690997e-05,
+ "loss": 2.5911,
+ "step": 1978
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.6041119881334491,
+ "learning_rate": 2.2140650315095934e-05,
+ "loss": 2.5549,
+ "step": 1979
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5643772450688785,
+ "learning_rate": 2.192251356693459e-05,
+ "loss": 2.6162,
+ "step": 1980
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5753423188167993,
+ "learning_rate": 2.170543263417246e-05,
+ "loss": 2.6977,
+ "step": 1981
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5659412879568343,
+ "learning_rate": 2.1489407996224286e-05,
+ "loss": 2.5832,
+ "step": 1982
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.595598053777555,
+ "learning_rate": 2.127444013017199e-05,
+ "loss": 2.6555,
+ "step": 1983
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5798754753275857,
+ "learning_rate": 2.1060529510763648e-05,
+ "loss": 2.6103,
+ "step": 1984
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.584147449452292,
+ "learning_rate": 2.084767661041259e-05,
+ "loss": 2.5332,
+ "step": 1985
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.60196740428947,
+ "learning_rate": 2.063588189919596e-05,
+ "loss": 2.6075,
+ "step": 1986
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5482427737178445,
+ "learning_rate": 2.0425145844854275e-05,
+ "loss": 2.6379,
+ "step": 1987
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5607081909696284,
+ "learning_rate": 2.0215468912789693e-05,
+ "loss": 2.624,
+ "step": 1988
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5593411189123502,
+ "learning_rate": 2.0006851566065575e-05,
+ "loss": 2.541,
+ "step": 1989
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5654861259582422,
+ "learning_rate": 1.9799294265405166e-05,
+ "loss": 2.5475,
+ "step": 1990
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5788071402697547,
+ "learning_rate": 1.9592797469190572e-05,
+ "loss": 2.6639,
+ "step": 1991
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5408683455456988,
+ "learning_rate": 1.938736163346194e-05,
+ "loss": 2.5134,
+ "step": 1992
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5430106968101099,
+ "learning_rate": 1.9182987211916246e-05,
+ "loss": 2.6233,
+ "step": 1993
+ },
+ {
+ "epoch": 0.91,
+ "grad_norm": 0.5422432728401679,
+ "learning_rate": 1.8979674655906332e-05,
+ "loss": 2.6307,
+ "step": 1994
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.583073805234189,
+ "learning_rate": 1.8777424414440024e-05,
+ "loss": 2.6182,
+ "step": 1995
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5791752278489167,
+ "learning_rate": 1.8576236934179202e-05,
+ "loss": 2.5133,
+ "step": 1996
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5664425614757755,
+ "learning_rate": 1.8376112659438393e-05,
+ "loss": 2.5911,
+ "step": 1997
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.6037602975112603,
+ "learning_rate": 1.8177052032184282e-05,
+ "loss": 2.6039,
+ "step": 1998
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5282021780354484,
+ "learning_rate": 1.7979055492034435e-05,
+ "loss": 2.5463,
+ "step": 1999
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5594585572464797,
+ "learning_rate": 1.7782123476256407e-05,
+ "loss": 2.6044,
+ "step": 2000
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5468791615059586,
+ "learning_rate": 1.7586256419766965e-05,
+ "loss": 2.4832,
+ "step": 2001
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5699449854950449,
+ "learning_rate": 1.7391454755130766e-05,
+ "loss": 2.6199,
+ "step": 2002
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.6055361068323695,
+ "learning_rate": 1.7197718912559557e-05,
+ "loss": 2.654,
+ "step": 2003
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.550062263239398,
+ "learning_rate": 1.700504931991148e-05,
+ "loss": 2.5736,
+ "step": 2004
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5585744144969212,
+ "learning_rate": 1.681344640268978e-05,
+ "loss": 2.5365,
+ "step": 2005
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5835383094196378,
+ "learning_rate": 1.6622910584041974e-05,
+ "loss": 2.6064,
+ "step": 2006
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5495878980535746,
+ "learning_rate": 1.6433442284758903e-05,
+ "loss": 2.7435,
+ "step": 2007
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5728677402158682,
+ "learning_rate": 1.624504192327392e-05,
+ "loss": 2.6706,
+ "step": 2008
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5827190995800148,
+ "learning_rate": 1.6057709915661856e-05,
+ "loss": 2.6244,
+ "step": 2009
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5576657271073403,
+ "learning_rate": 1.5871446675638057e-05,
+ "loss": 2.6192,
+ "step": 2010
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5668046744903577,
+ "learning_rate": 1.5686252614557638e-05,
+ "loss": 2.6168,
+ "step": 2011
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5578617827113114,
+ "learning_rate": 1.5502128141414497e-05,
+ "loss": 2.629,
+ "step": 2012
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5629790073525897,
+ "learning_rate": 1.5319073662840188e-05,
+ "loss": 2.611,
+ "step": 2013
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5742780040458331,
+ "learning_rate": 1.5137089583103391e-05,
+ "loss": 2.6562,
+ "step": 2014
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.5840306678585309,
+ "learning_rate": 1.4956176304108893e-05,
+ "loss": 2.5728,
+ "step": 2015
+ },
+ {
+ "epoch": 0.92,
+ "grad_norm": 0.565662016557998,
+ "learning_rate": 1.4776334225396481e-05,
+ "loss": 2.5833,
+ "step": 2016
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5473874428764963,
+ "learning_rate": 1.4597563744140397e-05,
+ "loss": 2.6196,
+ "step": 2017
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5742914832325806,
+ "learning_rate": 1.4419865255148269e-05,
+ "loss": 2.6388,
+ "step": 2018
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5832428790768223,
+ "learning_rate": 1.4243239150860122e-05,
+ "loss": 2.6109,
+ "step": 2019
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5474813761774111,
+ "learning_rate": 1.4067685821347932e-05,
+ "loss": 2.6495,
+ "step": 2020
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5743090296722626,
+ "learning_rate": 1.389320565431429e-05,
+ "loss": 2.4992,
+ "step": 2021
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5611969145396326,
+ "learning_rate": 1.3719799035091851e-05,
+ "loss": 2.6273,
+ "step": 2022
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5642264550228595,
+ "learning_rate": 1.3547466346642278e-05,
+ "loss": 2.6551,
+ "step": 2023
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5578223016295393,
+ "learning_rate": 1.3376207969555577e-05,
+ "loss": 2.5434,
+ "step": 2024
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5591708618447149,
+ "learning_rate": 1.32060242820492e-05,
+ "loss": 2.5776,
+ "step": 2025
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5281557235890622,
+ "learning_rate": 1.3036915659967118e-05,
+ "loss": 2.6204,
+ "step": 2026
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5409089565451778,
+ "learning_rate": 1.2868882476779087e-05,
+ "loss": 2.6111,
+ "step": 2027
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.556044649290045,
+ "learning_rate": 1.2701925103579815e-05,
+ "loss": 2.5592,
+ "step": 2028
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5713805225054195,
+ "learning_rate": 1.2536043909088191e-05,
+ "loss": 2.6052,
+ "step": 2029
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5790642069738291,
+ "learning_rate": 1.2371239259646228e-05,
+ "loss": 2.5494,
+ "step": 2030
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.585255255102651,
+ "learning_rate": 1.2207511519218672e-05,
+ "loss": 2.6427,
+ "step": 2031
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5649573174523728,
+ "learning_rate": 1.2044861049391676e-05,
+ "loss": 2.6349,
+ "step": 2032
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5786562186417628,
+ "learning_rate": 1.1883288209372512e-05,
+ "loss": 2.5655,
+ "step": 2033
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5579688162203225,
+ "learning_rate": 1.1722793355988471e-05,
+ "loss": 2.5627,
+ "step": 2034
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5370557926554806,
+ "learning_rate": 1.1563376843686135e-05,
+ "loss": 2.6402,
+ "step": 2035
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5529135492195391,
+ "learning_rate": 1.140503902453055e-05,
+ "loss": 2.6197,
+ "step": 2036
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5971178712109217,
+ "learning_rate": 1.1247780248204665e-05,
+ "loss": 2.5404,
+ "step": 2037
+ },
+ {
+ "epoch": 0.93,
+ "grad_norm": 0.5662732256894579,
+ "learning_rate": 1.1091600862008333e-05,
+ "loss": 2.6307,
+ "step": 2038
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5753445115564004,
+ "learning_rate": 1.0936501210857652e-05,
+ "loss": 2.5623,
+ "step": 2039
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5460918727883507,
+ "learning_rate": 1.0782481637284013e-05,
+ "loss": 2.5622,
+ "step": 2040
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5366200522676484,
+ "learning_rate": 1.0629542481433663e-05,
+ "loss": 2.5414,
+ "step": 2041
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5443654657351544,
+ "learning_rate": 1.0477684081066751e-05,
+ "loss": 2.5711,
+ "step": 2042
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5731169615476871,
+ "learning_rate": 1.0326906771556566e-05,
+ "loss": 2.587,
+ "step": 2043
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5727770499153597,
+ "learning_rate": 1.017721088588891e-05,
+ "loss": 2.6988,
+ "step": 2044
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5696538920467711,
+ "learning_rate": 1.0028596754661334e-05,
+ "loss": 2.4932,
+ "step": 2045
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5371149279838393,
+ "learning_rate": 9.881064706082298e-06,
+ "loss": 2.5983,
+ "step": 2046
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.563129387514548,
+ "learning_rate": 9.734615065970454e-06,
+ "loss": 2.5931,
+ "step": 2047
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5668196886222572,
+ "learning_rate": 9.58924815775425e-06,
+ "loss": 2.5436,
+ "step": 2048
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5696311629164159,
+ "learning_rate": 9.444964302470715e-06,
+ "loss": 2.6363,
+ "step": 2049
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5623287843579446,
+ "learning_rate": 9.301763818765018e-06,
+ "loss": 2.6385,
+ "step": 2050
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5929867465817565,
+ "learning_rate": 9.15964702288996e-06,
+ "loss": 2.7061,
+ "step": 2051
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5708810810824109,
+ "learning_rate": 9.018614228704925e-06,
+ "loss": 2.677,
+ "step": 2052
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.535918052656494,
+ "learning_rate": 8.878665747675152e-06,
+ "loss": 2.516,
+ "step": 2053
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5546241578958482,
+ "learning_rate": 8.739801888871469e-06,
+ "loss": 2.6661,
+ "step": 2054
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5567785062752941,
+ "learning_rate": 8.602022958969336e-06,
+ "loss": 2.6273,
+ "step": 2055
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5448922601135263,
+ "learning_rate": 8.465329262248078e-06,
+ "loss": 2.6086,
+ "step": 2056
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.552292675792888,
+ "learning_rate": 8.32972110059027e-06,
+ "loss": 2.6343,
+ "step": 2057
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.561376888209488,
+ "learning_rate": 8.195198773481406e-06,
+ "loss": 2.567,
+ "step": 2058
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5819295374372363,
+ "learning_rate": 8.061762578008613e-06,
+ "loss": 2.6298,
+ "step": 2059
+ },
+ {
+ "epoch": 0.94,
+ "grad_norm": 0.5326175076295221,
+ "learning_rate": 7.929412808860559e-06,
+ "loss": 2.5034,
+ "step": 2060
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5797491706004261,
+ "learning_rate": 7.79814975832649e-06,
+ "loss": 2.5665,
+ "step": 2061
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.57193157490852,
+ "learning_rate": 7.667973716295851e-06,
+ "loss": 2.567,
+ "step": 2062
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.538564404834179,
+ "learning_rate": 7.5388849702571205e-06,
+ "loss": 2.5525,
+ "step": 2063
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5883414851403509,
+ "learning_rate": 7.4108838052979185e-06,
+ "loss": 2.5515,
+ "step": 2064
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5478563367474956,
+ "learning_rate": 7.283970504103732e-06,
+ "loss": 2.5973,
+ "step": 2065
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5495135966394802,
+ "learning_rate": 7.1581453469575785e-06,
+ "loss": 2.6312,
+ "step": 2066
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5576200541002543,
+ "learning_rate": 7.033408611739456e-06,
+ "loss": 2.6026,
+ "step": 2067
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5724250187046992,
+ "learning_rate": 6.909760573925561e-06,
+ "loss": 2.7012,
+ "step": 2068
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5823113108747624,
+ "learning_rate": 6.787201506587626e-06,
+ "loss": 2.6885,
+ "step": 2069
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.6150148014329102,
+ "learning_rate": 6.66573168039264e-06,
+ "loss": 2.6405,
+ "step": 2070
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5703291555552336,
+ "learning_rate": 6.545351363601959e-06,
+ "loss": 2.6393,
+ "step": 2071
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.558210757437288,
+ "learning_rate": 6.426060822070812e-06,
+ "loss": 2.6657,
+ "step": 2072
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5769210607972206,
+ "learning_rate": 6.3078603192475716e-06,
+ "loss": 2.5515,
+ "step": 2073
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5570014480852326,
+ "learning_rate": 6.1907501161735934e-06,
+ "loss": 2.594,
+ "step": 2074
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5545379674171393,
+ "learning_rate": 6.074730471482049e-06,
+ "loss": 2.5655,
+ "step": 2075
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.54907370488049,
+ "learning_rate": 5.959801641397755e-06,
+ "loss": 2.5946,
+ "step": 2076
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5811989138676003,
+ "learning_rate": 5.845963879736627e-06,
+ "loss": 2.5009,
+ "step": 2077
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5544534333870564,
+ "learning_rate": 5.733217437904892e-06,
+ "loss": 2.5362,
+ "step": 2078
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.575375017553867,
+ "learning_rate": 5.621562564898597e-06,
+ "loss": 2.5593,
+ "step": 2079
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.6016595488047402,
+ "learning_rate": 5.51099950730316e-06,
+ "loss": 2.6339,
+ "step": 2080
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5474253167053842,
+ "learning_rate": 5.401528509292763e-06,
+ "loss": 2.6224,
+ "step": 2081
+ },
+ {
+ "epoch": 0.95,
+ "grad_norm": 0.5530691588081266,
+ "learning_rate": 5.2931498126298495e-06,
+ "loss": 2.5955,
+ "step": 2082
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5562050791935046,
+ "learning_rate": 5.1858636566645135e-06,
+ "loss": 2.6917,
+ "step": 2083
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5217604552340692,
+ "learning_rate": 5.0796702783340035e-06,
+ "loss": 2.584,
+ "step": 2084
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5666016806480639,
+ "learning_rate": 4.97456991216233e-06,
+ "loss": 2.5472,
+ "step": 2085
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5652803803620395,
+ "learning_rate": 4.870562790259325e-06,
+ "loss": 2.5567,
+ "step": 2086
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5842044033424149,
+ "learning_rate": 4.7676491423208625e-06,
+ "loss": 2.6401,
+ "step": 2087
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.562770883080947,
+ "learning_rate": 4.66582919562758e-06,
+ "loss": 2.4939,
+ "step": 2088
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5484103346356,
+ "learning_rate": 4.5651031750448825e-06,
+ "loss": 2.5868,
+ "step": 2089
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.590502732538457,
+ "learning_rate": 4.465471303022217e-06,
+ "loss": 2.6658,
+ "step": 2090
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5392488344319273,
+ "learning_rate": 4.366933799592743e-06,
+ "loss": 2.568,
+ "step": 2091
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5601854038391295,
+ "learning_rate": 4.269490882372551e-06,
+ "loss": 2.6078,
+ "step": 2092
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5369166153756106,
+ "learning_rate": 4.1731427665606115e-06,
+ "loss": 2.4947,
+ "step": 2093
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5737511820047038,
+ "learning_rate": 4.077889664937884e-06,
+ "loss": 2.5969,
+ "step": 2094
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.558281963674582,
+ "learning_rate": 3.983731787867207e-06,
+ "loss": 2.6377,
+ "step": 2095
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5425270059103064,
+ "learning_rate": 3.890669343292464e-06,
+ "loss": 2.5631,
+ "step": 2096
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5755389713792203,
+ "learning_rate": 3.7987025367384743e-06,
+ "loss": 2.6226,
+ "step": 2097
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5271638126428261,
+ "learning_rate": 3.707831571310327e-06,
+ "loss": 2.5256,
+ "step": 2098
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5530518242945796,
+ "learning_rate": 3.6180566476929912e-06,
+ "loss": 2.6348,
+ "step": 2099
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5749689291026111,
+ "learning_rate": 3.529377964150815e-06,
+ "loss": 2.5372,
+ "step": 2100
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5715509056340315,
+ "learning_rate": 3.441795716527307e-06,
+ "loss": 2.597,
+ "step": 2101
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5557179996764093,
+ "learning_rate": 3.355310098244302e-06,
+ "loss": 2.5676,
+ "step": 2102
+ },
+ {
+ "epoch": 0.96,
+ "grad_norm": 0.5876664933903689,
+ "learning_rate": 3.269921300301959e-06,
+ "loss": 2.6379,
+ "step": 2103
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5727898864825025,
+ "learning_rate": 3.1856295112780988e-06,
+ "loss": 2.6334,
+ "step": 2104
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.561437565959899,
+ "learning_rate": 3.102434917327812e-06,
+ "loss": 2.6564,
+ "step": 2105
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5699702891480478,
+ "learning_rate": 3.0203377021831292e-06,
+ "loss": 2.6226,
+ "step": 2106
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5501392716239842,
+ "learning_rate": 2.939338047152573e-06,
+ "loss": 2.5599,
+ "step": 2107
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5578602440758822,
+ "learning_rate": 2.8594361311206073e-06,
+ "loss": 2.5811,
+ "step": 2108
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.6037927835743853,
+ "learning_rate": 2.7806321305475225e-06,
+ "loss": 2.6589,
+ "step": 2109
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5495922254467186,
+ "learning_rate": 2.7029262194688818e-06,
+ "loss": 2.5824,
+ "step": 2110
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5994577535759268,
+ "learning_rate": 2.626318569495134e-06,
+ "loss": 2.5628,
+ "step": 2111
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5491923830903185,
+ "learning_rate": 2.550809349811334e-06,
+ "loss": 2.477,
+ "step": 2112
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5548717311043736,
+ "learning_rate": 2.476398727176532e-06,
+ "loss": 2.6395,
+ "step": 2113
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5145742075401096,
+ "learning_rate": 2.4030868659237204e-06,
+ "loss": 2.6146,
+ "step": 2114
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5690303563584321,
+ "learning_rate": 2.3308739279593317e-06,
+ "loss": 2.5855,
+ "step": 2115
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5680869325547985,
+ "learning_rate": 2.2597600727626845e-06,
+ "loss": 2.657,
+ "step": 2116
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.569408621593729,
+ "learning_rate": 2.1897454573860387e-06,
+ "loss": 2.6001,
+ "step": 2117
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5975424714823333,
+ "learning_rate": 2.1208302364538746e-06,
+ "loss": 2.5924,
+ "step": 2118
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5678170914754146,
+ "learning_rate": 2.0530145621627804e-06,
+ "loss": 2.5508,
+ "step": 2119
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5969850906819759,
+ "learning_rate": 1.9862985842810653e-06,
+ "loss": 2.5829,
+ "step": 2120
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5464213897357608,
+ "learning_rate": 1.920682450148259e-06,
+ "loss": 2.5638,
+ "step": 2121
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.533204945886039,
+ "learning_rate": 1.856166304675111e-06,
+ "loss": 2.5597,
+ "step": 2122
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5387409086860829,
+ "learning_rate": 1.792750290342926e-06,
+ "loss": 2.598,
+ "step": 2123
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.5321870162745318,
+ "learning_rate": 1.7304345472035632e-06,
+ "loss": 2.4648,
+ "step": 2124
+ },
+ {
+ "epoch": 0.97,
+ "grad_norm": 0.615407469644478,
+ "learning_rate": 1.6692192128788253e-06,
+ "loss": 2.7057,
+ "step": 2125
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5640937389787656,
+ "learning_rate": 1.6091044225604035e-06,
+ "loss": 2.6419,
+ "step": 2126
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5526559917564546,
+ "learning_rate": 1.5500903090094888e-06,
+ "loss": 2.5395,
+ "step": 2127
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5787960082357315,
+ "learning_rate": 1.492177002556383e-06,
+ "loss": 2.7413,
+ "step": 2128
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5334815177188844,
+ "learning_rate": 1.4353646311004443e-06,
+ "loss": 2.5712,
+ "step": 2129
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5770218013300737,
+ "learning_rate": 1.3796533201094752e-06,
+ "loss": 2.6552,
+ "step": 2130
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5544288424487838,
+ "learning_rate": 1.3250431926197793e-06,
+ "loss": 2.5616,
+ "step": 2131
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5760819198964946,
+ "learning_rate": 1.2715343692356607e-06,
+ "loss": 2.623,
+ "step": 2132
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5338414500489966,
+ "learning_rate": 1.2191269681292582e-06,
+ "loss": 2.5759,
+ "step": 2133
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5828763685421412,
+ "learning_rate": 1.1678211050402676e-06,
+ "loss": 2.6175,
+ "step": 2134
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5652881569850006,
+ "learning_rate": 1.117616893275719e-06,
+ "loss": 2.5748,
+ "step": 2135
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5469540710208443,
+ "learning_rate": 1.068514443709534e-06,
+ "loss": 2.5444,
+ "step": 2136
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5368199842213015,
+ "learning_rate": 1.0205138647826905e-06,
+ "loss": 2.5753,
+ "step": 2137
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5865570276536973,
+ "learning_rate": 9.73615262502503e-07,
+ "loss": 2.642,
+ "step": 2138
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5605668168900698,
+ "learning_rate": 9.278187404426763e-07,
+ "loss": 2.6098,
+ "step": 2139
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5466697240552202,
+ "learning_rate": 8.831243997431404e-07,
+ "loss": 2.6533,
+ "step": 2140
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5398369965045885,
+ "learning_rate": 8.395323391094944e-07,
+ "loss": 2.6,
+ "step": 2141
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5639106271845373,
+ "learning_rate": 7.970426548131183e-07,
+ "loss": 2.6474,
+ "step": 2142
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5692805816724555,
+ "learning_rate": 7.556554406908389e-07,
+ "loss": 2.6883,
+ "step": 2143
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.582118094538976,
+ "learning_rate": 7.153707881446536e-07,
+ "loss": 2.6251,
+ "step": 2144
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5917192021982479,
+ "learning_rate": 6.761887861417293e-07,
+ "loss": 2.5689,
+ "step": 2145
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5584357193276607,
+ "learning_rate": 6.381095212139032e-07,
+ "loss": 2.5713,
+ "step": 2146
+ },
+ {
+ "epoch": 0.98,
+ "grad_norm": 0.5576038585260222,
+ "learning_rate": 6.011330774577384e-07,
+ "loss": 2.5544,
+ "step": 2147
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5572460048731604,
+ "learning_rate": 5.652595365343016e-07,
+ "loss": 2.4765,
+ "step": 2148
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5607024720979649,
+ "learning_rate": 5.304889776688859e-07,
+ "loss": 2.6253,
+ "step": 2149
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5447052188232957,
+ "learning_rate": 4.968214776508994e-07,
+ "loss": 2.5313,
+ "step": 2150
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5611454336654501,
+ "learning_rate": 4.6425711083375454e-07,
+ "loss": 2.6776,
+ "step": 2151
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5466767361485623,
+ "learning_rate": 4.3279594913447906e-07,
+ "loss": 2.6717,
+ "step": 2152
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5577187528376797,
+ "learning_rate": 4.02438062033883e-07,
+ "loss": 2.6044,
+ "step": 2153
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5607446849435871,
+ "learning_rate": 3.7318351657616987e-07,
+ "loss": 2.5623,
+ "step": 2154
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5517146351312021,
+ "learning_rate": 3.4503237736882573e-07,
+ "loss": 2.58,
+ "step": 2155
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5603092292178588,
+ "learning_rate": 3.179847065825081e-07,
+ "loss": 2.5625,
+ "step": 2156
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5693783620812112,
+ "learning_rate": 2.9204056395104594e-07,
+ "loss": 2.4685,
+ "step": 2157
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5637952579264054,
+ "learning_rate": 2.672000067709956e-07,
+ "loss": 2.5847,
+ "step": 2158
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5325411946842509,
+ "learning_rate": 2.4346308990175204e-07,
+ "loss": 2.5384,
+ "step": 2159
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5609540323280334,
+ "learning_rate": 2.208298657653818e-07,
+ "loss": 2.6109,
+ "step": 2160
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5588085941871965,
+ "learning_rate": 1.9930038434645692e-07,
+ "loss": 2.5886,
+ "step": 2161
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5574497203529588,
+ "learning_rate": 1.7887469319205484e-07,
+ "loss": 2.6863,
+ "step": 2162
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.550018806988555,
+ "learning_rate": 1.5955283741142523e-07,
+ "loss": 2.673,
+ "step": 2163
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5132920005683055,
+ "learning_rate": 1.4133485967615655e-07,
+ "loss": 2.4689,
+ "step": 2164
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5446178222938799,
+ "learning_rate": 1.2422080021995407e-07,
+ "loss": 2.5621,
+ "step": 2165
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5697766399990973,
+ "learning_rate": 1.082106968385288e-07,
+ "loss": 2.6783,
+ "step": 2166
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5854048181430894,
+ "learning_rate": 9.330458488959748e-08,
+ "loss": 2.5511,
+ "step": 2167
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.5579299065791804,
+ "learning_rate": 7.950249729271608e-08,
+ "loss": 2.5943,
+ "step": 2168
+ },
+ {
+ "epoch": 0.99,
+ "grad_norm": 0.568465917328927,
+ "learning_rate": 6.680446452922429e-08,
+ "loss": 2.6458,
+ "step": 2169
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5568779990325727,
+ "learning_rate": 5.521051464230098e-08,
+ "loss": 2.7003,
+ "step": 2170
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.576973229349878,
+ "learning_rate": 4.4720673236631206e-08,
+ "loss": 2.5554,
+ "step": 2171
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.6097422706224399,
+ "learning_rate": 3.53349634786837e-08,
+ "loss": 2.5406,
+ "step": 2172
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.560346128797687,
+ "learning_rate": 2.7053406096433365e-08,
+ "loss": 2.6486,
+ "step": 2173
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.554247394722822,
+ "learning_rate": 1.987601937930572e-08,
+ "loss": 2.5732,
+ "step": 2174
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5801201749039946,
+ "learning_rate": 1.3802819178398984e-08,
+ "loss": 2.6181,
+ "step": 2175
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5638533512464483,
+ "learning_rate": 8.833818906039959e-09,
+ "loss": 2.5228,
+ "step": 2176
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5449136104275335,
+ "learning_rate": 4.969029536061598e-09,
+ "loss": 2.535,
+ "step": 2177
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5326423483420331,
+ "learning_rate": 2.2084596038030037e-09,
+ "loss": 2.6149,
+ "step": 2178
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5863816211561724,
+ "learning_rate": 5.521152057763601e-10,
+ "loss": 2.7574,
+ "step": 2179
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5474329241291628,
+ "learning_rate": 0.0,
+ "loss": 2.603,
+ "step": 2180
+ },
+ {
+ "epoch": 1.0,
+ "step": 2180,
+ "total_flos": 5.89735773670998e+17,
+ "train_loss": 2.8288836673859064,
+ "train_runtime": 92131.8092,
+ "train_samples_per_second": 6.058,
+ "train_steps_per_second": 0.024
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 2180,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 200,
+ "total_flos": 5.89735773670998e+17,
+ "train_batch_size": 8,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/06-13-24_XL_llava_llama7b_ft/llava_sdxl_stage2_llama7B b/06-13-24_XL_llava_llama7b_ft/llava_sdxl_stage2_llama7B
new file mode 160000
index 0000000000000000000000000000000000000000..1bd7720dbfa32706637af7188c6d622bea78f624
--- /dev/null
+++ b/06-13-24_XL_llava_llama7b_ft/llava_sdxl_stage2_llama7B
@@ -0,0 +1 @@
+Subproject commit 1bd7720dbfa32706637af7188c6d622bea78f624