Add files using upload-large-folder tool
Browse files- lr_sweep/hnet_xl_code_lr_1e-4/train.log +0 -0
- lr_sweep/hnet_xl_code_lr_2e-4/train.log +0 -0
- lr_sweep/hnet_xl_code_lr_5e-4/train.log +0 -0
- lr_sweep/hnet_xl_code_lr_5e-5/.hydra/config.yaml +55 -0
- lr_sweep/hnet_xl_code_lr_5e-5/.hydra/hydra.yaml +166 -0
- lr_sweep/hnet_xl_code_lr_5e-5/.hydra/overrides.yaml +6 -0
- lr_sweep/hnet_xl_code_lr_5e-5/train.log +0 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/debug-internal.log +41 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/debug.log +24 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/code/code_completion_exp/train_hnet/train.py +284 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/output.log +87 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/requirements.txt +245 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/wandb-metadata.json +69 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug-core.log +7 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug-internal.log +7 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug.log +19 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/code/code_completion_exp/train_hnet/train.py +284 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/config.yaml +151 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/output.log +0 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/requirements.txt +245 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/wandb-metadata.json +69 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/wandb-summary.json +1 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-core.log +16 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-internal.log +41 -0
- lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug.log +24 -0
- lr_sweep/pythia_1b_lr_1e-4/train.log +1114 -0
- lr_sweep/pythia_1b_lr_1e-5/.hydra/config.yaml +49 -0
- lr_sweep/pythia_1b_lr_1e-5/.hydra/hydra.yaml +167 -0
- lr_sweep/pythia_1b_lr_1e-5/.hydra/overrides.yaml +7 -0
- lr_sweep/pythia_1b_lr_1e-5/train.log +1259 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/debug-internal.log +13 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/debug.log +24 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/code/code_completion_exp/train_pythia/train.py +606 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/config.yaml +146 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/output.log +87 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/requirements.txt +245 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/wandb-metadata.json +70 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/wandb-summary.json +1 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug-core.log +16 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug-internal.log +13 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug.log +24 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/run-r2u423d8.wandb +0 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/output.log +1056 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/requirements.txt +245 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/wandb-metadata.json +70 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-core.log +16 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-internal.log +13 -0
- lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug.log +24 -0
- lr_sweep/pythia_1b_lr_2e-5/train.log +1259 -0
- lr_sweep/pythia_1b_lr_5e-5/train.log +1201 -0
lr_sweep/hnet_xl_code_lr_1e-4/train.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lr_sweep/hnet_xl_code_lr_2e-4/train.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lr_sweep/hnet_xl_code_lr_5e-4/train.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lr_sweep/hnet_xl_code_lr_5e-5/.hydra/config.yaml
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
model:
|
| 2 |
+
config_path: ${oc.env:PROJECT_ROOT}/hnet_project/configs/hnet_2stage_XL_code.json
|
| 3 |
+
checkpoint_path: ${oc.env:PROJECT_ROOT}/hnet_project/checkpoints/hnet_2stage_XL_code.pt
|
| 4 |
+
training:
|
| 5 |
+
epochs: 1
|
| 6 |
+
batch_size: 4
|
| 7 |
+
eval_batch_size: 24
|
| 8 |
+
gradient_accumulation_steps: 4
|
| 9 |
+
lr: 5.0e-05
|
| 10 |
+
weight_decay: 0.1
|
| 11 |
+
betas:
|
| 12 |
+
- 0.9
|
| 13 |
+
- 0.95
|
| 14 |
+
eps: 1.0e-08
|
| 15 |
+
lr_scheduler: wsd
|
| 16 |
+
warmup_ratio: 0.1
|
| 17 |
+
decay_ratio: 0.2
|
| 18 |
+
warmup_steps: 100
|
| 19 |
+
min_lr_ratio: 0.1
|
| 20 |
+
lr_multiplier:
|
| 21 |
+
- 2.0
|
| 22 |
+
- 1.5
|
| 23 |
+
- 1.0
|
| 24 |
+
load_balancing_weight: 0.01
|
| 25 |
+
load_balancing_N: 4.0
|
| 26 |
+
max_grad_norm: 1.0
|
| 27 |
+
use_amp: true
|
| 28 |
+
resume: false
|
| 29 |
+
resume_checkpoint: null
|
| 30 |
+
warmup_model: true
|
| 31 |
+
data:
|
| 32 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 33 |
+
max_context_len: 4096
|
| 34 |
+
max_target_len: 256
|
| 35 |
+
num_workers: 0
|
| 36 |
+
pin_memory: true
|
| 37 |
+
max_train_samples: null
|
| 38 |
+
max_val_samples: 2000
|
| 39 |
+
logging:
|
| 40 |
+
log_interval: 10
|
| 41 |
+
save_interval: 0
|
| 42 |
+
eval_interval: 2000
|
| 43 |
+
save_every_epoch: false
|
| 44 |
+
tracking:
|
| 45 |
+
enabled: true
|
| 46 |
+
backend: wandb
|
| 47 |
+
project: code-completion_lr-sweep
|
| 48 |
+
run_name: hnet_xl_code_lr_5e-5
|
| 49 |
+
entity: null
|
| 50 |
+
base_url: https://wandb.platun0v.ru
|
| 51 |
+
local_dir: ${paths.output_dir}
|
| 52 |
+
paths:
|
| 53 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 54 |
+
seed: 42
|
| 55 |
+
device: cuda
|
lr_sweep/hnet_xl_code_lr_5e-5/.hydra/hydra.yaml
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
hydra:
|
| 2 |
+
run:
|
| 3 |
+
dir: ${paths.output_dir}
|
| 4 |
+
sweep:
|
| 5 |
+
dir: outputs/multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
|
| 6 |
+
subdir: ${hydra.job.num}
|
| 7 |
+
launcher:
|
| 8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
| 9 |
+
sweeper:
|
| 10 |
+
_target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
|
| 11 |
+
max_batch_size: null
|
| 12 |
+
params: null
|
| 13 |
+
help:
|
| 14 |
+
app_name: ${hydra.job.name}
|
| 15 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
| 16 |
+
|
| 17 |
+
'
|
| 18 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
| 19 |
+
|
| 20 |
+
Use --hydra-help to view Hydra specific help
|
| 21 |
+
|
| 22 |
+
'
|
| 23 |
+
template: '${hydra.help.header}
|
| 24 |
+
|
| 25 |
+
== Configuration groups ==
|
| 26 |
+
|
| 27 |
+
Compose your configuration from those groups (group=option)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
$APP_CONFIG_GROUPS
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
== Config ==
|
| 34 |
+
|
| 35 |
+
Override anything in the config (foo.bar=value)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
$CONFIG
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
${hydra.help.footer}
|
| 42 |
+
|
| 43 |
+
'
|
| 44 |
+
hydra_help:
|
| 45 |
+
template: 'Hydra (${hydra.runtime.version})
|
| 46 |
+
|
| 47 |
+
See https://hydra.cc for more info.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
== Flags ==
|
| 51 |
+
|
| 52 |
+
$FLAGS_HELP
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
== Configuration groups ==
|
| 56 |
+
|
| 57 |
+
Compose your configuration from those groups (For example, append hydra/job_logging=disabled
|
| 58 |
+
to command line)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
$HYDRA_CONFIG_GROUPS
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
Use ''--cfg hydra'' to Show the Hydra config.
|
| 65 |
+
|
| 66 |
+
'
|
| 67 |
+
hydra_help: ???
|
| 68 |
+
hydra_logging:
|
| 69 |
+
version: 1
|
| 70 |
+
formatters:
|
| 71 |
+
simple:
|
| 72 |
+
format: '[%(asctime)s][HYDRA] %(message)s'
|
| 73 |
+
handlers:
|
| 74 |
+
console:
|
| 75 |
+
class: logging.StreamHandler
|
| 76 |
+
formatter: simple
|
| 77 |
+
stream: ext://sys.stdout
|
| 78 |
+
root:
|
| 79 |
+
level: INFO
|
| 80 |
+
handlers:
|
| 81 |
+
- console
|
| 82 |
+
loggers:
|
| 83 |
+
logging_example:
|
| 84 |
+
level: DEBUG
|
| 85 |
+
disable_existing_loggers: false
|
| 86 |
+
job_logging:
|
| 87 |
+
version: 1
|
| 88 |
+
formatters:
|
| 89 |
+
simple:
|
| 90 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
| 91 |
+
handlers:
|
| 92 |
+
console:
|
| 93 |
+
class: logging.StreamHandler
|
| 94 |
+
formatter: simple
|
| 95 |
+
stream: ext://sys.stdout
|
| 96 |
+
file:
|
| 97 |
+
class: logging.FileHandler
|
| 98 |
+
formatter: simple
|
| 99 |
+
filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
|
| 100 |
+
root:
|
| 101 |
+
level: INFO
|
| 102 |
+
handlers:
|
| 103 |
+
- console
|
| 104 |
+
- file
|
| 105 |
+
disable_existing_loggers: false
|
| 106 |
+
env: {}
|
| 107 |
+
mode: RUN
|
| 108 |
+
searchpath: []
|
| 109 |
+
callbacks: {}
|
| 110 |
+
output_subdir: .hydra
|
| 111 |
+
overrides:
|
| 112 |
+
hydra:
|
| 113 |
+
- hydra.mode=RUN
|
| 114 |
+
task:
|
| 115 |
+
- tracking=wandb
|
| 116 |
+
- tracking.project=code-completion_lr-sweep
|
| 117 |
+
- tracking.run_name=hnet_xl_code_lr_5e-5
|
| 118 |
+
- training.lr=5e-5
|
| 119 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 120 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 121 |
+
job:
|
| 122 |
+
name: train
|
| 123 |
+
chdir: false
|
| 124 |
+
override_dirname: data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full,paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5,tracking.project=code-completion_lr-sweep,tracking.run_name=hnet_xl_code_lr_5e-5,tracking=wandb,training.lr=5e-5
|
| 125 |
+
id: ???
|
| 126 |
+
num: ???
|
| 127 |
+
config_name: config
|
| 128 |
+
env_set: {}
|
| 129 |
+
env_copy: []
|
| 130 |
+
config:
|
| 131 |
+
override_dirname:
|
| 132 |
+
kv_sep: '='
|
| 133 |
+
item_sep: ','
|
| 134 |
+
exclude_keys: []
|
| 135 |
+
runtime:
|
| 136 |
+
version: 1.3.2
|
| 137 |
+
version_base: '1.3'
|
| 138 |
+
cwd: /workspace/byte-llms-code/code_completion_exp/train_hnet
|
| 139 |
+
config_sources:
|
| 140 |
+
- path: hydra.conf
|
| 141 |
+
schema: pkg
|
| 142 |
+
provider: hydra
|
| 143 |
+
- path: /workspace/byte-llms-code/code_completion_exp/train_hnet/configs
|
| 144 |
+
schema: file
|
| 145 |
+
provider: main
|
| 146 |
+
- path: ''
|
| 147 |
+
schema: structured
|
| 148 |
+
provider: schema
|
| 149 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 150 |
+
choices:
|
| 151 |
+
paths: default
|
| 152 |
+
tracking: wandb
|
| 153 |
+
logging: default
|
| 154 |
+
data: default
|
| 155 |
+
training: default
|
| 156 |
+
model: hnet_xl_code
|
| 157 |
+
hydra/env: default
|
| 158 |
+
hydra/callbacks: null
|
| 159 |
+
hydra/job_logging: default
|
| 160 |
+
hydra/hydra_logging: default
|
| 161 |
+
hydra/hydra_help: default
|
| 162 |
+
hydra/help: default
|
| 163 |
+
hydra/sweeper: basic
|
| 164 |
+
hydra/launcher: basic
|
| 165 |
+
hydra/output: default
|
| 166 |
+
verbose: false
|
lr_sweep/hnet_xl_code_lr_5e-5/.hydra/overrides.yaml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
- tracking=wandb
|
| 2 |
+
- tracking.project=code-completion_lr-sweep
|
| 3 |
+
- tracking.run_name=hnet_xl_code_lr_5e-5
|
| 4 |
+
- training.lr=5e-5
|
| 5 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 6 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
lr_sweep/hnet_xl_code_lr_5e-5/train.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/debug-internal.log
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:03.397377041Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T18:06:03.763903636Z","level":"INFO","msg":"stream: created new stream","id":"5xd22ofy"}
|
| 3 |
+
{"time":"2026-04-25T18:06:03.76396939Z","level":"INFO","msg":"handler: started","stream_id":"5xd22ofy"}
|
| 4 |
+
{"time":"2026-04-25T18:06:03.764060758Z","level":"INFO","msg":"stream: started","id":"5xd22ofy"}
|
| 5 |
+
{"time":"2026-04-25T18:06:03.764070052Z","level":"INFO","msg":"writer: started","stream_id":"5xd22ofy"}
|
| 6 |
+
{"time":"2026-04-25T18:06:03.764074071Z","level":"INFO","msg":"sender: started","stream_id":"5xd22ofy"}
|
| 7 |
+
{"time":"2026-04-25T18:06:03.894739834Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
| 8 |
+
{"time":"2026-04-25T18:50:19.195555067Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 9 |
+
{"time":"2026-04-25T18:51:04.195795437Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 10 |
+
{"time":"2026-04-25T18:51:19.181133591Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 11 |
+
{"time":"2026-04-25T18:56:04.214926057Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 12 |
+
{"time":"2026-04-25T19:01:04.252685213Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 13 |
+
{"time":"2026-04-25T19:02:19.340852054Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 14 |
+
{"time":"2026-04-25T19:02:49.250024047Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 15 |
+
{"time":"2026-04-25T19:03:04.189138296Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 16 |
+
{"time":"2026-04-25T19:03:19.184641802Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 17 |
+
{"time":"2026-04-25T19:03:49.245246272Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 18 |
+
{"time":"2026-04-25T19:04:04.204755219Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 19 |
+
{"time":"2026-04-25T19:05:34.344593348Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 20 |
+
{"time":"2026-04-25T19:07:04.245410066Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 21 |
+
{"time":"2026-04-25T19:09:04.200628758Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 22 |
+
{"time":"2026-04-25T19:11:04.246084299Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 23 |
+
{"time":"2026-04-25T19:12:49.152243469Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 24 |
+
{"time":"2026-04-25T19:13:04.624716271Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 25 |
+
{"time":"2026-04-25T19:13:34.190147444Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 26 |
+
{"time":"2026-04-25T19:13:49.189117021Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 27 |
+
{"time":"2026-04-25T19:14:04.191888213Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 28 |
+
{"time":"2026-04-25T19:18:19.190447211Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 29 |
+
{"time":"2026-04-25T19:18:34.241234031Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 30 |
+
{"time":"2026-04-25T19:18:49.61536326Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 31 |
+
{"time":"2026-04-25T19:20:04.282694457Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 32 |
+
{"time":"2026-04-25T19:20:34.200078948Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 33 |
+
{"time":"2026-04-25T19:21:04.655700817Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 34 |
+
{"time":"2026-04-25T19:22:04.245885251Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 35 |
+
{"time":"2026-04-25T19:24:19.3404455Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 36 |
+
{"time":"2026-04-25T20:07:11.317595318Z","level":"INFO","msg":"fileTransfer: Close: file transfer manager closed"}
|
| 37 |
+
{"time":"2026-04-25T20:07:11.47211371Z","level":"INFO","msg":"handler: operation stats","stats":{}}
|
| 38 |
+
{"time":"2026-04-25T20:07:11.474506622Z","level":"INFO","msg":"stream: closing","id":"5xd22ofy"}
|
| 39 |
+
{"time":"2026-04-25T20:07:11.47451524Z","level":"INFO","msg":"handler: closed","stream_id":"5xd22ofy"}
|
| 40 |
+
{"time":"2026-04-25T20:07:11.474586904Z","level":"INFO","msg":"sender: closed","stream_id":"5xd22ofy"}
|
| 41 |
+
{"time":"2026-04-25T20:07:11.4745917Z","level":"INFO","msg":"stream: closed","id":"5xd22ofy"}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/debug.log
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Configure stats pid to 65184
|
| 3 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug.log
|
| 5 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-internal.log
|
| 6 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'config_path': '/workspace/byte-llms-code/hnet_project/configs/hnet_2stage_XL_code.json', 'checkpoint_path': '/workspace/byte-llms-code/hnet_project/checkpoints/hnet_2stage_XL_code.pt'}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 24, 'gradient_accumulation_steps': 4, 'lr': 5e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'lr_multiplier': [2.0, 1.5, 1.0], 'load_balancing_weight': 0.01, 'load_balancing_N': 4.0, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None, 'warmup_model': True}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 0, 'pin_memory': True, 'max_train_samples': None, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 0, 'eval_interval': 2000, 'save_every_epoch': False}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'hnet_xl_code_lr_5e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_hnet/train.py'}}
|
| 9 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 18:06:03,377 INFO MainThread:65184 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 18:06:03,396 INFO MainThread:65184 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 18:06:03,398 INFO MainThread:65184 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 18:06:03,413 INFO MainThread:65184 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 18:06:03,893 INFO MainThread:65184 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 18:06:04,054 INFO MainThread:65184 [wandb_init.py:init():1084] run started, returning control to user process
|
| 20 |
+
2026-04-25 20:07:10,198 INFO MainThread:65184 [wandb_run.py:_finish():2295] finishing run nikita/code-completion_lr-sweep/5xd22ofy
|
| 21 |
+
2026-04-25 20:07:10,198 INFO MainThread:65184 [wandb_run.py:_atexit_cleanup():2494] got exitcode: 0
|
| 22 |
+
2026-04-25 20:07:10,199 INFO MainThread:65184 [wandb_run.py:_restore():2476] restore
|
| 23 |
+
2026-04-25 20:07:10,199 INFO MainThread:65184 [wandb_run.py:_restore():2482] restore done
|
| 24 |
+
2026-04-25 20:07:11,474 INFO MainThread:65184 [wandb_run.py:_footer_sync_info():3870] logging synced files
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/code/code_completion_exp/train_hnet/train.py
ADDED
|
@@ -0,0 +1,284 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Training Pipeline для HNet модели на задаче Code Completion.
|
| 3 |
+
|
| 4 |
+
Конфигурация через Hydra + OmegaConf, логирование в Trackio.
|
| 5 |
+
Поддержка DDP через Accelerate для multi-GPU тренировки.
|
| 6 |
+
|
| 7 |
+
Использование:
|
| 8 |
+
# Базовый запуск (single GPU)
|
| 9 |
+
python train.py
|
| 10 |
+
|
| 11 |
+
# Multi-GPU с Accelerate
|
| 12 |
+
accelerate launch train.py
|
| 13 |
+
|
| 14 |
+
# Multi-GPU с указанием количества GPU
|
| 15 |
+
accelerate launch --num_processes=4 train.py
|
| 16 |
+
|
| 17 |
+
# Переопределение параметров через CLI
|
| 18 |
+
python train.py training.lr=1e-4 training.epochs=5
|
| 19 |
+
|
| 20 |
+
# Выбор другого конфига модели
|
| 21 |
+
python train.py model=hnet_small
|
| 22 |
+
|
| 23 |
+
# Multirun (sweep)
|
| 24 |
+
python train.py --multirun training.lr=1e-4,3e-4,1e-3
|
| 25 |
+
|
| 26 |
+
# Без логирования
|
| 27 |
+
python train.py tracking.enabled=false
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
import os
|
| 31 |
+
import math
|
| 32 |
+
from pathlib import Path
|
| 33 |
+
|
| 34 |
+
import torch
|
| 35 |
+
import hydra
|
| 36 |
+
from hydra.core.hydra_config import HydraConfig
|
| 37 |
+
from omegaconf import DictConfig, OmegaConf
|
| 38 |
+
from accelerate import Accelerator
|
| 39 |
+
from accelerate.utils import set_seed as accelerate_set_seed
|
| 40 |
+
|
| 41 |
+
# HNet imports
|
| 42 |
+
from hnet.load_utils import load_from_pretrained, load_from_config
|
| 43 |
+
from hnet.utils.tokenizers import ByteTokenizer
|
| 44 |
+
from hnet.utils.train import group_params
|
| 45 |
+
|
| 46 |
+
# Ensure repo root is on sys.path (needed when running from subdirectory)
|
| 47 |
+
import sys
|
| 48 |
+
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
|
| 49 |
+
|
| 50 |
+
# Shared training library
|
| 51 |
+
from training_lib.utils import log_message
|
| 52 |
+
from training_lib.checkpointing import save_checkpoint, load_checkpoint
|
| 53 |
+
from training_lib.schedulers import get_lr_scheduler
|
| 54 |
+
from training_lib.tracking import init_tracking, finish_tracking
|
| 55 |
+
from training_lib.hnet.train_loop import train_epoch
|
| 56 |
+
from training_lib.hnet.data import create_dataloaders
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@hydra.main(version_base=None, config_path="configs", config_name="config")
def main(cfg: DictConfig):
    """Main training entry point with DDP support via Accelerate.

    Orchestrates the full HNet training run: builds the Accelerator,
    seeds all processes, loads (or initializes) the model, creates
    dataloaders/optimizer/scheduler, wraps everything with
    ``accelerator.prepare``, optionally resumes from a checkpoint,
    runs the epoch loop, and saves the final model on the main process.

    Args:
        cfg: Hydra-composed configuration (see ``configs/config.yaml``).
            Key groups used here: ``training``, ``model``, ``paths``,
            ``logging``, ``tracking``.
    """

    # === Accelerator Setup ===
    # bf16 autocast when AMP is requested; otherwise full precision.
    mixed_precision = "bf16" if cfg.training.use_amp else "no"

    accelerator = Accelerator(
        mixed_precision=mixed_precision,
        gradient_accumulation_steps=cfg.training.gradient_accumulation_steps,
    )

    # === Setup ===
    # Seeds python/numpy/torch identically across all ranks.
    accelerate_set_seed(cfg.seed)

    # Fall back to Hydra's per-run output directory when none was given.
    if cfg.paths.output_dir is None:
        cfg.paths.output_dir = HydraConfig.get().runtime.output_dir

    # Materialize all interpolations so later code sees concrete values.
    OmegaConf.resolve(cfg)

    log_message(
        f"CUDA_VISIBLE_DEVICES: {os.environ.get('CUDA_VISIBLE_DEVICES', 'not set')}",
        cfg,
        accelerator,
    )
    log_message(f"Number of processes: {accelerator.num_processes}", cfg, accelerator)
    log_message(f"Process index: {accelerator.process_index}", cfg, accelerator)
    log_message(f"Mixed precision: {mixed_precision}", cfg, accelerator)

    log_message("=" * 60, cfg, accelerator)
    log_message(
        "HNet Training Pipeline (Hydra + Trackio + Accelerate)", cfg, accelerator
    )
    log_message("=" * 60, cfg, accelerator)
    log_message(f"Config:\n{OmegaConf.to_yaml(cfg)}", cfg, accelerator)

    # === Trackio Init ===
    init_tracking(cfg, accelerator)

    # === Tokenizer ===
    log_message("Initializing tokenizer...", cfg, accelerator)
    tokenizer = ByteTokenizer()

    # === Model ===
    log_message("Loading model...", cfg, accelerator)
    if cfg.model.checkpoint_path:
        model = load_from_pretrained(
            model_path=cfg.model.checkpoint_path,
            model_config_path=cfg.model.config_path,
        )
        log_message(f"Loaded pretrained: {cfg.model.checkpoint_path}", cfg, accelerator)
    else:
        model = load_from_config(
            model_config_path=cfg.model.config_path,
            device="cpu",
        )
        model.init_weights()
        log_message("Initialized from scratch", cfg, accelerator)

    model.train()

    # Per-stage LR multipliers must be applied BEFORE accelerator.prepare,
    # since prepare wraps the module and group_params reads the multipliers.
    lr_multiplier = list(cfg.training.lr_multiplier)
    model.apply_lr_multiplier(lr_multiplier)
    log_message(f"Applied LR multipliers: {lr_multiplier}", cfg, accelerator)

    # Warmup pass to pre-compile Triton kernels (one-time cost, see model.warmup).
    if cfg.training.warmup_model:
        log_message("Warming up model...", cfg, accelerator)
        model = model.to(accelerator.device)
        model.warmup(verbose=accelerator.is_main_process)

    # Log model info
    total_params = sum(p.numel() for p in model.parameters())
    trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    log_message(f"Total params: {total_params:,}", cfg, accelerator)
    log_message(f"Trainable params: {trainable_params:,}", cfg, accelerator)

    # === Data ===
    log_message("Creating dataloaders...", cfg, accelerator)
    dataloaders = create_dataloaders(cfg, tokenizer)

    train_dataloader = dataloaders["train"]
    val_dataloader = dataloaders.get("validation", None)

    log_message(
        f"Train dataset size: {len(train_dataloader.dataset)}", cfg, accelerator
    )
    log_message(
        f"Train batches per epoch (before DDP split): {len(train_dataloader)}",
        cfg,
        accelerator,
    )

    if val_dataloader:
        log_message(
            f"Validation dataset size: {len(val_dataloader.dataset)}", cfg, accelerator
        )
        log_message(f"Validation batches: {len(val_dataloader)}", cfg, accelerator)
    else:
        log_message("No validation dataset found", cfg, accelerator)

    # === Optimizer ===
    log_message("Creating optimizer...", cfg, accelerator)
    param_groups = group_params(model)

    # Groups that group_params tagged with an "lr" key carry a per-stage
    # multiplier; untagged groups get the base learning rate.
    # NOTE(review): when "lr" is already present it is overwritten with
    # base_lr * lr_multiplier — confirm this matches group_params' contract.
    for group in param_groups:
        if "lr" not in group:
            group["lr"] = cfg.training.lr
        else:
            group["lr"] = cfg.training.lr * group.get("lr_multiplier", 1.0)
        if "weight_decay" not in group:
            group["weight_decay"] = cfg.training.weight_decay

    optimizer = torch.optim.AdamW(
        param_groups,
        lr=cfg.training.lr,
        betas=tuple(cfg.training.betas),
        eps=cfg.training.eps,
    )

    # === Scheduler ===
    # Optimizer steps per epoch after the DDP shard and gradient accumulation.
    steps_per_epoch = math.ceil(len(train_dataloader) / accelerator.num_processes)
    total_steps = (
        cfg.training.epochs
        * steps_per_epoch
        // cfg.training.gradient_accumulation_steps
    )
    scheduler = get_lr_scheduler(optimizer, cfg, total_steps)

    log_message(
        f"Total steps: {total_steps}, Steps per epoch: {steps_per_epoch}",
        cfg,
        accelerator,
    )

    # === Accelerate Prepare ===
    log_message(
        "Preparing model, optimizer, and dataloaders with Accelerate...",
        cfg,
        accelerator,
    )

    if val_dataloader is not None:
        model, optimizer, train_dataloader, val_dataloader, scheduler = (
            accelerator.prepare(
                model, optimizer, train_dataloader, val_dataloader, scheduler
            )
        )
    else:
        model, optimizer, train_dataloader, scheduler = accelerator.prepare(
            model, optimizer, train_dataloader, scheduler
        )

    log_message(
        f"Train batches per epoch (after DDP split): {len(train_dataloader)}",
        cfg,
        accelerator,
    )

    # === Resume ===
    global_step = 0
    start_epoch = 1

    if cfg.training.resume and cfg.training.resume_checkpoint:
        global_step, start_epoch = load_checkpoint(
            model,
            optimizer,
            scheduler,
            cfg.training.resume_checkpoint,
            cfg,
            accelerator,
        )
        # Checkpoint stores the last COMPLETED epoch; continue from the next one.
        start_epoch += 1

    # === Training Loop ===
    log_message("Starting training...", cfg, accelerator)

    best_val_loss = float("inf")

    # Pre-bind epoch so the KeyboardInterrupt handler below can always
    # reference it — previously a Ctrl-C before the first loop iteration
    # (e.g. resuming past cfg.training.epochs) raised NameError and the
    # emergency checkpoint was lost.
    epoch = start_epoch - 1

    try:
        for epoch in range(start_epoch, cfg.training.epochs + 1):
            log_message(f"\n{'=' * 60}", cfg, accelerator)
            log_message(f"EPOCH {epoch}/{cfg.training.epochs}", cfg, accelerator)
            log_message(f"{'=' * 60}", cfg, accelerator)

            global_step, best_val_loss = train_epoch(
                model=model,
                dataloader=train_dataloader,
                optimizer=optimizer,
                scheduler=scheduler,
                cfg=cfg,
                epoch=epoch,
                global_step=global_step,
                accelerator=accelerator,
                val_dataloader=val_dataloader,
                best_val_loss=best_val_loss,
            )

            if cfg.logging.save_every_epoch:
                save_checkpoint(
                    model, optimizer, scheduler, global_step, epoch, cfg, accelerator
                )

    except KeyboardInterrupt:
        # Best-effort emergency checkpoint on Ctrl-C.
        log_message("Training interrupted by user", cfg, accelerator)
        save_checkpoint(
            model, optimizer, scheduler, global_step, epoch, cfg, accelerator
        )

    # === Final Save ===
    log_message("\nTraining completed!", cfg, accelerator)

    if accelerator.is_main_process:
        final_model_path = Path(cfg.paths.output_dir) / "model_final.pt"
        # Unwrap the DDP container so the state_dict has clean key names.
        unwrapped_model = accelerator.unwrap_model(model)
        torch.save(unwrapped_model.state_dict(), final_model_path)
        log_message(f"Final model: {final_model_path}", cfg, accelerator)

    accelerator.wait_for_everyone()
    accelerator.end_training()
    finish_tracking()


if __name__ == "__main__":
    main()
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/output.log
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 17:44:19] Initializing tokenizer...
|
| 2 |
+
[2026-04-25 17:44:19] Loading model...
|
| 3 |
+
[2026-04-25 17:44:24] Loaded pretrained: /workspace/byte-llms-code/hnet_project/checkpoints/hnet_2stage_XL_code.pt
|
| 4 |
+
[2026-04-25 17:44:24] Applied LR multipliers: [2.0, 1.5, 1.0]
|
| 5 |
+
[2026-04-25 17:44:24] Warming up model...
|
| 6 |
+
[WARMUP] Starting warmup (compiling Triton kernels)...
|
| 7 |
+
[WARMUP] Forward: 61.050s, Backward: 136.330s
|
| 8 |
+
[WARMUP] Warmup complete. Subsequent passes will be fast.
|
| 9 |
+
[2026-04-25 17:47:41] Total params: 1,654,090,112
|
| 10 |
+
[2026-04-25 17:47:41] Trainable params: 1,654,090,112
|
| 11 |
+
[2026-04-25 17:47:41] Creating dataloaders...
|
| 12 |
+
[2026-04-25 17:47:41] Train dataset size: 20000
|
| 13 |
+
[2026-04-25 17:47:41] Train batches per epoch (before DDP split): 5000
|
| 14 |
+
[2026-04-25 17:47:41] Validation dataset size: 2000
|
| 15 |
+
[2026-04-25 17:47:41] Validation batches: 84
|
| 16 |
+
[2026-04-25 17:47:41] Creating optimizer...
|
| 17 |
+
[2026-04-25 17:47:41] Total steps: 625, Steps per epoch: 2500
|
| 18 |
+
[2026-04-25 17:47:41] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 19 |
+
[2026-04-25 17:47:42] Train batches per epoch (after DDP split): 2500
|
| 20 |
+
[2026-04-25 17:47:42] Starting training...
|
| 21 |
+
[2026-04-25 17:47:42]
|
| 22 |
+
============================================================
|
| 23 |
+
[2026-04-25 17:47:42] EPOCH 1/1
|
| 24 |
+
[2026-04-25 17:47:42] ============================================================
|
| 25 |
+
[2026-04-25 17:50:46] Epoch 1 | Step 10 | Loss: 0.6596 | LM: 0.6184 | LB: 1.1668 | CL0: 2.7 | CL1: 2.1 | HR0: 0.371/SR0: 0.367 | HR1: 0.472/SR1: 0.455 | LR: 1.95e-05
|
| 26 |
+
[2026-04-25 17:50:54] Epoch 1 | Step 20 | Loss: 0.6101 | LM: 0.6312 | LB: 1.1650 | CL0: 2.8 | CL1: 2.1 | HR0: 0.364/SR0: 0.361 | HR1: 0.476/SR1: 0.459 | LR: 3.40e-05
|
| 27 |
+
[2026-04-25 17:51:01] Epoch 1 | Step 30 | Loss: 0.5549 | LM: 0.5675 | LB: 1.1666 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.360 | HR1: 0.478/SR1: 0.460 | LR: 4.85e-05
|
| 28 |
+
[2026-04-25 17:51:09] Epoch 1 | Step 40 | Loss: 0.5140 | LM: 0.5316 | LB: 1.1688 | CL0: 2.7 | CL1: 2.1 | HR0: 0.367/SR0: 0.364 | HR1: 0.478/SR1: 0.459 | LR: 5.00e-05
|
| 29 |
+
[2026-04-25 17:51:16] Epoch 1 | Step 50 | Loss: 0.4827 | LM: 0.4866 | LB: 1.1683 | CL0: 2.8 | CL1: 2.1 | HR0: 0.365/SR0: 0.362 | HR1: 0.479/SR1: 0.460 | LR: 5.00e-05
|
| 30 |
+
[2026-04-25 17:51:23] Epoch 1 | Step 60 | Loss: 0.4620 | LM: 0.4543 | LB: 1.1683 | CL0: 2.8 | CL1: 2.1 | HR0: 0.362/SR0: 0.360 | HR1: 0.481/SR1: 0.461 | LR: 5.00e-05
|
| 31 |
+
[2026-04-25 17:51:31] Epoch 1 | Step 70 | Loss: 0.4428 | LM: 0.4376 | LB: 1.1672 | CL0: 2.8 | CL1: 2.1 | HR0: 0.362/SR0: 0.359 | HR1: 0.480/SR1: 0.461 | LR: 5.00e-05
|
| 32 |
+
[2026-04-25 17:51:38] Epoch 1 | Step 80 | Loss: 0.4263 | LM: 0.4253 | LB: 1.1680 | CL0: 2.8 | CL1: 2.1 | HR0: 0.361/SR0: 0.359 | HR1: 0.481/SR1: 0.462 | LR: 5.00e-05
|
| 33 |
+
[2026-04-25 17:51:45] Epoch 1 | Step 90 | Loss: 0.4136 | LM: 0.4185 | LB: 1.1681 | CL0: 2.8 | CL1: 2.1 | HR0: 0.362/SR0: 0.360 | HR1: 0.481/SR1: 0.461 | LR: 5.00e-05
|
| 34 |
+
[2026-04-25 17:51:52] Epoch 1 | Step 100 | Loss: 0.4098 | LM: 0.4064 | LB: 1.1692 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.360 | HR1: 0.482/SR1: 0.462 | LR: 5.00e-05
|
| 35 |
+
[2026-04-25 17:52:00] Epoch 1 | Step 110 | Loss: 0.4052 | LM: 0.4049 | LB: 1.1687 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.361 | HR1: 0.481/SR1: 0.461 | LR: 5.00e-05
|
| 36 |
+
[2026-04-25 17:52:07] Epoch 1 | Step 120 | Loss: 0.4008 | LM: 0.3999 | LB: 1.1679 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.361 | HR1: 0.481/SR1: 0.461 | LR: 5.00e-05
|
| 37 |
+
[2026-04-25 17:52:14] Epoch 1 | Step 130 | Loss: 0.3894 | LM: 0.3850 | LB: 1.1673 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.360 | HR1: 0.481/SR1: 0.460 | LR: 5.00e-05
|
| 38 |
+
[2026-04-25 17:52:21] Epoch 1 | Step 140 | Loss: 0.3838 | LM: 0.3834 | LB: 1.1663 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.360 | HR1: 0.480/SR1: 0.460 | LR: 5.00e-05
|
| 39 |
+
[2026-04-25 17:52:29] Epoch 1 | Step 150 | Loss: 0.3826 | LM: 0.3778 | LB: 1.1671 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.361 | HR1: 0.480/SR1: 0.460 | LR: 5.00e-05
|
| 40 |
+
[2026-04-25 17:52:36] Epoch 1 | Step 160 | Loss: 0.3760 | LM: 0.3788 | LB: 1.1665 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.361 | HR1: 0.480/SR1: 0.459 | LR: 5.00e-05
|
| 41 |
+
[2026-04-25 17:52:46] Epoch 1 | Step 170 | Loss: 0.3717 | LM: 0.3687 | LB: 1.1659 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.361 | HR1: 0.480/SR1: 0.459 | LR: 5.00e-05
|
| 42 |
+
[2026-04-25 17:52:53] Epoch 1 | Step 180 | Loss: 0.3693 | LM: 0.3638 | LB: 1.1666 | CL0: 2.8 | CL1: 2.1 | HR0: 0.363/SR0: 0.360 | HR1: 0.480/SR1: 0.460 | LR: 5.00e-05
|
| 43 |
+
[2026-04-25 17:53:01] Epoch 1 | Step 190 | Loss: 0.3656 | LM: 0.3626 | LB: 1.1659 | CL0: 2.8 | CL1: 2.1 | HR0: 0.361/SR0: 0.359 | HR1: 0.480/SR1: 0.460 | LR: 5.00e-05
|
| 44 |
+
[2026-04-25 17:53:08] Epoch 1 | Step 200 | Loss: 0.3625 | LM: 0.3600 | LB: 1.1654 | CL0: 2.8 | CL1: 2.1 | HR0: 0.361/SR0: 0.359 | HR1: 0.480/SR1: 0.460 | LR: 5.00e-05
|
| 45 |
+
[2026-04-25 17:53:15] Epoch 1 | Step 210 | Loss: 0.3592 | LM: 0.3533 | LB: 1.1643 | CL0: 2.8 | CL1: 2.1 | HR0: 0.360/SR0: 0.358 | HR1: 0.479/SR1: 0.459 | LR: 5.00e-05
|
| 46 |
+
[2026-04-25 17:53:23] Epoch 1 | Step 220 | Loss: 0.3587 | LM: 0.3455 | LB: 1.1640 | CL0: 2.8 | CL1: 2.1 | HR0: 0.360/SR0: 0.358 | HR1: 0.479/SR1: 0.459 | LR: 5.00e-05
|
| 47 |
+
[2026-04-25 17:53:30] Epoch 1 | Step 230 | Loss: 0.3579 | LM: 0.3475 | LB: 1.1646 | CL0: 2.8 | CL1: 2.1 | HR0: 0.360/SR0: 0.358 | HR1: 0.480/SR1: 0.459 | LR: 5.00e-05
|
| 48 |
+
[2026-04-25 17:53:37] Epoch 1 | Step 240 | Loss: 0.3575 | LM: 0.3465 | LB: 1.1638 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.480/SR1: 0.459 | LR: 5.00e-05
|
| 49 |
+
[2026-04-25 17:53:44] Epoch 1 | Step 250 | Loss: 0.3542 | LM: 0.3427 | LB: 1.1638 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.479/SR1: 0.459 | LR: 5.00e-05
|
| 50 |
+
[2026-04-25 17:53:52] Epoch 1 | Step 260 | Loss: 0.3546 | LM: 0.3436 | LB: 1.1633 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.479/SR1: 0.459 | LR: 4.72e-05
|
| 51 |
+
[2026-04-25 17:53:59] Epoch 1 | Step 270 | Loss: 0.3533 | LM: 0.3435 | LB: 1.1625 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.458 | LR: 3.96e-05
|
| 52 |
+
[2026-04-25 17:54:06] Epoch 1 | Step 280 | Loss: 0.3517 | LM: 0.3432 | LB: 1.1627 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.479/SR1: 0.458 | LR: 2.89e-05
|
| 53 |
+
[2026-04-25 17:54:14] Epoch 1 | Step 290 | Loss: 0.3502 | LM: 0.3400 | LB: 1.1621 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.458 | LR: 1.79e-05
|
| 54 |
+
[2026-04-25 17:54:21] Epoch 1 | Step 300 | Loss: 0.3480 | LM: 0.3369 | LB: 1.1615 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.457 | LR: 9.30e-06
|
| 55 |
+
[2026-04-25 17:54:28] Epoch 1 | Step 310 | Loss: 0.3471 | LM: 0.3338 | LB: 1.1607 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.18e-06
|
| 56 |
+
[2026-04-25 17:54:36] Epoch 1 | Step 320 | Loss: 0.3462 | LM: 0.3325 | LB: 1.1606 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.356 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 57 |
+
[2026-04-25 17:54:43] Epoch 1 | Step 330 | Loss: 0.3450 | LM: 0.3310 | LB: 1.1607 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.356 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 58 |
+
[2026-04-25 17:54:51] Epoch 1 | Step 340 | Loss: 0.3444 | LM: 0.3292 | LB: 1.1610 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 59 |
+
[2026-04-25 17:54:58] Epoch 1 | Step 350 | Loss: 0.3419 | LM: 0.3263 | LB: 1.1613 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 60 |
+
[2026-04-25 17:55:05] Epoch 1 | Step 360 | Loss: 0.3400 | LM: 0.3260 | LB: 1.1614 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 61 |
+
[2026-04-25 17:55:13] Epoch 1 | Step 370 | Loss: 0.3383 | LM: 0.3238 | LB: 1.1614 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 62 |
+
[2026-04-25 17:55:20] Epoch 1 | Step 380 | Loss: 0.3383 | LM: 0.3261 | LB: 1.1614 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 63 |
+
[2026-04-25 17:55:27] Epoch 1 | Step 390 | Loss: 0.3374 | LM: 0.3237 | LB: 1.1612 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 64 |
+
[2026-04-25 17:55:35] Epoch 1 | Step 400 | Loss: 0.3361 | LM: 0.3226 | LB: 1.1613 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 65 |
+
[2026-04-25 17:55:42] Epoch 1 | Step 410 | Loss: 0.3359 | LM: 0.3221 | LB: 1.1613 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.457 | LR: 5.00e-06
|
| 66 |
+
[2026-04-25 17:55:49] Epoch 1 | Step 420 | Loss: 0.3345 | LM: 0.3206 | LB: 1.1611 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 67 |
+
[2026-04-25 17:55:56] Epoch 1 | Step 430 | Loss: 0.3337 | LM: 0.3198 | LB: 1.1606 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 68 |
+
[2026-04-25 17:56:03] Epoch 1 | Step 440 | Loss: 0.3332 | LM: 0.3203 | LB: 1.1609 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 69 |
+
[2026-04-25 17:56:11] Epoch 1 | Step 450 | Loss: 0.3327 | LM: 0.3191 | LB: 1.1610 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 70 |
+
[2026-04-25 17:56:18] Epoch 1 | Step 460 | Loss: 0.3328 | LM: 0.3204 | LB: 1.1609 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 71 |
+
[2026-04-25 17:56:25] Epoch 1 | Step 470 | Loss: 0.3332 | LM: 0.3214 | LB: 1.1614 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 72 |
+
[2026-04-25 17:56:33] Epoch 1 | Step 480 | Loss: 0.3328 | LM: 0.3211 | LB: 1.1616 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 73 |
+
[2026-04-25 17:56:40] Epoch 1 | Step 490 | Loss: 0.3328 | LM: 0.3213 | LB: 1.1619 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.358 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 74 |
+
[2026-04-25 17:56:47] Epoch 1 | Step 500 | Loss: 0.3315 | LM: 0.3215 | LB: 1.1621 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.358 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 75 |
+
[2026-04-25 17:56:54] Epoch 1 | Step 510 | Loss: 0.3310 | LM: 0.3211 | LB: 1.1620 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.358 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 76 |
+
[2026-04-25 17:57:01] Epoch 1 | Step 520 | Loss: 0.3307 | LM: 0.3198 | LB: 1.1616 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.457 | LR: 5.00e-06
|
| 77 |
+
[2026-04-25 17:57:09] Epoch 1 | Step 530 | Loss: 0.3303 | LM: 0.3196 | LB: 1.1611 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 78 |
+
[2026-04-25 17:57:16] Epoch 1 | Step 540 | Loss: 0.3312 | LM: 0.3209 | LB: 1.1610 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 79 |
+
[2026-04-25 17:57:23] Epoch 1 | Step 550 | Loss: 0.3309 | LM: 0.3201 | LB: 1.1611 | CL0: 2.8 | CL1: 2.1 | HR0: 0.358/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 80 |
+
[2026-04-25 17:57:31] Epoch 1 | Step 560 | Loss: 0.3304 | LM: 0.3206 | LB: 1.1615 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 81 |
+
[2026-04-25 17:57:38] Epoch 1 | Step 570 | Loss: 0.3304 | LM: 0.3197 | LB: 1.1612 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 82 |
+
[2026-04-25 17:57:45] Epoch 1 | Step 580 | Loss: 0.3298 | LM: 0.3189 | LB: 1.1612 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 83 |
+
[2026-04-25 17:57:53] Epoch 1 | Step 590 | Loss: 0.3302 | LM: 0.3205 | LB: 1.1613 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 84 |
+
[2026-04-25 17:58:00] Epoch 1 | Step 600 | Loss: 0.3302 | LM: 0.3214 | LB: 1.1614 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.478/SR1: 0.456 | LR: 5.00e-06
|
| 85 |
+
[2026-04-25 17:58:08] Epoch 1 | Step 610 | Loss: 0.3300 | LM: 0.3205 | LB: 1.1610 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 86 |
+
[2026-04-25 17:58:16] Epoch 1 | Step 620 | Loss: 0.3296 | LM: 0.3199 | LB: 1.1609 | CL0: 2.8 | CL1: 2.1 | HR0: 0.359/SR0: 0.357 | HR1: 0.477/SR1: 0.456 | LR: 5.00e-06
|
| 87 |
+
[2026-04-25 17:58:17] Training interrupted by user
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/requirements.txt
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
setuptools==78.1.1
|
| 2 |
+
wheel==0.45.1
|
| 3 |
+
pip==25.2
|
| 4 |
+
webencodings==0.5.1
|
| 5 |
+
triton==3.2.0
|
| 6 |
+
pytz==2025.2
|
| 7 |
+
pydub==0.25.1
|
| 8 |
+
pure_eval==0.2.3
|
| 9 |
+
ptyprocess==0.7.0
|
| 10 |
+
nvidia-ml-py==13.590.48
|
| 11 |
+
nvidia-cusparselt-cu12==0.6.2
|
| 12 |
+
mpmath==1.3.0
|
| 13 |
+
ipython-genutils==0.2.0
|
| 14 |
+
fastjsonschema==2.21.2
|
| 15 |
+
brotli==1.2.0
|
| 16 |
+
antlr4-python3-runtime==4.9.3
|
| 17 |
+
xxhash==3.6.0
|
| 18 |
+
widgetsnbextension==4.0.14
|
| 19 |
+
websocket-client==1.9.0
|
| 20 |
+
webcolors==24.11.1
|
| 21 |
+
wcwidth==0.2.14
|
| 22 |
+
urllib3==2.5.0
|
| 23 |
+
uri-template==1.3.0
|
| 24 |
+
tzdata==2025.2
|
| 25 |
+
typing_extensions==4.15.0
|
| 26 |
+
types-python-dateutil==2.9.0.20251008
|
| 27 |
+
traitlets==5.14.3
|
| 28 |
+
tqdm==4.67.1
|
| 29 |
+
tornado==6.5.2
|
| 30 |
+
tomlkit==0.13.3
|
| 31 |
+
tinycss2==1.4.0
|
| 32 |
+
tabulate==0.9.0
|
| 33 |
+
sympy==1.13.1
|
| 34 |
+
soupsieve==2.8
|
| 35 |
+
sniffio==1.3.1
|
| 36 |
+
smmap==5.0.2
|
| 37 |
+
six==1.17.0
|
| 38 |
+
shellingham==1.5.4
|
| 39 |
+
Send2Trash==1.8.3
|
| 40 |
+
semantic-version==2.10.0
|
| 41 |
+
safetensors==0.6.2
|
| 42 |
+
rpds-py==0.27.1
|
| 43 |
+
rfc3986-validator==0.1.1
|
| 44 |
+
regex==2025.9.18
|
| 45 |
+
pyzmq==27.1.0
|
| 46 |
+
PyYAML==6.0.3
|
| 47 |
+
python-multipart==0.0.22
|
| 48 |
+
python-json-logger==4.0.0
|
| 49 |
+
python-dotenv==1.2.1
|
| 50 |
+
pyparsing==3.2.5
|
| 51 |
+
PyJWT==2.8.0
|
| 52 |
+
Pygments==2.19.2
|
| 53 |
+
pycparser==2.23
|
| 54 |
+
pyarrow==22.0.0
|
| 55 |
+
psutil==7.1.0
|
| 56 |
+
protobuf==6.33.4
|
| 57 |
+
propcache==0.4.1
|
| 58 |
+
prometheus_client==0.23.1
|
| 59 |
+
portalocker==3.2.0
|
| 60 |
+
platformdirs==4.5.0
|
| 61 |
+
pillow==11.3.0
|
| 62 |
+
pexpect==4.9.0
|
| 63 |
+
pathspec==1.0.4
|
| 64 |
+
parso==0.8.5
|
| 65 |
+
pandocfilters==1.5.1
|
| 66 |
+
packaging==25.0
|
| 67 |
+
orjson==3.11.6
|
| 68 |
+
opt_einsum==3.4.0
|
| 69 |
+
nvidia-nvtx-cu12==12.4.127
|
| 70 |
+
nvidia-nvjitlink-cu12==12.4.127
|
| 71 |
+
nvidia-nccl-cu12==2.21.5
|
| 72 |
+
nvidia-curand-cu12==10.3.5.147
|
| 73 |
+
nvidia-cufile-cu12==1.13.1.3
|
| 74 |
+
nvidia-cufft-cu12==11.2.1.3
|
| 75 |
+
nvidia-cuda-runtime-cu12==12.4.127
|
| 76 |
+
nvidia-cuda-nvrtc-cu12==12.4.127
|
| 77 |
+
nvidia-cuda-cupti-cu12==12.4.127
|
| 78 |
+
nvidia-cublas-cu12==12.4.5.8
|
| 79 |
+
numpy==2.3.3
|
| 80 |
+
ninja==1.13.0
|
| 81 |
+
networkx==3.5
|
| 82 |
+
nest-asyncio==1.6.0
|
| 83 |
+
narwhals==2.15.0
|
| 84 |
+
mypy_extensions==1.1.0
|
| 85 |
+
multidict==6.7.0
|
| 86 |
+
mistune==3.1.4
|
| 87 |
+
mdurl==0.1.2
|
| 88 |
+
MarkupSafe==3.0.3
|
| 89 |
+
lxml==6.0.2
|
| 90 |
+
librt==0.8.0
|
| 91 |
+
lark==1.3.0
|
| 92 |
+
kiwisolver==1.4.9
|
| 93 |
+
jupyterlab_widgets==3.0.15
|
| 94 |
+
jupyterlab_pygments==0.3.0
|
| 95 |
+
jsonpointer==3.0.0
|
| 96 |
+
json5==0.12.1
|
| 97 |
+
itsdangerous==2.2.0
|
| 98 |
+
idna==3.10
|
| 99 |
+
hf-xet==1.1.10
|
| 100 |
+
h11==0.16.0
|
| 101 |
+
groovy==0.1.2
|
| 102 |
+
fsspec==2025.9.0
|
| 103 |
+
frozenlist==1.8.0
|
| 104 |
+
fqdn==1.5.1
|
| 105 |
+
fonttools==4.60.1
|
| 106 |
+
filelock==3.19.1
|
| 107 |
+
ffmpy==1.0.0
|
| 108 |
+
executing==2.2.1
|
| 109 |
+
einops==0.8.1
|
| 110 |
+
dill==0.4.0
|
| 111 |
+
defusedxml==0.7.1
|
| 112 |
+
decorator==5.2.1
|
| 113 |
+
debugpy==1.8.17
|
| 114 |
+
dacite==1.9.2
|
| 115 |
+
cycler==0.12.1
|
| 116 |
+
comm==0.2.3
|
| 117 |
+
colorama==0.4.6
|
| 118 |
+
click==8.3.1
|
| 119 |
+
charset-normalizer==3.4.3
|
| 120 |
+
certifi==2025.10.5
|
| 121 |
+
bleach==6.2.0
|
| 122 |
+
babel==2.17.0
|
| 123 |
+
attrs==25.4.0
|
| 124 |
+
async-lru==2.0.5
|
| 125 |
+
asttokens==3.0.0
|
| 126 |
+
annotated-types==0.7.0
|
| 127 |
+
annotated-doc==0.0.4
|
| 128 |
+
aiohappyeyeballs==2.6.1
|
| 129 |
+
aiofiles==24.1.0
|
| 130 |
+
yarl==1.22.0
|
| 131 |
+
uvicorn==0.40.0
|
| 132 |
+
typing-inspection==0.4.2
|
| 133 |
+
terminado==0.18.1
|
| 134 |
+
stack-data==0.6.3
|
| 135 |
+
sentry-sdk==2.50.0
|
| 136 |
+
scipy==1.17.0
|
| 137 |
+
sacrebleu==2.6.0
|
| 138 |
+
rfc3987-syntax==1.1.0
|
| 139 |
+
rfc3339-validator==0.1.4
|
| 140 |
+
requests==2.32.5
|
| 141 |
+
reportlab==4.4.9
|
| 142 |
+
referencing==0.36.2
|
| 143 |
+
python-dateutil==2.9.0.post0
|
| 144 |
+
pydantic_core==2.41.5
|
| 145 |
+
prompt_toolkit==3.0.52
|
| 146 |
+
plotly==6.5.2
|
| 147 |
+
pathlib2==2.3.7.post1
|
| 148 |
+
orderedmultidict==1.0.2
|
| 149 |
+
optree==0.17.0
|
| 150 |
+
omegaconf==2.3.0
|
| 151 |
+
nvidia-cusparse-cu12==12.3.1.170
|
| 152 |
+
nvidia-cudnn-cu12==9.1.0.70
|
| 153 |
+
mypy==1.19.1
|
| 154 |
+
multiprocess==0.70.16
|
| 155 |
+
matplotlib-inline==0.1.7
|
| 156 |
+
markdown-it-py==4.0.0
|
| 157 |
+
jupyter_core==5.8.1
|
| 158 |
+
Jinja2==3.1.6
|
| 159 |
+
jedi==0.19.2
|
| 160 |
+
ipython_pygments_lexers==1.1.1
|
| 161 |
+
httpcore==1.0.9
|
| 162 |
+
gitdb==4.0.12
|
| 163 |
+
ftfy==6.3.1
|
| 164 |
+
contourpy==1.3.3
|
| 165 |
+
cffi==2.0.0
|
| 166 |
+
beautifulsoup4==4.14.2
|
| 167 |
+
anyio==4.11.0
|
| 168 |
+
aiosignal==1.4.0
|
| 169 |
+
starlette==0.50.0
|
| 170 |
+
rich==14.2.0
|
| 171 |
+
pydantic==2.12.5
|
| 172 |
+
pandas==2.3.3
|
| 173 |
+
nvidia-cusolver-cu12==11.6.1.9
|
| 174 |
+
matplotlib==3.10.7
|
| 175 |
+
jupyter_server_terminals==0.5.3
|
| 176 |
+
jupyter_client==8.6.3
|
| 177 |
+
jsonschema-specifications==2025.9.1
|
| 178 |
+
ipython==9.6.0
|
| 179 |
+
hydra-core==1.3.2
|
| 180 |
+
huggingface-hub==0.35.3
|
| 181 |
+
httpx==0.28.1
|
| 182 |
+
GitPython==3.1.46
|
| 183 |
+
furl==2.1.4
|
| 184 |
+
cryptography==46.0.4
|
| 185 |
+
arrow==1.3.0
|
| 186 |
+
argon2-cffi-bindings==25.1.0
|
| 187 |
+
aiohttp==3.13.1
|
| 188 |
+
wandb==0.24.0
|
| 189 |
+
typer==0.21.1
|
| 190 |
+
torch==2.6.0
|
| 191 |
+
tokenizers==0.22.1
|
| 192 |
+
seaborn==0.13.2
|
| 193 |
+
safehttpx==0.1.7
|
| 194 |
+
jsonschema==4.25.1
|
| 195 |
+
joypy==0.2.6
|
| 196 |
+
isoduration==20.11.0
|
| 197 |
+
ipywidgets==8.1.7
|
| 198 |
+
ipykernel==6.30.1
|
| 199 |
+
gradio_client==2.0.3
|
| 200 |
+
fastapi==0.128.0
|
| 201 |
+
Authlib==1.6.6
|
| 202 |
+
argon2-cffi==25.1.0
|
| 203 |
+
transformers==4.57.6
|
| 204 |
+
nbformat==5.10.4
|
| 205 |
+
mlstm_kernels==2.0.2
|
| 206 |
+
jupyter-console==6.6.3
|
| 207 |
+
gradio==6.5.1
|
| 208 |
+
datasets==4.3.0
|
| 209 |
+
clearml==1.16.4
|
| 210 |
+
accelerate==1.10.1
|
| 211 |
+
xlstm==2.0.4
|
| 212 |
+
nbclient==0.10.2
|
| 213 |
+
jupyter-events==0.12.0
|
| 214 |
+
trackio==0.15.0
|
| 215 |
+
nbconvert==7.16.6
|
| 216 |
+
jupyter_server==2.17.0
|
| 217 |
+
notebook_shim==0.2.4
|
| 218 |
+
jupyterlab_server==2.27.3
|
| 219 |
+
jupyter-lsp==2.3.0
|
| 220 |
+
nbclassic==1.3.3
|
| 221 |
+
jupyterlab==4.4.9
|
| 222 |
+
notebook==7.4.7
|
| 223 |
+
jupyter_contrib_core==0.4.2
|
| 224 |
+
jupyter==1.1.1
|
| 225 |
+
jupyter_nbextensions_configurator==0.6.4
|
| 226 |
+
causal-conv1d==1.5.0.post8
|
| 227 |
+
flash_attn==2.7.4.post1
|
| 228 |
+
mamba-ssm==2.2.4
|
| 229 |
+
hnet==0.0.1
|
| 230 |
+
autocommand==2.2.2
|
| 231 |
+
backports.tarfile==1.2.0
|
| 232 |
+
importlib_metadata==8.0.0
|
| 233 |
+
inflect==7.3.1
|
| 234 |
+
jaraco.collections==5.1.0
|
| 235 |
+
jaraco.context==5.3.0
|
| 236 |
+
jaraco.functools==4.0.1
|
| 237 |
+
jaraco.text==3.12.1
|
| 238 |
+
more-itertools==10.3.0
|
| 239 |
+
packaging==24.2
|
| 240 |
+
platformdirs==4.2.2
|
| 241 |
+
tomli==2.0.1
|
| 242 |
+
typeguard==4.3.0
|
| 243 |
+
typing_extensions==4.12.2
|
| 244 |
+
wheel==0.45.1
|
| 245 |
+
zipp==3.19.2
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/files/wandb-metadata.json
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"os": "Linux-5.4.0-176-generic-x86_64-with-glibc2.35",
|
| 3 |
+
"python": "CPython 3.12.0",
|
| 4 |
+
"startedAt": "2026-04-25T17:44:18.465447Z",
|
| 5 |
+
"args": [
|
| 6 |
+
"tracking=wandb",
|
| 7 |
+
"tracking.project=code-completion_lr-sweep",
|
| 8 |
+
"tracking.run_name=hnet_xl_code_lr_5e-5",
|
| 9 |
+
"training.lr=5e-5",
|
| 10 |
+
"paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5",
|
| 11 |
+
"data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full"
|
| 12 |
+
],
|
| 13 |
+
"program": "/workspace/byte-llms-code/code_completion_exp/train_hnet/train.py",
|
| 14 |
+
"codePath": "code_completion_exp/train_hnet/train.py",
|
| 15 |
+
"codePathLocal": "train.py",
|
| 16 |
+
"git": {
|
| 17 |
+
"remote": "https://github.com/naryst/byte-llms-code.git",
|
| 18 |
+
"commit": "f111e13281aa0dc58e24302edab5b0d5c2024586"
|
| 19 |
+
},
|
| 20 |
+
"email": "nikita@local.ru",
|
| 21 |
+
"root": "/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5",
|
| 22 |
+
"host": "7504e518d24a",
|
| 23 |
+
"executable": "/venv/bytellm/bin/python",
|
| 24 |
+
"cpu_count": 64,
|
| 25 |
+
"cpu_count_logical": 128,
|
| 26 |
+
"gpu": "NVIDIA H100 80GB HBM3",
|
| 27 |
+
"gpu_count": 4,
|
| 28 |
+
"disk": {
|
| 29 |
+
"/": {
|
| 30 |
+
"total": "265214230528",
|
| 31 |
+
"used": "37550460928"
|
| 32 |
+
}
|
| 33 |
+
},
|
| 34 |
+
"memory": {
|
| 35 |
+
"total": "1081679683584"
|
| 36 |
+
},
|
| 37 |
+
"gpu_nvidia": [
|
| 38 |
+
{
|
| 39 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 40 |
+
"memoryTotal": "85520809984",
|
| 41 |
+
"cudaCores": 16896,
|
| 42 |
+
"architecture": "Hopper",
|
| 43 |
+
"uuid": "GPU-b60cdcab-2033-2009-41de-be646c953a20"
|
| 44 |
+
},
|
| 45 |
+
{
|
| 46 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 47 |
+
"memoryTotal": "85520809984",
|
| 48 |
+
"cudaCores": 16896,
|
| 49 |
+
"architecture": "Hopper",
|
| 50 |
+
"uuid": "GPU-9982b420-4520-4238-c378-ec5a46015474"
|
| 51 |
+
},
|
| 52 |
+
{
|
| 53 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 54 |
+
"memoryTotal": "85520809984",
|
| 55 |
+
"cudaCores": 16896,
|
| 56 |
+
"architecture": "Hopper",
|
| 57 |
+
"uuid": "GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f"
|
| 58 |
+
},
|
| 59 |
+
{
|
| 60 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 61 |
+
"memoryTotal": "85520809984",
|
| 62 |
+
"cudaCores": 16896,
|
| 63 |
+
"architecture": "Hopper",
|
| 64 |
+
"uuid": "GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134"
|
| 65 |
+
}
|
| 66 |
+
],
|
| 67 |
+
"cudaVersion": "12.2",
|
| 68 |
+
"writerId": "1notu1l0xu2d67oqace4jir1yjh26dit"
|
| 69 |
+
}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug-core.log
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T17:44:18.540564412Z","level":"INFO","msg":"main: starting server","port-filename":"/tmp/tmp91n55tmn/port-47021.txt","pid":47021,"log-level":0,"disable-analytics":false,"shutdown-on-parent-exit":false,"enable-dcgm-profiling":false}
|
| 2 |
+
{"time":"2026-04-25T17:44:18.540941428Z","level":"INFO","msg":"server: will exit if parent process dies","ppid":47021}
|
| 3 |
+
{"time":"2026-04-25T17:44:18.540938282Z","level":"INFO","msg":"server: accepting connections","addr":{"Name":"/tmp/wandb-47021-47077-3008769149/socket","Net":"unix"}}
|
| 4 |
+
{"time":"2026-04-25T17:44:18.728016872Z","level":"INFO","msg":"connection: ManageConnectionData: new connection created","id":"1(@)"}
|
| 5 |
+
{"time":"2026-04-25T17:44:18.748060238Z","level":"INFO","msg":"handleInformInit: received","streamId":"uk7c7595","id":"1(@)"}
|
| 6 |
+
{"time":"2026-04-25T17:44:19.47983191Z","level":"INFO","msg":"handleInformInit: stream started","streamId":"uk7c7595","id":"1(@)"}
|
| 7 |
+
{"time":"2026-04-25T17:58:23.288225616Z","level":"INFO","msg":"server: parent process exited, terminating service process"}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug-internal.log
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T17:44:18.748161446Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T17:44:19.479680661Z","level":"INFO","msg":"stream: created new stream","id":"uk7c7595"}
|
| 3 |
+
{"time":"2026-04-25T17:44:19.479738093Z","level":"INFO","msg":"handler: started","stream_id":"uk7c7595"}
|
| 4 |
+
{"time":"2026-04-25T17:44:19.479825274Z","level":"INFO","msg":"stream: started","id":"uk7c7595"}
|
| 5 |
+
{"time":"2026-04-25T17:44:19.479836317Z","level":"INFO","msg":"writer: started","stream_id":"uk7c7595"}
|
| 6 |
+
{"time":"2026-04-25T17:44:19.479839133Z","level":"INFO","msg":"sender: started","stream_id":"uk7c7595"}
|
| 7 |
+
{"time":"2026-04-25T17:44:19.625754194Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug.log
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_setup.py:_flush():81] Configure stats pid to 47021
|
| 3 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug.log
|
| 5 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_174418-uk7c7595/logs/debug-internal.log
|
| 6 |
+
2026-04-25 17:44:18,466 INFO MainThread:47021 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 17:44:18,467 INFO MainThread:47021 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'config_path': '/workspace/byte-llms-code/hnet_project/configs/hnet_2stage_XL_code.json', 'checkpoint_path': '/workspace/byte-llms-code/hnet_project/checkpoints/hnet_2stage_XL_code.pt'}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 24, 'gradient_accumulation_steps': 4, 'lr': 5e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'lr_multiplier': [2.0, 1.5, 1.0], 'load_balancing_weight': 0.01, 'load_balancing_N': 4.0, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None, 'warmup_model': True}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 0, 'pin_memory': True, 'max_train_samples': 20000, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 3000, 'eval_interval': 1000, 'save_every_epoch': True}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'hnet_xl_code_lr_5e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_hnet/train.py'}}
|
| 9 |
+
2026-04-25 17:44:18,467 INFO MainThread:47021 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 17:44:18,728 INFO MainThread:47021 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 17:44:18,746 INFO MainThread:47021 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 17:44:18,749 INFO MainThread:47021 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 17:44:18,765 INFO MainThread:47021 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 17:44:19,625 INFO MainThread:47021 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 17:44:19,782 INFO MainThread:47021 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 17:44:19,782 INFO MainThread:47021 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 17:44:19,782 INFO MainThread:47021 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 17:44:19,782 INFO MainThread:47021 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 17:44:19,785 INFO MainThread:47021 [wandb_init.py:init():1084] run started, returning control to user process
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/code/code_completion_exp/train_hnet/train.py
ADDED
|
@@ -0,0 +1,284 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Training Pipeline для HNet модели на задаче Code Completion.
|
| 3 |
+
|
| 4 |
+
Конфигурация через Hydra + OmegaConf, логирование в Trackio.
|
| 5 |
+
Поддержка DDP через Accelerate для multi-GPU тренировки.
|
| 6 |
+
|
| 7 |
+
Использование:
|
| 8 |
+
# Базовый запуск (single GPU)
|
| 9 |
+
python train.py
|
| 10 |
+
|
| 11 |
+
# Multi-GPU с Accelerate
|
| 12 |
+
accelerate launch train.py
|
| 13 |
+
|
| 14 |
+
# Multi-GPU с указанием количества GPU
|
| 15 |
+
accelerate launch --num_processes=4 train.py
|
| 16 |
+
|
| 17 |
+
# Переопределение параметров через CLI
|
| 18 |
+
python train.py training.lr=1e-4 training.epochs=5
|
| 19 |
+
|
| 20 |
+
# Выбор другого конфига модели
|
| 21 |
+
python train.py model=hnet_small
|
| 22 |
+
|
| 23 |
+
# Multirun (sweep)
|
| 24 |
+
python train.py --multirun training.lr=1e-4,3e-4,1e-3
|
| 25 |
+
|
| 26 |
+
# Без логирования
|
| 27 |
+
python train.py tracking.enabled=false
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
import os
|
| 31 |
+
import math
|
| 32 |
+
from pathlib import Path
|
| 33 |
+
|
| 34 |
+
import torch
|
| 35 |
+
import hydra
|
| 36 |
+
from hydra.core.hydra_config import HydraConfig
|
| 37 |
+
from omegaconf import DictConfig, OmegaConf
|
| 38 |
+
from accelerate import Accelerator
|
| 39 |
+
from accelerate.utils import set_seed as accelerate_set_seed
|
| 40 |
+
|
| 41 |
+
# HNet imports
|
| 42 |
+
from hnet.load_utils import load_from_pretrained, load_from_config
|
| 43 |
+
from hnet.utils.tokenizers import ByteTokenizer
|
| 44 |
+
from hnet.utils.train import group_params
|
| 45 |
+
|
| 46 |
+
# Ensure repo root is on sys.path (needed when running from subdirectory)
|
| 47 |
+
import sys
|
| 48 |
+
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
|
| 49 |
+
|
| 50 |
+
# Shared training library
|
| 51 |
+
from training_lib.utils import log_message
|
| 52 |
+
from training_lib.checkpointing import save_checkpoint, load_checkpoint
|
| 53 |
+
from training_lib.schedulers import get_lr_scheduler
|
| 54 |
+
from training_lib.tracking import init_tracking, finish_tracking
|
| 55 |
+
from training_lib.hnet.train_loop import train_epoch
|
| 56 |
+
from training_lib.hnet.data import create_dataloaders
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@hydra.main(version_base=None, config_path="configs", config_name="config")
|
| 60 |
+
def main(cfg: DictConfig):
|
| 61 |
+
"""Глав��ая функция тренировки с поддержкой DDP чере�� Accelerate."""
|
| 62 |
+
|
| 63 |
+
# === Accelerator Setup ===
|
| 64 |
+
mixed_precision = "bf16" if cfg.training.use_amp else "no"
|
| 65 |
+
|
| 66 |
+
accelerator = Accelerator(
|
| 67 |
+
mixed_precision=mixed_precision,
|
| 68 |
+
gradient_accumulation_steps=cfg.training.gradient_accumulation_steps,
|
| 69 |
+
)
|
| 70 |
+
|
| 71 |
+
# === Setup ===
|
| 72 |
+
accelerate_set_seed(cfg.seed)
|
| 73 |
+
|
| 74 |
+
if cfg.paths.output_dir is None:
|
| 75 |
+
cfg.paths.output_dir = HydraConfig.get().runtime.output_dir
|
| 76 |
+
|
| 77 |
+
OmegaConf.resolve(cfg)
|
| 78 |
+
|
| 79 |
+
log_message(
|
| 80 |
+
f"CUDA_VISIBLE_DEVICES: {os.environ.get('CUDA_VISIBLE_DEVICES', 'not set')}",
|
| 81 |
+
cfg,
|
| 82 |
+
accelerator,
|
| 83 |
+
)
|
| 84 |
+
log_message(f"Number of processes: {accelerator.num_processes}", cfg, accelerator)
|
| 85 |
+
log_message(f"Process index: {accelerator.process_index}", cfg, accelerator)
|
| 86 |
+
log_message(f"Mixed precision: {mixed_precision}", cfg, accelerator)
|
| 87 |
+
|
| 88 |
+
log_message("=" * 60, cfg, accelerator)
|
| 89 |
+
log_message(
|
| 90 |
+
"HNet Training Pipeline (Hydra + Trackio + Accelerate)", cfg, accelerator
|
| 91 |
+
)
|
| 92 |
+
log_message("=" * 60, cfg, accelerator)
|
| 93 |
+
log_message(f"Config:\n{OmegaConf.to_yaml(cfg)}", cfg, accelerator)
|
| 94 |
+
|
| 95 |
+
# === Trackio Init ===
|
| 96 |
+
init_tracking(cfg, accelerator)
|
| 97 |
+
|
| 98 |
+
# === Tokenizer ===
|
| 99 |
+
log_message("Initializing tokenizer...", cfg, accelerator)
|
| 100 |
+
tokenizer = ByteTokenizer()
|
| 101 |
+
|
| 102 |
+
# === Model ===
|
| 103 |
+
log_message("Loading model...", cfg, accelerator)
|
| 104 |
+
if cfg.model.checkpoint_path:
|
| 105 |
+
model = load_from_pretrained(
|
| 106 |
+
model_path=cfg.model.checkpoint_path,
|
| 107 |
+
model_config_path=cfg.model.config_path,
|
| 108 |
+
)
|
| 109 |
+
log_message(f"Loaded pretrained: {cfg.model.checkpoint_path}", cfg, accelerator)
|
| 110 |
+
else:
|
| 111 |
+
model = load_from_config(
|
| 112 |
+
model_config_path=cfg.model.config_path,
|
| 113 |
+
device="cpu",
|
| 114 |
+
)
|
| 115 |
+
model.init_weights()
|
| 116 |
+
log_message("Initialized from scratch", cfg, accelerator)
|
| 117 |
+
|
| 118 |
+
model.train()
|
| 119 |
+
|
| 120 |
+
# LR multiplier для разны�� стадий (до prepare!)
|
| 121 |
+
lr_multiplier = list(cfg.training.lr_multiplier)
|
| 122 |
+
model.apply_lr_multiplier(lr_multiplier)
|
| 123 |
+
log_message(f"Applied LR multipliers: {lr_multiplier}", cfg, accelerator)
|
| 124 |
+
|
| 125 |
+
# Warmup для Triton kernels
|
| 126 |
+
if cfg.training.warmup_model:
|
| 127 |
+
log_message("Warming up model...", cfg, accelerator)
|
| 128 |
+
model = model.to(accelerator.device)
|
| 129 |
+
model.warmup(verbose=accelerator.is_main_process)
|
| 130 |
+
|
| 131 |
+
# Log model info
|
| 132 |
+
total_params = sum(p.numel() for p in model.parameters())
|
| 133 |
+
trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
|
| 134 |
+
log_message(f"Total params: {total_params:,}", cfg, accelerator)
|
| 135 |
+
log_message(f"Trainable params: {trainable_params:,}", cfg, accelerator)
|
| 136 |
+
|
| 137 |
+
# === Data ===
|
| 138 |
+
log_message("Creating dataloaders...", cfg, accelerator)
|
| 139 |
+
dataloaders = create_dataloaders(cfg, tokenizer)
|
| 140 |
+
|
| 141 |
+
train_dataloader = dataloaders["train"]
|
| 142 |
+
val_dataloader = dataloaders.get("validation", None)
|
| 143 |
+
|
| 144 |
+
log_message(
|
| 145 |
+
f"Train dataset size: {len(train_dataloader.dataset)}", cfg, accelerator
|
| 146 |
+
)
|
| 147 |
+
log_message(
|
| 148 |
+
f"Train batches per epoch (before DDP split): {len(train_dataloader)}",
|
| 149 |
+
cfg,
|
| 150 |
+
accelerator,
|
| 151 |
+
)
|
| 152 |
+
|
| 153 |
+
if val_dataloader:
|
| 154 |
+
log_message(
|
| 155 |
+
f"Validation dataset size: {len(val_dataloader.dataset)}", cfg, accelerator
|
| 156 |
+
)
|
| 157 |
+
log_message(f"Validation batches: {len(val_dataloader)}", cfg, accelerator)
|
| 158 |
+
else:
|
| 159 |
+
log_message("No validation dataset found", cfg, accelerator)
|
| 160 |
+
|
| 161 |
+
# === Optimizer ===
|
| 162 |
+
log_message("Creating optimizer...", cfg, accelerator)
|
| 163 |
+
param_groups = group_params(model)
|
| 164 |
+
|
| 165 |
+
for group in param_groups:
|
| 166 |
+
if "lr" not in group:
|
| 167 |
+
group["lr"] = cfg.training.lr
|
| 168 |
+
else:
|
| 169 |
+
group["lr"] = cfg.training.lr * group.get("lr_multiplier", 1.0)
|
| 170 |
+
if "weight_decay" not in group:
|
| 171 |
+
group["weight_decay"] = cfg.training.weight_decay
|
| 172 |
+
|
| 173 |
+
optimizer = torch.optim.AdamW(
|
| 174 |
+
param_groups,
|
| 175 |
+
lr=cfg.training.lr,
|
| 176 |
+
betas=tuple(cfg.training.betas),
|
| 177 |
+
eps=cfg.training.eps,
|
| 178 |
+
)
|
| 179 |
+
|
| 180 |
+
# === Scheduler ===
|
| 181 |
+
steps_per_epoch = math.ceil(len(train_dataloader) / accelerator.num_processes)
|
| 182 |
+
total_steps = (
|
| 183 |
+
cfg.training.epochs
|
| 184 |
+
* steps_per_epoch
|
| 185 |
+
// cfg.training.gradient_accumulation_steps
|
| 186 |
+
)
|
| 187 |
+
scheduler = get_lr_scheduler(optimizer, cfg, total_steps)
|
| 188 |
+
|
| 189 |
+
log_message(
|
| 190 |
+
f"Total steps: {total_steps}, Steps per epoch: {steps_per_epoch}",
|
| 191 |
+
cfg,
|
| 192 |
+
accelerator,
|
| 193 |
+
)
|
| 194 |
+
|
| 195 |
+
# === Accelerate Prepare ===
|
| 196 |
+
log_message(
|
| 197 |
+
"Preparing model, optimizer, and dataloaders with Accelerate...",
|
| 198 |
+
cfg,
|
| 199 |
+
accelerator,
|
| 200 |
+
)
|
| 201 |
+
|
| 202 |
+
if val_dataloader is not None:
|
| 203 |
+
model, optimizer, train_dataloader, val_dataloader, scheduler = (
|
| 204 |
+
accelerator.prepare(
|
| 205 |
+
model, optimizer, train_dataloader, val_dataloader, scheduler
|
| 206 |
+
)
|
| 207 |
+
)
|
| 208 |
+
else:
|
| 209 |
+
model, optimizer, train_dataloader, scheduler = accelerator.prepare(
|
| 210 |
+
model, optimizer, train_dataloader, scheduler
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
log_message(
|
| 214 |
+
f"Train batches per epoch (after DDP split): {len(train_dataloader)}",
|
| 215 |
+
cfg,
|
| 216 |
+
accelerator,
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
# === Resume ===
|
| 220 |
+
global_step = 0
|
| 221 |
+
start_epoch = 1
|
| 222 |
+
|
| 223 |
+
if cfg.training.resume and cfg.training.resume_checkpoint:
|
| 224 |
+
global_step, start_epoch = load_checkpoint(
|
| 225 |
+
model,
|
| 226 |
+
optimizer,
|
| 227 |
+
scheduler,
|
| 228 |
+
cfg.training.resume_checkpoint,
|
| 229 |
+
cfg,
|
| 230 |
+
accelerator,
|
| 231 |
+
)
|
| 232 |
+
start_epoch += 1
|
| 233 |
+
|
| 234 |
+
# === Training Loop ===
|
| 235 |
+
log_message("Starting training...", cfg, accelerator)
|
| 236 |
+
|
| 237 |
+
best_val_loss = float("inf")
|
| 238 |
+
|
| 239 |
+
try:
|
| 240 |
+
for epoch in range(start_epoch, cfg.training.epochs + 1):
|
| 241 |
+
log_message(f"\n{'=' * 60}", cfg, accelerator)
|
| 242 |
+
log_message(f"EPOCH {epoch}/{cfg.training.epochs}", cfg, accelerator)
|
| 243 |
+
log_message(f"{'=' * 60}", cfg, accelerator)
|
| 244 |
+
|
| 245 |
+
global_step, best_val_loss = train_epoch(
|
| 246 |
+
model=model,
|
| 247 |
+
dataloader=train_dataloader,
|
| 248 |
+
optimizer=optimizer,
|
| 249 |
+
scheduler=scheduler,
|
| 250 |
+
cfg=cfg,
|
| 251 |
+
epoch=epoch,
|
| 252 |
+
global_step=global_step,
|
| 253 |
+
accelerator=accelerator,
|
| 254 |
+
val_dataloader=val_dataloader,
|
| 255 |
+
best_val_loss=best_val_loss,
|
| 256 |
+
)
|
| 257 |
+
|
| 258 |
+
if cfg.logging.save_every_epoch:
|
| 259 |
+
save_checkpoint(
|
| 260 |
+
model, optimizer, scheduler, global_step, epoch, cfg, accelerator
|
| 261 |
+
)
|
| 262 |
+
|
| 263 |
+
except KeyboardInterrupt:
|
| 264 |
+
log_message("Training interrupted by user", cfg, accelerator)
|
| 265 |
+
save_checkpoint(
|
| 266 |
+
model, optimizer, scheduler, global_step, epoch, cfg, accelerator
|
| 267 |
+
)
|
| 268 |
+
|
| 269 |
+
# === Final Save ===
|
| 270 |
+
log_message("\nTraining completed!", cfg, accelerator)
|
| 271 |
+
|
| 272 |
+
if accelerator.is_main_process:
|
| 273 |
+
final_model_path = Path(cfg.paths.output_dir) / "model_final.pt"
|
| 274 |
+
unwrapped_model = accelerator.unwrap_model(model)
|
| 275 |
+
torch.save(unwrapped_model.state_dict(), final_model_path)
|
| 276 |
+
log_message(f"Final model: {final_model_path}", cfg, accelerator)
|
| 277 |
+
|
| 278 |
+
accelerator.wait_for_everyone()
|
| 279 |
+
accelerator.end_training()
|
| 280 |
+
finish_tracking()
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
if __name__ == "__main__":
|
| 284 |
+
main()
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/config.yaml
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_wandb:
|
| 2 |
+
value:
|
| 3 |
+
cli_version: 0.24.0
|
| 4 |
+
code_path: code/code_completion_exp/train_hnet/train.py
|
| 5 |
+
e:
|
| 6 |
+
ng0f4uiv4waaucdcyjb64wcr0cxifhf8:
|
| 7 |
+
args:
|
| 8 |
+
- tracking=wandb
|
| 9 |
+
- tracking.project=code-completion_lr-sweep
|
| 10 |
+
- tracking.run_name=hnet_xl_code_lr_5e-5
|
| 11 |
+
- training.lr=5e-5
|
| 12 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 13 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 14 |
+
codePath: code_completion_exp/train_hnet/train.py
|
| 15 |
+
codePathLocal: train.py
|
| 16 |
+
cpu_count: 64
|
| 17 |
+
cpu_count_logical: 128
|
| 18 |
+
cudaVersion: "12.2"
|
| 19 |
+
disk:
|
| 20 |
+
/:
|
| 21 |
+
total: "265214230528"
|
| 22 |
+
used: "91343581184"
|
| 23 |
+
email: nikita@local.ru
|
| 24 |
+
executable: /venv/bytellm/bin/python
|
| 25 |
+
git:
|
| 26 |
+
commit: f111e13281aa0dc58e24302edab5b0d5c2024586
|
| 27 |
+
remote: https://github.com/naryst/byte-llms-code.git
|
| 28 |
+
gpu: NVIDIA H100 80GB HBM3
|
| 29 |
+
gpu_count: 4
|
| 30 |
+
gpu_nvidia:
|
| 31 |
+
- architecture: Hopper
|
| 32 |
+
cudaCores: 16896
|
| 33 |
+
memoryTotal: "85520809984"
|
| 34 |
+
name: NVIDIA H100 80GB HBM3
|
| 35 |
+
uuid: GPU-b60cdcab-2033-2009-41de-be646c953a20
|
| 36 |
+
- architecture: Hopper
|
| 37 |
+
cudaCores: 16896
|
| 38 |
+
memoryTotal: "85520809984"
|
| 39 |
+
name: NVIDIA H100 80GB HBM3
|
| 40 |
+
uuid: GPU-9982b420-4520-4238-c378-ec5a46015474
|
| 41 |
+
- architecture: Hopper
|
| 42 |
+
cudaCores: 16896
|
| 43 |
+
memoryTotal: "85520809984"
|
| 44 |
+
name: NVIDIA H100 80GB HBM3
|
| 45 |
+
uuid: GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f
|
| 46 |
+
- architecture: Hopper
|
| 47 |
+
cudaCores: 16896
|
| 48 |
+
memoryTotal: "85520809984"
|
| 49 |
+
name: NVIDIA H100 80GB HBM3
|
| 50 |
+
uuid: GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134
|
| 51 |
+
host: 7504e518d24a
|
| 52 |
+
memory:
|
| 53 |
+
total: "1081679683584"
|
| 54 |
+
os: Linux-5.4.0-176-generic-x86_64-with-glibc2.35
|
| 55 |
+
program: /workspace/byte-llms-code/code_completion_exp/train_hnet/train.py
|
| 56 |
+
python: CPython 3.12.0
|
| 57 |
+
root: /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 58 |
+
startedAt: "2026-04-25T18:06:03.105894Z"
|
| 59 |
+
writerId: ng0f4uiv4waaucdcyjb64wcr0cxifhf8
|
| 60 |
+
m: []
|
| 61 |
+
python_version: 3.12.0
|
| 62 |
+
t:
|
| 63 |
+
"1":
|
| 64 |
+
- 1
|
| 65 |
+
- 11
|
| 66 |
+
- 49
|
| 67 |
+
- 50
|
| 68 |
+
- 51
|
| 69 |
+
- 71
|
| 70 |
+
- 105
|
| 71 |
+
"2":
|
| 72 |
+
- 1
|
| 73 |
+
- 11
|
| 74 |
+
- 49
|
| 75 |
+
- 50
|
| 76 |
+
- 51
|
| 77 |
+
- 71
|
| 78 |
+
- 105
|
| 79 |
+
"3":
|
| 80 |
+
- 2
|
| 81 |
+
- 13
|
| 82 |
+
- 16
|
| 83 |
+
- 61
|
| 84 |
+
"4": 3.12.0
|
| 85 |
+
"5": 0.24.0
|
| 86 |
+
"6": 4.57.6
|
| 87 |
+
"12": 0.24.0
|
| 88 |
+
"13": linux-x86_64
|
| 89 |
+
data:
|
| 90 |
+
value:
|
| 91 |
+
max_context_len: 4096
|
| 92 |
+
max_target_len: 256
|
| 93 |
+
max_train_samples: null
|
| 94 |
+
max_val_samples: 2000
|
| 95 |
+
num_workers: 0
|
| 96 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 97 |
+
pin_memory: true
|
| 98 |
+
device:
|
| 99 |
+
value: cuda
|
| 100 |
+
logging:
|
| 101 |
+
value:
|
| 102 |
+
eval_interval: 2000
|
| 103 |
+
log_interval: 10
|
| 104 |
+
save_every_epoch: false
|
| 105 |
+
save_interval: 0
|
| 106 |
+
model:
|
| 107 |
+
value:
|
| 108 |
+
checkpoint_path: /workspace/byte-llms-code/hnet_project/checkpoints/hnet_2stage_XL_code.pt
|
| 109 |
+
config_path: /workspace/byte-llms-code/hnet_project/configs/hnet_2stage_XL_code.json
|
| 110 |
+
paths:
|
| 111 |
+
value:
|
| 112 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 113 |
+
seed:
|
| 114 |
+
value: 42
|
| 115 |
+
tracking:
|
| 116 |
+
value:
|
| 117 |
+
backend: wandb
|
| 118 |
+
base_url: https://wandb.platun0v.ru
|
| 119 |
+
enabled: true
|
| 120 |
+
entity: null
|
| 121 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5
|
| 122 |
+
project: code-completion_lr-sweep
|
| 123 |
+
run_name: hnet_xl_code_lr_5e-5
|
| 124 |
+
training:
|
| 125 |
+
value:
|
| 126 |
+
batch_size: 4
|
| 127 |
+
betas:
|
| 128 |
+
- 0.9
|
| 129 |
+
- 0.95
|
| 130 |
+
decay_ratio: 0.2
|
| 131 |
+
epochs: 1
|
| 132 |
+
eps: 1e-08
|
| 133 |
+
eval_batch_size: 24
|
| 134 |
+
gradient_accumulation_steps: 4
|
| 135 |
+
load_balancing_N: 4
|
| 136 |
+
load_balancing_weight: 0.01
|
| 137 |
+
lr: 5e-05
|
| 138 |
+
lr_multiplier:
|
| 139 |
+
- 2
|
| 140 |
+
- 1.5
|
| 141 |
+
- 1
|
| 142 |
+
lr_scheduler: wsd
|
| 143 |
+
max_grad_norm: 1
|
| 144 |
+
min_lr_ratio: 0.1
|
| 145 |
+
resume: false
|
| 146 |
+
resume_checkpoint: null
|
| 147 |
+
use_amp: true
|
| 148 |
+
warmup_model: true
|
| 149 |
+
warmup_ratio: 0.1
|
| 150 |
+
warmup_steps: 100
|
| 151 |
+
weight_decay: 0.1
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/output.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/requirements.txt
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
setuptools==78.1.1
|
| 2 |
+
wheel==0.45.1
|
| 3 |
+
pip==25.2
|
| 4 |
+
webencodings==0.5.1
|
| 5 |
+
triton==3.2.0
|
| 6 |
+
pytz==2025.2
|
| 7 |
+
pydub==0.25.1
|
| 8 |
+
pure_eval==0.2.3
|
| 9 |
+
ptyprocess==0.7.0
|
| 10 |
+
nvidia-ml-py==13.590.48
|
| 11 |
+
nvidia-cusparselt-cu12==0.6.2
|
| 12 |
+
mpmath==1.3.0
|
| 13 |
+
ipython-genutils==0.2.0
|
| 14 |
+
fastjsonschema==2.21.2
|
| 15 |
+
brotli==1.2.0
|
| 16 |
+
antlr4-python3-runtime==4.9.3
|
| 17 |
+
xxhash==3.6.0
|
| 18 |
+
widgetsnbextension==4.0.14
|
| 19 |
+
websocket-client==1.9.0
|
| 20 |
+
webcolors==24.11.1
|
| 21 |
+
wcwidth==0.2.14
|
| 22 |
+
urllib3==2.5.0
|
| 23 |
+
uri-template==1.3.0
|
| 24 |
+
tzdata==2025.2
|
| 25 |
+
typing_extensions==4.15.0
|
| 26 |
+
types-python-dateutil==2.9.0.20251008
|
| 27 |
+
traitlets==5.14.3
|
| 28 |
+
tqdm==4.67.1
|
| 29 |
+
tornado==6.5.2
|
| 30 |
+
tomlkit==0.13.3
|
| 31 |
+
tinycss2==1.4.0
|
| 32 |
+
tabulate==0.9.0
|
| 33 |
+
sympy==1.13.1
|
| 34 |
+
soupsieve==2.8
|
| 35 |
+
sniffio==1.3.1
|
| 36 |
+
smmap==5.0.2
|
| 37 |
+
six==1.17.0
|
| 38 |
+
shellingham==1.5.4
|
| 39 |
+
Send2Trash==1.8.3
|
| 40 |
+
semantic-version==2.10.0
|
| 41 |
+
safetensors==0.6.2
|
| 42 |
+
rpds-py==0.27.1
|
| 43 |
+
rfc3986-validator==0.1.1
|
| 44 |
+
regex==2025.9.18
|
| 45 |
+
pyzmq==27.1.0
|
| 46 |
+
PyYAML==6.0.3
|
| 47 |
+
python-multipart==0.0.22
|
| 48 |
+
python-json-logger==4.0.0
|
| 49 |
+
python-dotenv==1.2.1
|
| 50 |
+
pyparsing==3.2.5
|
| 51 |
+
PyJWT==2.8.0
|
| 52 |
+
Pygments==2.19.2
|
| 53 |
+
pycparser==2.23
|
| 54 |
+
pyarrow==22.0.0
|
| 55 |
+
psutil==7.1.0
|
| 56 |
+
protobuf==6.33.4
|
| 57 |
+
propcache==0.4.1
|
| 58 |
+
prometheus_client==0.23.1
|
| 59 |
+
portalocker==3.2.0
|
| 60 |
+
platformdirs==4.5.0
|
| 61 |
+
pillow==11.3.0
|
| 62 |
+
pexpect==4.9.0
|
| 63 |
+
pathspec==1.0.4
|
| 64 |
+
parso==0.8.5
|
| 65 |
+
pandocfilters==1.5.1
|
| 66 |
+
packaging==25.0
|
| 67 |
+
orjson==3.11.6
|
| 68 |
+
opt_einsum==3.4.0
|
| 69 |
+
nvidia-nvtx-cu12==12.4.127
|
| 70 |
+
nvidia-nvjitlink-cu12==12.4.127
|
| 71 |
+
nvidia-nccl-cu12==2.21.5
|
| 72 |
+
nvidia-curand-cu12==10.3.5.147
|
| 73 |
+
nvidia-cufile-cu12==1.13.1.3
|
| 74 |
+
nvidia-cufft-cu12==11.2.1.3
|
| 75 |
+
nvidia-cuda-runtime-cu12==12.4.127
|
| 76 |
+
nvidia-cuda-nvrtc-cu12==12.4.127
|
| 77 |
+
nvidia-cuda-cupti-cu12==12.4.127
|
| 78 |
+
nvidia-cublas-cu12==12.4.5.8
|
| 79 |
+
numpy==2.3.3
|
| 80 |
+
ninja==1.13.0
|
| 81 |
+
networkx==3.5
|
| 82 |
+
nest-asyncio==1.6.0
|
| 83 |
+
narwhals==2.15.0
|
| 84 |
+
mypy_extensions==1.1.0
|
| 85 |
+
multidict==6.7.0
|
| 86 |
+
mistune==3.1.4
|
| 87 |
+
mdurl==0.1.2
|
| 88 |
+
MarkupSafe==3.0.3
|
| 89 |
+
lxml==6.0.2
|
| 90 |
+
librt==0.8.0
|
| 91 |
+
lark==1.3.0
|
| 92 |
+
kiwisolver==1.4.9
|
| 93 |
+
jupyterlab_widgets==3.0.15
|
| 94 |
+
jupyterlab_pygments==0.3.0
|
| 95 |
+
jsonpointer==3.0.0
|
| 96 |
+
json5==0.12.1
|
| 97 |
+
itsdangerous==2.2.0
|
| 98 |
+
idna==3.10
|
| 99 |
+
hf-xet==1.1.10
|
| 100 |
+
h11==0.16.0
|
| 101 |
+
groovy==0.1.2
|
| 102 |
+
fsspec==2025.9.0
|
| 103 |
+
frozenlist==1.8.0
|
| 104 |
+
fqdn==1.5.1
|
| 105 |
+
fonttools==4.60.1
|
| 106 |
+
filelock==3.19.1
|
| 107 |
+
ffmpy==1.0.0
|
| 108 |
+
executing==2.2.1
|
| 109 |
+
einops==0.8.1
|
| 110 |
+
dill==0.4.0
|
| 111 |
+
defusedxml==0.7.1
|
| 112 |
+
decorator==5.2.1
|
| 113 |
+
debugpy==1.8.17
|
| 114 |
+
dacite==1.9.2
|
| 115 |
+
cycler==0.12.1
|
| 116 |
+
comm==0.2.3
|
| 117 |
+
colorama==0.4.6
|
| 118 |
+
click==8.3.1
|
| 119 |
+
charset-normalizer==3.4.3
|
| 120 |
+
certifi==2025.10.5
|
| 121 |
+
bleach==6.2.0
|
| 122 |
+
babel==2.17.0
|
| 123 |
+
attrs==25.4.0
|
| 124 |
+
async-lru==2.0.5
|
| 125 |
+
asttokens==3.0.0
|
| 126 |
+
annotated-types==0.7.0
|
| 127 |
+
annotated-doc==0.0.4
|
| 128 |
+
aiohappyeyeballs==2.6.1
|
| 129 |
+
aiofiles==24.1.0
|
| 130 |
+
yarl==1.22.0
|
| 131 |
+
uvicorn==0.40.0
|
| 132 |
+
typing-inspection==0.4.2
|
| 133 |
+
terminado==0.18.1
|
| 134 |
+
stack-data==0.6.3
|
| 135 |
+
sentry-sdk==2.50.0
|
| 136 |
+
scipy==1.17.0
|
| 137 |
+
sacrebleu==2.6.0
|
| 138 |
+
rfc3987-syntax==1.1.0
|
| 139 |
+
rfc3339-validator==0.1.4
|
| 140 |
+
requests==2.32.5
|
| 141 |
+
reportlab==4.4.9
|
| 142 |
+
referencing==0.36.2
|
| 143 |
+
python-dateutil==2.9.0.post0
|
| 144 |
+
pydantic_core==2.41.5
|
| 145 |
+
prompt_toolkit==3.0.52
|
| 146 |
+
plotly==6.5.2
|
| 147 |
+
pathlib2==2.3.7.post1
|
| 148 |
+
orderedmultidict==1.0.2
|
| 149 |
+
optree==0.17.0
|
| 150 |
+
omegaconf==2.3.0
|
| 151 |
+
nvidia-cusparse-cu12==12.3.1.170
|
| 152 |
+
nvidia-cudnn-cu12==9.1.0.70
|
| 153 |
+
mypy==1.19.1
|
| 154 |
+
multiprocess==0.70.16
|
| 155 |
+
matplotlib-inline==0.1.7
|
| 156 |
+
markdown-it-py==4.0.0
|
| 157 |
+
jupyter_core==5.8.1
|
| 158 |
+
Jinja2==3.1.6
|
| 159 |
+
jedi==0.19.2
|
| 160 |
+
ipython_pygments_lexers==1.1.1
|
| 161 |
+
httpcore==1.0.9
|
| 162 |
+
gitdb==4.0.12
|
| 163 |
+
ftfy==6.3.1
|
| 164 |
+
contourpy==1.3.3
|
| 165 |
+
cffi==2.0.0
|
| 166 |
+
beautifulsoup4==4.14.2
|
| 167 |
+
anyio==4.11.0
|
| 168 |
+
aiosignal==1.4.0
|
| 169 |
+
starlette==0.50.0
|
| 170 |
+
rich==14.2.0
|
| 171 |
+
pydantic==2.12.5
|
| 172 |
+
pandas==2.3.3
|
| 173 |
+
nvidia-cusolver-cu12==11.6.1.9
|
| 174 |
+
matplotlib==3.10.7
|
| 175 |
+
jupyter_server_terminals==0.5.3
|
| 176 |
+
jupyter_client==8.6.3
|
| 177 |
+
jsonschema-specifications==2025.9.1
|
| 178 |
+
ipython==9.6.0
|
| 179 |
+
hydra-core==1.3.2
|
| 180 |
+
huggingface-hub==0.35.3
|
| 181 |
+
httpx==0.28.1
|
| 182 |
+
GitPython==3.1.46
|
| 183 |
+
furl==2.1.4
|
| 184 |
+
cryptography==46.0.4
|
| 185 |
+
arrow==1.3.0
|
| 186 |
+
argon2-cffi-bindings==25.1.0
|
| 187 |
+
aiohttp==3.13.1
|
| 188 |
+
wandb==0.24.0
|
| 189 |
+
typer==0.21.1
|
| 190 |
+
torch==2.6.0
|
| 191 |
+
tokenizers==0.22.1
|
| 192 |
+
seaborn==0.13.2
|
| 193 |
+
safehttpx==0.1.7
|
| 194 |
+
jsonschema==4.25.1
|
| 195 |
+
joypy==0.2.6
|
| 196 |
+
isoduration==20.11.0
|
| 197 |
+
ipywidgets==8.1.7
|
| 198 |
+
ipykernel==6.30.1
|
| 199 |
+
gradio_client==2.0.3
|
| 200 |
+
fastapi==0.128.0
|
| 201 |
+
Authlib==1.6.6
|
| 202 |
+
argon2-cffi==25.1.0
|
| 203 |
+
transformers==4.57.6
|
| 204 |
+
nbformat==5.10.4
|
| 205 |
+
mlstm_kernels==2.0.2
|
| 206 |
+
jupyter-console==6.6.3
|
| 207 |
+
gradio==6.5.1
|
| 208 |
+
datasets==4.3.0
|
| 209 |
+
clearml==1.16.4
|
| 210 |
+
accelerate==1.10.1
|
| 211 |
+
xlstm==2.0.4
|
| 212 |
+
nbclient==0.10.2
|
| 213 |
+
jupyter-events==0.12.0
|
| 214 |
+
trackio==0.15.0
|
| 215 |
+
nbconvert==7.16.6
|
| 216 |
+
jupyter_server==2.17.0
|
| 217 |
+
notebook_shim==0.2.4
|
| 218 |
+
jupyterlab_server==2.27.3
|
| 219 |
+
jupyter-lsp==2.3.0
|
| 220 |
+
nbclassic==1.3.3
|
| 221 |
+
jupyterlab==4.4.9
|
| 222 |
+
notebook==7.4.7
|
| 223 |
+
jupyter_contrib_core==0.4.2
|
| 224 |
+
jupyter==1.1.1
|
| 225 |
+
jupyter_nbextensions_configurator==0.6.4
|
| 226 |
+
causal-conv1d==1.5.0.post8
|
| 227 |
+
flash_attn==2.7.4.post1
|
| 228 |
+
mamba-ssm==2.2.4
|
| 229 |
+
hnet==0.0.1
|
| 230 |
+
autocommand==2.2.2
|
| 231 |
+
backports.tarfile==1.2.0
|
| 232 |
+
importlib_metadata==8.0.0
|
| 233 |
+
inflect==7.3.1
|
| 234 |
+
jaraco.collections==5.1.0
|
| 235 |
+
jaraco.context==5.3.0
|
| 236 |
+
jaraco.functools==4.0.1
|
| 237 |
+
jaraco.text==3.12.1
|
| 238 |
+
more-itertools==10.3.0
|
| 239 |
+
packaging==24.2
|
| 240 |
+
platformdirs==4.2.2
|
| 241 |
+
tomli==2.0.1
|
| 242 |
+
typeguard==4.3.0
|
| 243 |
+
typing_extensions==4.12.2
|
| 244 |
+
wheel==0.45.1
|
| 245 |
+
zipp==3.19.2
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/wandb-metadata.json
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"os": "Linux-5.4.0-176-generic-x86_64-with-glibc2.35",
|
| 3 |
+
"python": "CPython 3.12.0",
|
| 4 |
+
"startedAt": "2026-04-25T18:06:03.105894Z",
|
| 5 |
+
"args": [
|
| 6 |
+
"tracking=wandb",
|
| 7 |
+
"tracking.project=code-completion_lr-sweep",
|
| 8 |
+
"tracking.run_name=hnet_xl_code_lr_5e-5",
|
| 9 |
+
"training.lr=5e-5",
|
| 10 |
+
"paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5",
|
| 11 |
+
"data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full"
|
| 12 |
+
],
|
| 13 |
+
"program": "/workspace/byte-llms-code/code_completion_exp/train_hnet/train.py",
|
| 14 |
+
"codePath": "code_completion_exp/train_hnet/train.py",
|
| 15 |
+
"codePathLocal": "train.py",
|
| 16 |
+
"git": {
|
| 17 |
+
"remote": "https://github.com/naryst/byte-llms-code.git",
|
| 18 |
+
"commit": "f111e13281aa0dc58e24302edab5b0d5c2024586"
|
| 19 |
+
},
|
| 20 |
+
"email": "nikita@local.ru",
|
| 21 |
+
"root": "/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5",
|
| 22 |
+
"host": "7504e518d24a",
|
| 23 |
+
"executable": "/venv/bytellm/bin/python",
|
| 24 |
+
"cpu_count": 64,
|
| 25 |
+
"cpu_count_logical": 128,
|
| 26 |
+
"gpu": "NVIDIA H100 80GB HBM3",
|
| 27 |
+
"gpu_count": 4,
|
| 28 |
+
"disk": {
|
| 29 |
+
"/": {
|
| 30 |
+
"total": "265214230528",
|
| 31 |
+
"used": "91343581184"
|
| 32 |
+
}
|
| 33 |
+
},
|
| 34 |
+
"memory": {
|
| 35 |
+
"total": "1081679683584"
|
| 36 |
+
},
|
| 37 |
+
"gpu_nvidia": [
|
| 38 |
+
{
|
| 39 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 40 |
+
"memoryTotal": "85520809984",
|
| 41 |
+
"cudaCores": 16896,
|
| 42 |
+
"architecture": "Hopper",
|
| 43 |
+
"uuid": "GPU-b60cdcab-2033-2009-41de-be646c953a20"
|
| 44 |
+
},
|
| 45 |
+
{
|
| 46 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 47 |
+
"memoryTotal": "85520809984",
|
| 48 |
+
"cudaCores": 16896,
|
| 49 |
+
"architecture": "Hopper",
|
| 50 |
+
"uuid": "GPU-9982b420-4520-4238-c378-ec5a46015474"
|
| 51 |
+
},
|
| 52 |
+
{
|
| 53 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 54 |
+
"memoryTotal": "85520809984",
|
| 55 |
+
"cudaCores": 16896,
|
| 56 |
+
"architecture": "Hopper",
|
| 57 |
+
"uuid": "GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f"
|
| 58 |
+
},
|
| 59 |
+
{
|
| 60 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 61 |
+
"memoryTotal": "85520809984",
|
| 62 |
+
"cudaCores": 16896,
|
| 63 |
+
"architecture": "Hopper",
|
| 64 |
+
"uuid": "GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134"
|
| 65 |
+
}
|
| 66 |
+
],
|
| 67 |
+
"cudaVersion": "12.2",
|
| 68 |
+
"writerId": "ng0f4uiv4waaucdcyjb64wcr0cxifhf8"
|
| 69 |
+
}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/files/wandb-summary.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"val/lb_loss":1.113722094467708,"_runtime":7266,"val/perplexity":1.326906686011945,"epoch/chunk_len_stage1":2.227686108994433,"train/lb_loss":1.151836633682251,"train/chunk_len_stage1":2.2276076026932334,"best/step":8000,"epoch/time":7214.254835128784,"epoch/lb_loss":1.1299939348855128,"train/step_time":0.7182671308517456,"best/val_perplexity":1.326906686011945,"train/loss":0.20530645921826363,"best/val_loss":0.2948576903768948,"train/hard_boundary_ratio_stage1":0.4513587379491782,"_wandb":{"runtime":7266},"train/lm_loss":0.2364824414253235,"epoch/soft_boundary_ratio_stage0":0.3556014254103862,"epoch/hard_boundary_ratio_stage0":0.35774536202324886,"epoch/chunk_len_stage0":2.816682264044774,"epoch/lm_loss":0.2867856953784603,"train/soft_boundary_ratio_stage1":0.42676311351101504,"epoch/soft_boundary_ratio_stage1":0.42674918260615063,"train/hard_boundary_ratio_stage0":0.3577355250559582,"val/loss":0.2948576903768948,"train/chunk_len_stage0":2.8167589370674735,"val/lm_loss":0.2837204684813817,"epoch/hard_boundary_ratio_stage1":0.4513428061250343,"train/soft_boundary_ratio_stage0":0.35559405692447654,"train/loss_avg":0.2974112755494562,"train/lr":5e-06,"train/epoch":1,"epoch/loss":0.2974204394287771,"val/time":4.8183159828186035,"_step":9880,"_timestamp":1.777147627576771e+09}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-core.log
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:03.189254033Z","level":"INFO","msg":"main: starting server","port-filename":"/tmp/tmpv0c5_3r9/port-65184.txt","pid":65184,"log-level":0,"disable-analytics":false,"shutdown-on-parent-exit":false,"enable-dcgm-profiling":false}
|
| 2 |
+
{"time":"2026-04-25T18:06:03.18962283Z","level":"INFO","msg":"server: will exit if parent process dies","ppid":65184}
|
| 3 |
+
{"time":"2026-04-25T18:06:03.189620015Z","level":"INFO","msg":"server: accepting connections","addr":{"Name":"/tmp/wandb-65184-65308-2725689892/socket","Net":"unix"}}
|
| 4 |
+
{"time":"2026-04-25T18:06:03.377566439Z","level":"INFO","msg":"connection: ManageConnectionData: new connection created","id":"1(@)"}
|
| 5 |
+
{"time":"2026-04-25T18:06:03.39727817Z","level":"INFO","msg":"handleInformInit: received","streamId":"5xd22ofy","id":"1(@)"}
|
| 6 |
+
{"time":"2026-04-25T18:06:03.764066649Z","level":"INFO","msg":"handleInformInit: stream started","streamId":"5xd22ofy","id":"1(@)"}
|
| 7 |
+
{"time":"2026-04-25T20:07:11.474490247Z","level":"INFO","msg":"handleInformFinish: finish message received","streamId":"5xd22ofy","id":"1(@)"}
|
| 8 |
+
{"time":"2026-04-25T20:07:11.474897744Z","level":"INFO","msg":"handleInformFinish: stream closed","streamId":"5xd22ofy","id":"1(@)"}
|
| 9 |
+
{"time":"2026-04-25T20:07:11.484744574Z","level":"INFO","msg":"handleInformTeardown: server teardown initiated","id":"1(@)"}
|
| 10 |
+
{"time":"2026-04-25T20:07:11.484759834Z","level":"INFO","msg":"handleInformTeardown: server shutdown complete","id":"1(@)"}
|
| 11 |
+
{"time":"2026-04-25T20:07:11.484764157Z","level":"INFO","msg":"server is shutting down"}
|
| 12 |
+
{"time":"2026-04-25T20:07:11.484767545Z","level":"INFO","msg":"connection: closing","id":"1(@)"}
|
| 13 |
+
{"time":"2026-04-25T20:07:11.484805267Z","level":"INFO","msg":"server: listener closed","addr":{"Name":"/tmp/wandb-65184-65308-2725689892/socket","Net":"unix"}}
|
| 14 |
+
{"time":"2026-04-25T20:07:11.484812293Z","level":"INFO","msg":"connection: closed successfully","id":"1(@)"}
|
| 15 |
+
{"time":"2026-04-25T20:07:11.484830515Z","level":"INFO","msg":"connection: ManageConnectionData: connection closed","id":"1(@)"}
|
| 16 |
+
{"time":"2026-04-25T20:07:11.484836079Z","level":"INFO","msg":"server is closed"}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-internal.log
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:03.397377041Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T18:06:03.763903636Z","level":"INFO","msg":"stream: created new stream","id":"5xd22ofy"}
|
| 3 |
+
{"time":"2026-04-25T18:06:03.76396939Z","level":"INFO","msg":"handler: started","stream_id":"5xd22ofy"}
|
| 4 |
+
{"time":"2026-04-25T18:06:03.764060758Z","level":"INFO","msg":"stream: started","id":"5xd22ofy"}
|
| 5 |
+
{"time":"2026-04-25T18:06:03.764070052Z","level":"INFO","msg":"writer: started","stream_id":"5xd22ofy"}
|
| 6 |
+
{"time":"2026-04-25T18:06:03.764074071Z","level":"INFO","msg":"sender: started","stream_id":"5xd22ofy"}
|
| 7 |
+
{"time":"2026-04-25T18:06:03.894739834Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
| 8 |
+
{"time":"2026-04-25T18:50:19.195555067Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 9 |
+
{"time":"2026-04-25T18:51:04.195795437Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 10 |
+
{"time":"2026-04-25T18:51:19.181133591Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 11 |
+
{"time":"2026-04-25T18:56:04.214926057Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 12 |
+
{"time":"2026-04-25T19:01:04.252685213Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 13 |
+
{"time":"2026-04-25T19:02:19.340852054Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 14 |
+
{"time":"2026-04-25T19:02:49.250024047Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 15 |
+
{"time":"2026-04-25T19:03:04.189138296Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 16 |
+
{"time":"2026-04-25T19:03:19.184641802Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 17 |
+
{"time":"2026-04-25T19:03:49.245246272Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 18 |
+
{"time":"2026-04-25T19:04:04.204755219Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 19 |
+
{"time":"2026-04-25T19:05:34.344593348Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 20 |
+
{"time":"2026-04-25T19:07:04.245410066Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 21 |
+
{"time":"2026-04-25T19:09:04.200628758Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 22 |
+
{"time":"2026-04-25T19:11:04.246084299Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 23 |
+
{"time":"2026-04-25T19:12:49.152243469Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 24 |
+
{"time":"2026-04-25T19:13:04.624716271Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 25 |
+
{"time":"2026-04-25T19:13:34.190147444Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 26 |
+
{"time":"2026-04-25T19:13:49.189117021Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 27 |
+
{"time":"2026-04-25T19:14:04.191888213Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 28 |
+
{"time":"2026-04-25T19:18:19.190447211Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 29 |
+
{"time":"2026-04-25T19:18:34.241234031Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 30 |
+
{"time":"2026-04-25T19:18:49.61536326Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 31 |
+
{"time":"2026-04-25T19:20:04.282694457Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 32 |
+
{"time":"2026-04-25T19:20:34.200078948Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 33 |
+
{"time":"2026-04-25T19:21:04.655700817Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 34 |
+
{"time":"2026-04-25T19:22:04.245885251Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 35 |
+
{"time":"2026-04-25T19:24:19.3404455Z","level":"INFO","msg":"api: retrying HTTP error","status":502,"url":"https://wandb.platun0v.ru/files/nikita/code-completion_lr-sweep/5xd22ofy/file_stream","body":"Bad Gateway"}
|
| 36 |
+
{"time":"2026-04-25T20:07:11.317595318Z","level":"INFO","msg":"fileTransfer: Close: file transfer manager closed"}
|
| 37 |
+
{"time":"2026-04-25T20:07:11.47211371Z","level":"INFO","msg":"handler: operation stats","stats":{}}
|
| 38 |
+
{"time":"2026-04-25T20:07:11.474506622Z","level":"INFO","msg":"stream: closing","id":"5xd22ofy"}
|
| 39 |
+
{"time":"2026-04-25T20:07:11.47451524Z","level":"INFO","msg":"handler: closed","stream_id":"5xd22ofy"}
|
| 40 |
+
{"time":"2026-04-25T20:07:11.474586904Z","level":"INFO","msg":"sender: closed","stream_id":"5xd22ofy"}
|
| 41 |
+
{"time":"2026-04-25T20:07:11.4745917Z","level":"INFO","msg":"stream: closed","id":"5xd22ofy"}
|
lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug.log
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Configure stats pid to 65184
|
| 3 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug.log
|
| 5 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5/wandb/run-20260425_180603-5xd22ofy/logs/debug-internal.log
|
| 6 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'config_path': '/workspace/byte-llms-code/hnet_project/configs/hnet_2stage_XL_code.json', 'checkpoint_path': '/workspace/byte-llms-code/hnet_project/checkpoints/hnet_2stage_XL_code.pt'}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 24, 'gradient_accumulation_steps': 4, 'lr': 5e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'lr_multiplier': [2.0, 1.5, 1.0], 'load_balancing_weight': 0.01, 'load_balancing_N': 4.0, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None, 'warmup_model': True}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 0, 'pin_memory': True, 'max_train_samples': None, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 0, 'eval_interval': 2000, 'save_every_epoch': False}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'hnet_xl_code_lr_5e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/hnet_xl_code_lr_5e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_hnet/train.py'}}
|
| 9 |
+
2026-04-25 18:06:03,107 INFO MainThread:65184 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 18:06:03,377 INFO MainThread:65184 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 18:06:03,396 INFO MainThread:65184 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 18:06:03,398 INFO MainThread:65184 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 18:06:03,413 INFO MainThread:65184 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 18:06:03,893 INFO MainThread:65184 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 18:06:04,051 INFO MainThread:65184 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 18:06:04,054 INFO MainThread:65184 [wandb_init.py:init():1084] run started, returning control to user process
|
| 20 |
+
2026-04-25 20:07:10,198 INFO MainThread:65184 [wandb_run.py:_finish():2295] finishing run nikita/code-completion_lr-sweep/5xd22ofy
|
| 21 |
+
2026-04-25 20:07:10,198 INFO MainThread:65184 [wandb_run.py:_atexit_cleanup():2494] got exitcode: 0
|
| 22 |
+
2026-04-25 20:07:10,199 INFO MainThread:65184 [wandb_run.py:_restore():2476] restore
|
| 23 |
+
2026-04-25 20:07:10,199 INFO MainThread:65184 [wandb_run.py:_restore():2482] restore done
|
| 24 |
+
2026-04-25 20:07:11,474 INFO MainThread:65184 [wandb_run.py:_footer_sync_info():3870] logging synced files
|
lr_sweep/pythia_1b_lr_1e-4/train.log
ADDED
|
@@ -0,0 +1,1114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 20:13:31,855][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 2 |
+
[2026-04-25 20:13:31] CUDA_VISIBLE_DEVICES: 2,3
|
| 3 |
+
[2026-04-25 20:13:31] Number of processes: 2
|
| 4 |
+
[2026-04-25 20:13:31] Process index: 0
|
| 5 |
+
[2026-04-25 20:13:31] Mixed precision: bf16
|
| 6 |
+
[2026-04-25 20:13:31] ============================================================
|
| 7 |
+
[2026-04-25 20:13:31] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 8 |
+
[2026-04-25 20:13:31] ============================================================
|
| 9 |
+
[2026-04-25 20:13:31] Config:
|
| 10 |
+
model:
|
| 11 |
+
name: EleutherAI/pythia-1b
|
| 12 |
+
checkpoint_path: null
|
| 13 |
+
from_scratch: false
|
| 14 |
+
training:
|
| 15 |
+
epochs: 1
|
| 16 |
+
batch_size: 4
|
| 17 |
+
eval_batch_size: 12
|
| 18 |
+
gradient_accumulation_steps: 4
|
| 19 |
+
lr: 0.0001
|
| 20 |
+
weight_decay: 0.1
|
| 21 |
+
betas:
|
| 22 |
+
- 0.9
|
| 23 |
+
- 0.95
|
| 24 |
+
eps: 1.0e-08
|
| 25 |
+
lr_scheduler: wsd
|
| 26 |
+
warmup_ratio: 0.1
|
| 27 |
+
decay_ratio: 0.2
|
| 28 |
+
warmup_steps: 100
|
| 29 |
+
min_lr_ratio: 0.1
|
| 30 |
+
max_grad_norm: 1.0
|
| 31 |
+
use_amp: true
|
| 32 |
+
resume: false
|
| 33 |
+
resume_checkpoint: null
|
| 34 |
+
data:
|
| 35 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 36 |
+
max_context_len: 4096
|
| 37 |
+
max_target_len: 256
|
| 38 |
+
num_workers: 4
|
| 39 |
+
pin_memory: true
|
| 40 |
+
max_train_samples: null
|
| 41 |
+
max_val_samples: 2000
|
| 42 |
+
logging:
|
| 43 |
+
log_interval: 10
|
| 44 |
+
save_interval: 0
|
| 45 |
+
eval_interval: 2000
|
| 46 |
+
save_every_epoch: false
|
| 47 |
+
tracking:
|
| 48 |
+
enabled: true
|
| 49 |
+
backend: wandb
|
| 50 |
+
project: code-completion_lr-sweep
|
| 51 |
+
run_name: pythia_1b_lr_1e-4
|
| 52 |
+
entity: null
|
| 53 |
+
base_url: https://wandb.platun0v.ru
|
| 54 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-4
|
| 55 |
+
paths:
|
| 56 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-4
|
| 57 |
+
seed: 42
|
| 58 |
+
device: cuda
|
| 59 |
+
|
| 60 |
+
[2026-04-25 20:13:34] Initializing tokenizer...
|
| 61 |
+
[2026-04-25 20:13:34] Loading model...
|
| 62 |
+
[2026-04-25 20:13:37] Loaded pretrained: EleutherAI/pythia-1b
|
| 63 |
+
[2026-04-25 20:13:37] Total params: 1,011,781,632
|
| 64 |
+
[2026-04-25 20:13:37] Trainable params: 1,011,781,632
|
| 65 |
+
[2026-04-25 20:13:37] Creating dataloaders...
|
| 66 |
+
[2026-04-25 20:13:37] Train dataset size: 316397
|
| 67 |
+
[2026-04-25 20:13:37] Train batches per epoch (before DDP split): 79100
|
| 68 |
+
[2026-04-25 20:13:37] Validation dataset size: 2000
|
| 69 |
+
[2026-04-25 20:13:37] Validation batches: 167
|
| 70 |
+
[2026-04-25 20:13:37] Creating optimizer...
|
| 71 |
+
[2026-04-25 20:13:37] Total steps: 9887, Steps per epoch: 39550
|
| 72 |
+
[2026-04-25 20:13:37] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 73 |
+
[2026-04-25 20:13:39] Train batches per epoch (after DDP split): 39550
|
| 74 |
+
[2026-04-25 20:13:39] Starting training...
|
| 75 |
+
[2026-04-25 20:13:39]
|
| 76 |
+
============================================================
|
| 77 |
+
[2026-04-25 20:13:39] EPOCH 1/1
|
| 78 |
+
[2026-04-25 20:13:39] ============================================================
|
| 79 |
+
[2026-04-25 20:13:42] Epoch 1 | Step 10 | Loss: 1.5821 | LR: 1.18e-05
|
| 80 |
+
[2026-04-25 20:13:45] Epoch 1 | Step 20 | Loss: 1.4030 | LR: 1.36e-05
|
| 81 |
+
[2026-04-25 20:13:47] Epoch 1 | Step 30 | Loss: 1.2973 | LR: 1.55e-05
|
| 82 |
+
[2026-04-25 20:13:50] Epoch 1 | Step 40 | Loss: 1.2578 | LR: 1.73e-05
|
| 83 |
+
[2026-04-25 20:13:53] Epoch 1 | Step 50 | Loss: 1.2119 | LR: 1.91e-05
|
| 84 |
+
[2026-04-25 20:13:56] Epoch 1 | Step 60 | Loss: 1.1840 | LR: 2.09e-05
|
| 85 |
+
[2026-04-25 20:13:58] Epoch 1 | Step 70 | Loss: 1.1538 | LR: 2.28e-05
|
| 86 |
+
[2026-04-25 20:14:01] Epoch 1 | Step 80 | Loss: 1.1532 | LR: 2.46e-05
|
| 87 |
+
[2026-04-25 20:14:03] Epoch 1 | Step 90 | Loss: 1.1438 | LR: 2.64e-05
|
| 88 |
+
[2026-04-25 20:14:06] Epoch 1 | Step 100 | Loss: 1.1344 | LR: 2.82e-05
|
| 89 |
+
[2026-04-25 20:14:09] Epoch 1 | Step 110 | Loss: 1.1396 | LR: 3.00e-05
|
| 90 |
+
[2026-04-25 20:14:11] Epoch 1 | Step 120 | Loss: 1.1391 | LR: 3.19e-05
|
| 91 |
+
[2026-04-25 20:14:14] Epoch 1 | Step 130 | Loss: 1.1465 | LR: 3.37e-05
|
| 92 |
+
[2026-04-25 20:14:16] Epoch 1 | Step 140 | Loss: 1.1553 | LR: 3.55e-05
|
| 93 |
+
[2026-04-25 20:14:19] Epoch 1 | Step 150 | Loss: 1.1471 | LR: 3.73e-05
|
| 94 |
+
[2026-04-25 20:14:21] Epoch 1 | Step 160 | Loss: 1.1421 | LR: 3.91e-05
|
| 95 |
+
[2026-04-25 20:14:24] Epoch 1 | Step 170 | Loss: 1.1439 | LR: 4.10e-05
|
| 96 |
+
[2026-04-25 20:14:26] Epoch 1 | Step 180 | Loss: 1.1374 | LR: 4.28e-05
|
| 97 |
+
[2026-04-25 20:14:29] Epoch 1 | Step 190 | Loss: 1.1415 | LR: 4.46e-05
|
| 98 |
+
[2026-04-25 20:14:32] Epoch 1 | Step 200 | Loss: 1.1444 | LR: 4.64e-05
|
| 99 |
+
[2026-04-25 20:14:34] Epoch 1 | Step 210 | Loss: 1.1538 | LR: 4.83e-05
|
| 100 |
+
[2026-04-25 20:14:37] Epoch 1 | Step 220 | Loss: 1.1587 | LR: 5.01e-05
|
| 101 |
+
[2026-04-25 20:14:39] Epoch 1 | Step 230 | Loss: 1.1555 | LR: 5.19e-05
|
| 102 |
+
[2026-04-25 20:14:42] Epoch 1 | Step 240 | Loss: 1.1568 | LR: 5.37e-05
|
| 103 |
+
[2026-04-25 20:14:44] Epoch 1 | Step 250 | Loss: 1.1584 | LR: 5.55e-05
|
| 104 |
+
[2026-04-25 20:14:47] Epoch 1 | Step 260 | Loss: 1.1665 | LR: 5.74e-05
|
| 105 |
+
[2026-04-25 20:14:49] Epoch 1 | Step 270 | Loss: 1.1680 | LR: 5.92e-05
|
| 106 |
+
[2026-04-25 20:14:52] Epoch 1 | Step 280 | Loss: 1.1668 | LR: 6.10e-05
|
| 107 |
+
[2026-04-25 20:14:55] Epoch 1 | Step 290 | Loss: 1.1693 | LR: 6.28e-05
|
| 108 |
+
[2026-04-25 20:14:57] Epoch 1 | Step 300 | Loss: 1.1729 | LR: 6.47e-05
|
| 109 |
+
[2026-04-25 20:15:00] Epoch 1 | Step 310 | Loss: 1.1764 | LR: 6.65e-05
|
| 110 |
+
[2026-04-25 20:15:02] Epoch 1 | Step 320 | Loss: 1.1767 | LR: 6.83e-05
|
| 111 |
+
[2026-04-25 20:15:05] Epoch 1 | Step 330 | Loss: 1.1793 | LR: 7.01e-05
|
| 112 |
+
[2026-04-25 20:15:08] Epoch 1 | Step 340 | Loss: 1.1832 | LR: 7.19e-05
|
| 113 |
+
[2026-04-25 20:15:10] Epoch 1 | Step 350 | Loss: 1.1884 | LR: 7.38e-05
|
| 114 |
+
[2026-04-25 20:15:13] Epoch 1 | Step 360 | Loss: 1.1906 | LR: 7.56e-05
|
| 115 |
+
[2026-04-25 20:15:16] Epoch 1 | Step 370 | Loss: 1.1922 | LR: 7.74e-05
|
| 116 |
+
[2026-04-25 20:15:18] Epoch 1 | Step 380 | Loss: 1.1955 | LR: 7.92e-05
|
| 117 |
+
[2026-04-25 20:15:21] Epoch 1 | Step 390 | Loss: 1.1994 | LR: 8.11e-05
|
| 118 |
+
[2026-04-25 20:15:23] Epoch 1 | Step 400 | Loss: 1.2043 | LR: 8.29e-05
|
| 119 |
+
[2026-04-25 20:15:26] Epoch 1 | Step 410 | Loss: 1.2095 | LR: 8.47e-05
|
| 120 |
+
[2026-04-25 20:15:29] Epoch 1 | Step 420 | Loss: 1.2152 | LR: 8.65e-05
|
| 121 |
+
[2026-04-25 20:15:31] Epoch 1 | Step 430 | Loss: 1.2239 | LR: 8.83e-05
|
| 122 |
+
[2026-04-25 20:15:34] Epoch 1 | Step 440 | Loss: 1.2254 | LR: 9.02e-05
|
| 123 |
+
[2026-04-25 20:15:37] Epoch 1 | Step 450 | Loss: 1.2297 | LR: 9.20e-05
|
| 124 |
+
[2026-04-25 20:15:39] Epoch 1 | Step 460 | Loss: 1.2354 | LR: 9.38e-05
|
| 125 |
+
[2026-04-25 20:15:42] Epoch 1 | Step 470 | Loss: 1.2394 | LR: 9.56e-05
|
| 126 |
+
[2026-04-25 20:15:44] Epoch 1 | Step 480 | Loss: 1.2462 | LR: 9.74e-05
|
| 127 |
+
[2026-04-25 20:15:47] Epoch 1 | Step 490 | Loss: 1.2508 | LR: 9.93e-05
|
| 128 |
+
[2026-04-25 20:15:49] Epoch 1 | Step 500 | Loss: 1.2557 | LR: 1.00e-04
|
| 129 |
+
[2026-04-25 20:15:52] Epoch 1 | Step 510 | Loss: 1.2617 | LR: 1.00e-04
|
| 130 |
+
[2026-04-25 20:15:54] Epoch 1 | Step 520 | Loss: 1.2677 | LR: 1.00e-04
|
| 131 |
+
[2026-04-25 20:15:57] Epoch 1 | Step 530 | Loss: 1.2712 | LR: 1.00e-04
|
| 132 |
+
[2026-04-25 20:16:00] Epoch 1 | Step 540 | Loss: 1.2743 | LR: 1.00e-04
|
| 133 |
+
[2026-04-25 20:16:02] Epoch 1 | Step 550 | Loss: 1.2779 | LR: 1.00e-04
|
| 134 |
+
[2026-04-25 20:16:05] Epoch 1 | Step 560 | Loss: 1.2813 | LR: 1.00e-04
|
| 135 |
+
[2026-04-25 20:16:07] Epoch 1 | Step 570 | Loss: 1.2866 | LR: 1.00e-04
|
| 136 |
+
[2026-04-25 20:16:10] Epoch 1 | Step 580 | Loss: 1.2923 | LR: 1.00e-04
|
| 137 |
+
[2026-04-25 20:16:12] Epoch 1 | Step 590 | Loss: 1.2968 | LR: 1.00e-04
|
| 138 |
+
[2026-04-25 20:16:15] Epoch 1 | Step 600 | Loss: 1.3014 | LR: 1.00e-04
|
| 139 |
+
[2026-04-25 20:16:17] Epoch 1 | Step 610 | Loss: 1.3082 | LR: 1.00e-04
|
| 140 |
+
[2026-04-25 20:16:20] Epoch 1 | Step 620 | Loss: 1.3145 | LR: 1.00e-04
|
| 141 |
+
[2026-04-25 20:16:22] Epoch 1 | Step 630 | Loss: 1.3201 | LR: 1.00e-04
|
| 142 |
+
[2026-04-25 20:16:25] Epoch 1 | Step 640 | Loss: 1.3257 | LR: 1.00e-04
|
| 143 |
+
[2026-04-25 20:16:27] Epoch 1 | Step 650 | Loss: 1.3311 | LR: 1.00e-04
|
| 144 |
+
[2026-04-25 20:16:30] Epoch 1 | Step 660 | Loss: 1.3362 | LR: 1.00e-04
|
| 145 |
+
[2026-04-25 20:16:32] Epoch 1 | Step 670 | Loss: 1.3399 | LR: 1.00e-04
|
| 146 |
+
[2026-04-25 20:16:35] Epoch 1 | Step 680 | Loss: 1.3452 | LR: 1.00e-04
|
| 147 |
+
[2026-04-25 20:16:38] Epoch 1 | Step 690 | Loss: 1.3482 | LR: 1.00e-04
|
| 148 |
+
[2026-04-25 20:16:40] Epoch 1 | Step 700 | Loss: 1.3537 | LR: 1.00e-04
|
| 149 |
+
[2026-04-25 20:16:42] Epoch 1 | Step 710 | Loss: 1.3568 | LR: 1.00e-04
|
| 150 |
+
[2026-04-25 20:16:45] Epoch 1 | Step 720 | Loss: 1.3600 | LR: 1.00e-04
|
| 151 |
+
[2026-04-25 20:16:48] Epoch 1 | Step 730 | Loss: 1.3633 | LR: 1.00e-04
|
| 152 |
+
[2026-04-25 20:16:50] Epoch 1 | Step 740 | Loss: 1.3661 | LR: 1.00e-04
|
| 153 |
+
[2026-04-25 20:16:52] Epoch 1 | Step 750 | Loss: 1.3680 | LR: 1.00e-04
|
| 154 |
+
[2026-04-25 20:16:55] Epoch 1 | Step 760 | Loss: 1.3728 | LR: 1.00e-04
|
| 155 |
+
[2026-04-25 20:16:58] Epoch 1 | Step 770 | Loss: 1.3775 | LR: 1.00e-04
|
| 156 |
+
[2026-04-25 20:17:00] Epoch 1 | Step 780 | Loss: 1.3813 | LR: 1.00e-04
|
| 157 |
+
[2026-04-25 20:17:02] Epoch 1 | Step 790 | Loss: 1.3851 | LR: 1.00e-04
|
| 158 |
+
[2026-04-25 20:17:05] Epoch 1 | Step 800 | Loss: 1.3867 | LR: 1.00e-04
|
| 159 |
+
[2026-04-25 20:17:08] Epoch 1 | Step 810 | Loss: 1.3902 | LR: 1.00e-04
|
| 160 |
+
[2026-04-25 20:17:10] Epoch 1 | Step 820 | Loss: 1.3927 | LR: 1.00e-04
|
| 161 |
+
[2026-04-25 20:17:13] Epoch 1 | Step 830 | Loss: 1.3954 | LR: 1.00e-04
|
| 162 |
+
[2026-04-25 20:17:15] Epoch 1 | Step 840 | Loss: 1.3972 | LR: 1.00e-04
|
| 163 |
+
[2026-04-25 20:17:18] Epoch 1 | Step 850 | Loss: 1.3981 | LR: 1.00e-04
|
| 164 |
+
[2026-04-25 20:17:20] Epoch 1 | Step 860 | Loss: 1.4020 | LR: 1.00e-04
|
| 165 |
+
[2026-04-25 20:17:23] Epoch 1 | Step 870 | Loss: 1.4055 | LR: 1.00e-04
|
| 166 |
+
[2026-04-25 20:17:25] Epoch 1 | Step 880 | Loss: 1.4089 | LR: 1.00e-04
|
| 167 |
+
[2026-04-25 20:17:28] Epoch 1 | Step 890 | Loss: 1.4109 | LR: 1.00e-04
|
| 168 |
+
[2026-04-25 20:17:30] Epoch 1 | Step 900 | Loss: 1.4119 | LR: 1.00e-04
|
| 169 |
+
[2026-04-25 20:17:33] Epoch 1 | Step 910 | Loss: 1.4154 | LR: 1.00e-04
|
| 170 |
+
[2026-04-25 20:17:36] Epoch 1 | Step 920 | Loss: 1.4191 | LR: 1.00e-04
|
| 171 |
+
[2026-04-25 20:17:38] Epoch 1 | Step 930 | Loss: 1.4208 | LR: 1.00e-04
|
| 172 |
+
[2026-04-25 20:17:41] Epoch 1 | Step 940 | Loss: 1.4231 | LR: 1.00e-04
|
| 173 |
+
[2026-04-25 20:17:43] Epoch 1 | Step 950 | Loss: 1.4237 | LR: 1.00e-04
|
| 174 |
+
[2026-04-25 20:17:46] Epoch 1 | Step 960 | Loss: 1.4261 | LR: 1.00e-04
|
| 175 |
+
[2026-04-25 20:17:49] Epoch 1 | Step 970 | Loss: 1.4283 | LR: 1.00e-04
|
| 176 |
+
[2026-04-25 20:17:51] Epoch 1 | Step 980 | Loss: 1.4295 | LR: 1.00e-04
|
| 177 |
+
[2026-04-25 20:17:54] Epoch 1 | Step 990 | Loss: 1.4301 | LR: 1.00e-04
|
| 178 |
+
[2026-04-25 20:17:57] Epoch 1 | Step 1000 | Loss: 1.4326 | LR: 1.00e-04
|
| 179 |
+
[2026-04-25 20:17:59] Epoch 1 | Step 1010 | Loss: 1.4358 | LR: 1.00e-04
|
| 180 |
+
[2026-04-25 20:18:02] Epoch 1 | Step 1020 | Loss: 1.4375 | LR: 1.00e-04
|
| 181 |
+
[2026-04-25 20:18:04] Epoch 1 | Step 1030 | Loss: 1.4408 | LR: 1.00e-04
|
| 182 |
+
[2026-04-25 20:18:07] Epoch 1 | Step 1040 | Loss: 1.4410 | LR: 1.00e-04
|
| 183 |
+
[2026-04-25 20:18:09] Epoch 1 | Step 1050 | Loss: 1.4424 | LR: 1.00e-04
|
| 184 |
+
[2026-04-25 20:18:12] Epoch 1 | Step 1060 | Loss: 1.4426 | LR: 1.00e-04
|
| 185 |
+
[2026-04-25 20:18:14] Epoch 1 | Step 1070 | Loss: 1.4437 | LR: 1.00e-04
|
| 186 |
+
[2026-04-25 20:18:17] Epoch 1 | Step 1080 | Loss: 1.4466 | LR: 1.00e-04
|
| 187 |
+
[2026-04-25 20:18:19] Epoch 1 | Step 1090 | Loss: 1.4506 | LR: 1.00e-04
|
| 188 |
+
[2026-04-25 20:18:22] Epoch 1 | Step 1100 | Loss: 1.4514 | LR: 1.00e-04
|
| 189 |
+
[2026-04-25 20:18:24] Epoch 1 | Step 1110 | Loss: 1.4540 | LR: 1.00e-04
|
| 190 |
+
[2026-04-25 20:18:27] Epoch 1 | Step 1120 | Loss: 1.4568 | LR: 1.00e-04
|
| 191 |
+
[2026-04-25 20:18:29] Epoch 1 | Step 1130 | Loss: 1.4586 | LR: 1.00e-04
|
| 192 |
+
[2026-04-25 20:18:32] Epoch 1 | Step 1140 | Loss: 1.4596 | LR: 1.00e-04
|
| 193 |
+
[2026-04-25 20:18:35] Epoch 1 | Step 1150 | Loss: 1.4591 | LR: 1.00e-04
|
| 194 |
+
[2026-04-25 20:18:37] Epoch 1 | Step 1160 | Loss: 1.4616 | LR: 1.00e-04
|
| 195 |
+
[2026-04-25 20:18:40] Epoch 1 | Step 1170 | Loss: 1.4643 | LR: 1.00e-04
|
| 196 |
+
[2026-04-25 20:18:43] Epoch 1 | Step 1180 | Loss: 1.4654 | LR: 1.00e-04
|
| 197 |
+
[2026-04-25 20:18:45] Epoch 1 | Step 1190 | Loss: 1.4673 | LR: 1.00e-04
|
| 198 |
+
[2026-04-25 20:18:48] Epoch 1 | Step 1200 | Loss: 1.4685 | LR: 1.00e-04
|
| 199 |
+
[2026-04-25 20:18:50] Epoch 1 | Step 1210 | Loss: 1.4686 | LR: 1.00e-04
|
| 200 |
+
[2026-04-25 20:18:53] Epoch 1 | Step 1220 | Loss: 1.4682 | LR: 1.00e-04
|
| 201 |
+
[2026-04-25 20:18:55] Epoch 1 | Step 1230 | Loss: 1.4702 | LR: 1.00e-04
|
| 202 |
+
[2026-04-25 20:18:58] Epoch 1 | Step 1240 | Loss: 1.4718 | LR: 1.00e-04
|
| 203 |
+
[2026-04-25 20:19:01] Epoch 1 | Step 1250 | Loss: 1.4726 | LR: 1.00e-04
|
| 204 |
+
[2026-04-25 20:19:03] Epoch 1 | Step 1260 | Loss: 1.4732 | LR: 1.00e-04
|
| 205 |
+
[2026-04-25 20:19:06] Epoch 1 | Step 1270 | Loss: 1.4728 | LR: 1.00e-04
|
| 206 |
+
[2026-04-25 20:19:08] Epoch 1 | Step 1280 | Loss: 1.4744 | LR: 1.00e-04
|
| 207 |
+
[2026-04-25 20:19:11] Epoch 1 | Step 1290 | Loss: 1.4765 | LR: 1.00e-04
|
| 208 |
+
[2026-04-25 20:19:13] Epoch 1 | Step 1300 | Loss: 1.4774 | LR: 1.00e-04
|
| 209 |
+
[2026-04-25 20:19:16] Epoch 1 | Step 1310 | Loss: 1.4791 | LR: 1.00e-04
|
| 210 |
+
[2026-04-25 20:19:19] Epoch 1 | Step 1320 | Loss: 1.4803 | LR: 1.00e-04
|
| 211 |
+
[2026-04-25 20:19:21] Epoch 1 | Step 1330 | Loss: 1.4807 | LR: 1.00e-04
|
| 212 |
+
[2026-04-25 20:19:24] Epoch 1 | Step 1340 | Loss: 1.4818 | LR: 1.00e-04
|
| 213 |
+
[2026-04-25 20:19:26] Epoch 1 | Step 1350 | Loss: 1.4838 | LR: 1.00e-04
|
| 214 |
+
[2026-04-25 20:19:29] Epoch 1 | Step 1360 | Loss: 1.4849 | LR: 1.00e-04
|
| 215 |
+
[2026-04-25 20:19:31] Epoch 1 | Step 1370 | Loss: 1.4851 | LR: 1.00e-04
|
| 216 |
+
[2026-04-25 20:19:34] Epoch 1 | Step 1380 | Loss: 1.4870 | LR: 1.00e-04
|
| 217 |
+
[2026-04-25 20:19:37] Epoch 1 | Step 1390 | Loss: 1.4888 | LR: 1.00e-04
|
| 218 |
+
[2026-04-25 20:19:39] Epoch 1 | Step 1400 | Loss: 1.4898 | LR: 1.00e-04
|
| 219 |
+
[2026-04-25 20:19:42] Epoch 1 | Step 1410 | Loss: 1.4893 | LR: 1.00e-04
|
| 220 |
+
[2026-04-25 20:19:44] Epoch 1 | Step 1420 | Loss: 1.4903 | LR: 1.00e-04
|
| 221 |
+
[2026-04-25 20:19:47] Epoch 1 | Step 1430 | Loss: 1.4912 | LR: 1.00e-04
|
| 222 |
+
[2026-04-25 20:19:49] Epoch 1 | Step 1440 | Loss: 1.4920 | LR: 1.00e-04
|
| 223 |
+
[2026-04-25 20:19:52] Epoch 1 | Step 1450 | Loss: 1.4931 | LR: 1.00e-04
|
| 224 |
+
[2026-04-25 20:19:54] Epoch 1 | Step 1460 | Loss: 1.4931 | LR: 1.00e-04
|
| 225 |
+
[2026-04-25 20:19:57] Epoch 1 | Step 1470 | Loss: 1.4947 | LR: 1.00e-04
|
| 226 |
+
[2026-04-25 20:20:00] Epoch 1 | Step 1480 | Loss: 1.4959 | LR: 1.00e-04
|
| 227 |
+
[2026-04-25 20:20:02] Epoch 1 | Step 1490 | Loss: 1.4975 | LR: 1.00e-04
|
| 228 |
+
[2026-04-25 20:20:05] Epoch 1 | Step 1500 | Loss: 1.4981 | LR: 1.00e-04
|
| 229 |
+
[2026-04-25 20:20:07] Epoch 1 | Step 1510 | Loss: 1.4990 | LR: 1.00e-04
|
| 230 |
+
[2026-04-25 20:20:10] Epoch 1 | Step 1520 | Loss: 1.5006 | LR: 1.00e-04
|
| 231 |
+
[2026-04-25 20:20:12] Epoch 1 | Step 1530 | Loss: 1.5010 | LR: 1.00e-04
|
| 232 |
+
[2026-04-25 20:20:15] Epoch 1 | Step 1540 | Loss: 1.5027 | LR: 1.00e-04
|
| 233 |
+
[2026-04-25 20:20:17] Epoch 1 | Step 1550 | Loss: 1.5037 | LR: 1.00e-04
|
| 234 |
+
[2026-04-25 20:20:20] Epoch 1 | Step 1560 | Loss: 1.5039 | LR: 1.00e-04
|
| 235 |
+
[2026-04-25 20:20:22] Epoch 1 | Step 1570 | Loss: 1.5059 | LR: 1.00e-04
|
| 236 |
+
[2026-04-25 20:20:25] Epoch 1 | Step 1580 | Loss: 1.5061 | LR: 1.00e-04
|
| 237 |
+
[2026-04-25 20:20:27] Epoch 1 | Step 1590 | Loss: 1.5067 | LR: 1.00e-04
|
| 238 |
+
[2026-04-25 20:20:30] Epoch 1 | Step 1600 | Loss: 1.5071 | LR: 1.00e-04
|
| 239 |
+
[2026-04-25 20:20:32] Epoch 1 | Step 1610 | Loss: 1.5065 | LR: 1.00e-04
|
| 240 |
+
[2026-04-25 20:20:35] Epoch 1 | Step 1620 | Loss: 1.5059 | LR: 1.00e-04
|
| 241 |
+
[2026-04-25 20:20:37] Epoch 1 | Step 1630 | Loss: 1.5075 | LR: 1.00e-04
|
| 242 |
+
[2026-04-25 20:20:40] Epoch 1 | Step 1640 | Loss: 1.5084 | LR: 1.00e-04
|
| 243 |
+
[2026-04-25 20:20:42] Epoch 1 | Step 1650 | Loss: 1.5082 | LR: 1.00e-04
|
| 244 |
+
[2026-04-25 20:20:44] Epoch 1 | Step 1660 | Loss: 1.5083 | LR: 1.00e-04
|
| 245 |
+
[2026-04-25 20:20:47] Epoch 1 | Step 1670 | Loss: 1.5100 | LR: 1.00e-04
|
| 246 |
+
[2026-04-25 20:20:49] Epoch 1 | Step 1680 | Loss: 1.5108 | LR: 1.00e-04
|
| 247 |
+
[2026-04-25 20:20:52] Epoch 1 | Step 1690 | Loss: 1.5118 | LR: 1.00e-04
|
| 248 |
+
[2026-04-25 20:20:54] Epoch 1 | Step 1700 | Loss: 1.5122 | LR: 1.00e-04
|
| 249 |
+
[2026-04-25 20:20:57] Epoch 1 | Step 1710 | Loss: 1.5127 | LR: 1.00e-04
|
| 250 |
+
[2026-04-25 20:20:59] Epoch 1 | Step 1720 | Loss: 1.5128 | LR: 1.00e-04
|
| 251 |
+
[2026-04-25 20:21:02] Epoch 1 | Step 1730 | Loss: 1.5132 | LR: 1.00e-04
|
| 252 |
+
[2026-04-25 20:21:04] Epoch 1 | Step 1740 | Loss: 1.5139 | LR: 1.00e-04
|
| 253 |
+
[2026-04-25 20:21:07] Epoch 1 | Step 1750 | Loss: 1.5154 | LR: 1.00e-04
|
| 254 |
+
[2026-04-25 20:21:10] Epoch 1 | Step 1760 | Loss: 1.5155 | LR: 1.00e-04
|
| 255 |
+
[2026-04-25 20:21:12] Epoch 1 | Step 1770 | Loss: 1.5169 | LR: 1.00e-04
|
| 256 |
+
[2026-04-25 20:21:15] Epoch 1 | Step 1780 | Loss: 1.5172 | LR: 1.00e-04
|
| 257 |
+
[2026-04-25 20:21:17] Epoch 1 | Step 1790 | Loss: 1.5180 | LR: 1.00e-04
|
| 258 |
+
[2026-04-25 20:21:20] Epoch 1 | Step 1800 | Loss: 1.5179 | LR: 1.00e-04
|
| 259 |
+
[2026-04-25 20:21:22] Epoch 1 | Step 1810 | Loss: 1.5186 | LR: 1.00e-04
|
| 260 |
+
[2026-04-25 20:21:25] Epoch 1 | Step 1820 | Loss: 1.5198 | LR: 1.00e-04
|
| 261 |
+
[2026-04-25 20:21:27] Epoch 1 | Step 1830 | Loss: 1.5205 | LR: 1.00e-04
|
| 262 |
+
[2026-04-25 20:21:30] Epoch 1 | Step 1840 | Loss: 1.5216 | LR: 1.00e-04
|
| 263 |
+
[2026-04-25 20:21:33] Epoch 1 | Step 1850 | Loss: 1.5217 | LR: 1.00e-04
|
| 264 |
+
[2026-04-25 20:21:35] Epoch 1 | Step 1860 | Loss: 1.5223 | LR: 1.00e-04
|
| 265 |
+
[2026-04-25 20:21:38] Epoch 1 | Step 1870 | Loss: 1.5225 | LR: 1.00e-04
|
| 266 |
+
[2026-04-25 20:21:40] Epoch 1 | Step 1880 | Loss: 1.5225 | LR: 1.00e-04
|
| 267 |
+
[2026-04-25 20:21:43] Epoch 1 | Step 1890 | Loss: 1.5236 | LR: 1.00e-04
|
| 268 |
+
[2026-04-25 20:21:46] Epoch 1 | Step 1900 | Loss: 1.5237 | LR: 1.00e-04
|
| 269 |
+
[2026-04-25 20:21:48] Epoch 1 | Step 1910 | Loss: 1.5251 | LR: 1.00e-04
|
| 270 |
+
[2026-04-25 20:21:51] Epoch 1 | Step 1920 | Loss: 1.5261 | LR: 1.00e-04
|
| 271 |
+
[2026-04-25 20:21:53] Epoch 1 | Step 1930 | Loss: 1.5267 | LR: 1.00e-04
|
| 272 |
+
[2026-04-25 20:21:56] Epoch 1 | Step 1940 | Loss: 1.5268 | LR: 1.00e-04
|
| 273 |
+
[2026-04-25 20:21:59] Epoch 1 | Step 1950 | Loss: 1.5263 | LR: 1.00e-04
|
| 274 |
+
[2026-04-25 20:22:01] Epoch 1 | Step 1960 | Loss: 1.5272 | LR: 1.00e-04
|
| 275 |
+
[2026-04-25 20:22:04] Epoch 1 | Step 1970 | Loss: 1.5278 | LR: 1.00e-04
|
| 276 |
+
[2026-04-25 20:22:06] Epoch 1 | Step 1980 | Loss: 1.5290 | LR: 1.00e-04
|
| 277 |
+
[2026-04-25 20:22:08] Epoch 1 | Step 1990 | Loss: 1.5296 | LR: 1.00e-04
|
| 278 |
+
[2026-04-25 20:22:11] Epoch 1 | Step 2000 | Loss: 1.5301 | LR: 1.00e-04
|
| 279 |
+
[2026-04-25 20:22:11] Validation | Batch 10/84 | Loss: 1.5171
|
| 280 |
+
[2026-04-25 20:22:11] Validation | Batch 20/84 | Loss: 1.5820
|
| 281 |
+
[2026-04-25 20:22:12] Validation | Batch 30/84 | Loss: 1.7006
|
| 282 |
+
[2026-04-25 20:22:12] Validation | Batch 40/84 | Loss: 1.6892
|
| 283 |
+
[2026-04-25 20:22:13] Validation | Batch 50/84 | Loss: 1.6662
|
| 284 |
+
[2026-04-25 20:22:13] Validation | Batch 60/84 | Loss: 1.6410
|
| 285 |
+
[2026-04-25 20:22:14] Validation | Batch 70/84 | Loss: 1.6259
|
| 286 |
+
[2026-04-25 20:22:14] Validation | Batch 80/84 | Loss: 1.6316
|
| 287 |
+
[2026-04-25 20:22:14] Validation | Batch 84/84 | Loss: 1.6241
|
| 288 |
+
[2026-04-25 20:22:15] Validation | Loss: 1.6241 | PPL: 5.23 | Time: 3.87s
|
| 289 |
+
[2026-04-25 20:22:17] New best model saved! Val loss: 1.6241
|
| 290 |
+
[2026-04-25 20:22:20] Epoch 1 | Step 2010 | Loss: 1.5307 | LR: 1.00e-04
|
| 291 |
+
[2026-04-25 20:22:22] Epoch 1 | Step 2020 | Loss: 1.5311 | LR: 1.00e-04
|
| 292 |
+
[2026-04-25 20:22:25] Epoch 1 | Step 2030 | Loss: 1.5320 | LR: 1.00e-04
|
| 293 |
+
[2026-04-25 20:22:27] Epoch 1 | Step 2040 | Loss: 1.5323 | LR: 1.00e-04
|
| 294 |
+
[2026-04-25 20:22:30] Epoch 1 | Step 2050 | Loss: 1.5330 | LR: 1.00e-04
|
| 295 |
+
[2026-04-25 20:22:33] Epoch 1 | Step 2060 | Loss: 1.5336 | LR: 1.00e-04
|
| 296 |
+
[2026-04-25 20:22:35] Epoch 1 | Step 2070 | Loss: 1.5330 | LR: 1.00e-04
|
| 297 |
+
[2026-04-25 20:22:37] Epoch 1 | Step 2080 | Loss: 1.5334 | LR: 1.00e-04
|
| 298 |
+
[2026-04-25 20:22:40] Epoch 1 | Step 2090 | Loss: 1.5343 | LR: 1.00e-04
|
| 299 |
+
[2026-04-25 20:22:42] Epoch 1 | Step 2100 | Loss: 1.5348 | LR: 1.00e-04
|
| 300 |
+
[2026-04-25 20:22:45] Epoch 1 | Step 2110 | Loss: 1.5352 | LR: 1.00e-04
|
| 301 |
+
[2026-04-25 20:22:48] Epoch 1 | Step 2120 | Loss: 1.5351 | LR: 1.00e-04
|
| 302 |
+
[2026-04-25 20:22:50] Epoch 1 | Step 2130 | Loss: 1.5360 | LR: 1.00e-04
|
| 303 |
+
[2026-04-25 20:22:53] Epoch 1 | Step 2140 | Loss: 1.5362 | LR: 1.00e-04
|
| 304 |
+
[2026-04-25 20:22:55] Epoch 1 | Step 2150 | Loss: 1.5364 | LR: 1.00e-04
|
| 305 |
+
[2026-04-25 20:22:58] Epoch 1 | Step 2160 | Loss: 1.5376 | LR: 1.00e-04
|
| 306 |
+
[2026-04-25 20:23:00] Epoch 1 | Step 2170 | Loss: 1.5378 | LR: 1.00e-04
|
| 307 |
+
[2026-04-25 20:23:02] Epoch 1 | Step 2180 | Loss: 1.5377 | LR: 1.00e-04
|
| 308 |
+
[2026-04-25 20:23:05] Epoch 1 | Step 2190 | Loss: 1.5383 | LR: 1.00e-04
|
| 309 |
+
[2026-04-25 20:23:07] Epoch 1 | Step 2200 | Loss: 1.5383 | LR: 1.00e-04
|
| 310 |
+
[2026-04-25 20:23:10] Epoch 1 | Step 2210 | Loss: 1.5385 | LR: 1.00e-04
|
| 311 |
+
[2026-04-25 20:23:12] Epoch 1 | Step 2220 | Loss: 1.5400 | LR: 1.00e-04
|
| 312 |
+
[2026-04-25 20:23:15] Epoch 1 | Step 2230 | Loss: 1.5411 | LR: 1.00e-04
|
| 313 |
+
[2026-04-25 20:23:17] Epoch 1 | Step 2240 | Loss: 1.5420 | LR: 1.00e-04
|
| 314 |
+
[2026-04-25 20:23:20] Epoch 1 | Step 2250 | Loss: 1.5429 | LR: 1.00e-04
|
| 315 |
+
[2026-04-25 20:23:22] Epoch 1 | Step 2260 | Loss: 1.5429 | LR: 1.00e-04
|
| 316 |
+
[2026-04-25 20:23:25] Epoch 1 | Step 2270 | Loss: 1.5435 | LR: 1.00e-04
|
| 317 |
+
[2026-04-25 20:23:27] Epoch 1 | Step 2280 | Loss: 1.5441 | LR: 1.00e-04
|
| 318 |
+
[2026-04-25 20:23:30] Epoch 1 | Step 2290 | Loss: 1.5455 | LR: 1.00e-04
|
| 319 |
+
[2026-04-25 20:23:32] Epoch 1 | Step 2300 | Loss: 1.5461 | LR: 1.00e-04
|
| 320 |
+
[2026-04-25 20:23:35] Epoch 1 | Step 2310 | Loss: 1.5464 | LR: 1.00e-04
|
| 321 |
+
[2026-04-25 20:23:37] Epoch 1 | Step 2320 | Loss: 1.5469 | LR: 1.00e-04
|
| 322 |
+
[2026-04-25 20:23:40] Epoch 1 | Step 2330 | Loss: 1.5473 | LR: 1.00e-04
|
| 323 |
+
[2026-04-25 20:23:42] Epoch 1 | Step 2340 | Loss: 1.5475 | LR: 1.00e-04
|
| 324 |
+
[2026-04-25 20:23:45] Epoch 1 | Step 2350 | Loss: 1.5475 | LR: 1.00e-04
|
| 325 |
+
[2026-04-25 20:23:47] Epoch 1 | Step 2360 | Loss: 1.5482 | LR: 1.00e-04
|
| 326 |
+
[2026-04-25 20:23:50] Epoch 1 | Step 2370 | Loss: 1.5483 | LR: 1.00e-04
|
| 327 |
+
[2026-04-25 20:23:52] Epoch 1 | Step 2380 | Loss: 1.5485 | LR: 1.00e-04
|
| 328 |
+
[2026-04-25 20:23:55] Epoch 1 | Step 2390 | Loss: 1.5492 | LR: 1.00e-04
|
| 329 |
+
[2026-04-25 20:23:57] Epoch 1 | Step 2400 | Loss: 1.5490 | LR: 1.00e-04
|
| 330 |
+
[2026-04-25 20:24:00] Epoch 1 | Step 2410 | Loss: 1.5502 | LR: 1.00e-04
|
| 331 |
+
[2026-04-25 20:24:02] Epoch 1 | Step 2420 | Loss: 1.5507 | LR: 1.00e-04
|
| 332 |
+
[2026-04-25 20:24:05] Epoch 1 | Step 2430 | Loss: 1.5513 | LR: 1.00e-04
|
| 333 |
+
[2026-04-25 20:24:07] Epoch 1 | Step 2440 | Loss: 1.5511 | LR: 1.00e-04
|
| 334 |
+
[2026-04-25 20:24:10] Epoch 1 | Step 2450 | Loss: 1.5511 | LR: 1.00e-04
|
| 335 |
+
[2026-04-25 20:24:12] Epoch 1 | Step 2460 | Loss: 1.5516 | LR: 1.00e-04
|
| 336 |
+
[2026-04-25 20:24:15] Epoch 1 | Step 2470 | Loss: 1.5521 | LR: 1.00e-04
|
| 337 |
+
[2026-04-25 20:24:18] Epoch 1 | Step 2480 | Loss: 1.5526 | LR: 1.00e-04
|
| 338 |
+
[2026-04-25 20:24:20] Epoch 1 | Step 2490 | Loss: 1.5522 | LR: 1.00e-04
|
| 339 |
+
[2026-04-25 20:24:23] Epoch 1 | Step 2500 | Loss: 1.5523 | LR: 1.00e-04
|
| 340 |
+
[2026-04-25 20:24:25] Epoch 1 | Step 2510 | Loss: 1.5528 | LR: 1.00e-04
|
| 341 |
+
[2026-04-25 20:24:28] Epoch 1 | Step 2520 | Loss: 1.5526 | LR: 1.00e-04
|
| 342 |
+
[2026-04-25 20:24:30] Epoch 1 | Step 2530 | Loss: 1.5525 | LR: 1.00e-04
|
| 343 |
+
[2026-04-25 20:24:33] Epoch 1 | Step 2540 | Loss: 1.5528 | LR: 1.00e-04
|
| 344 |
+
[2026-04-25 20:24:36] Epoch 1 | Step 2550 | Loss: 1.5524 | LR: 1.00e-04
|
| 345 |
+
[2026-04-25 20:24:38] Epoch 1 | Step 2560 | Loss: 1.5531 | LR: 1.00e-04
|
| 346 |
+
[2026-04-25 20:24:41] Epoch 1 | Step 2570 | Loss: 1.5537 | LR: 1.00e-04
|
| 347 |
+
[2026-04-25 20:24:44] Epoch 1 | Step 2580 | Loss: 1.5546 | LR: 1.00e-04
|
| 348 |
+
[2026-04-25 20:24:46] Epoch 1 | Step 2590 | Loss: 1.5551 | LR: 1.00e-04
|
| 349 |
+
[2026-04-25 20:24:49] Epoch 1 | Step 2600 | Loss: 1.5555 | LR: 1.00e-04
|
| 350 |
+
[2026-04-25 20:24:51] Epoch 1 | Step 2610 | Loss: 1.5558 | LR: 1.00e-04
|
| 351 |
+
[2026-04-25 20:24:53] Epoch 1 | Step 2620 | Loss: 1.5558 | LR: 1.00e-04
|
| 352 |
+
[2026-04-25 20:24:56] Epoch 1 | Step 2630 | Loss: 1.5557 | LR: 1.00e-04
|
| 353 |
+
[2026-04-25 20:24:58] Epoch 1 | Step 2640 | Loss: 1.5563 | LR: 1.00e-04
|
| 354 |
+
[2026-04-25 20:25:01] Epoch 1 | Step 2650 | Loss: 1.5562 | LR: 1.00e-04
|
| 355 |
+
[2026-04-25 20:25:04] Epoch 1 | Step 2660 | Loss: 1.5565 | LR: 1.00e-04
|
| 356 |
+
[2026-04-25 20:25:06] Epoch 1 | Step 2670 | Loss: 1.5565 | LR: 1.00e-04
|
| 357 |
+
[2026-04-25 20:25:09] Epoch 1 | Step 2680 | Loss: 1.5565 | LR: 1.00e-04
|
| 358 |
+
[2026-04-25 20:25:11] Epoch 1 | Step 2690 | Loss: 1.5568 | LR: 1.00e-04
|
| 359 |
+
[2026-04-25 20:25:14] Epoch 1 | Step 2700 | Loss: 1.5568 | LR: 1.00e-04
|
| 360 |
+
[2026-04-25 20:25:16] Epoch 1 | Step 2710 | Loss: 1.5566 | LR: 1.00e-04
|
| 361 |
+
[2026-04-25 20:25:19] Epoch 1 | Step 2720 | Loss: 1.5572 | LR: 1.00e-04
|
| 362 |
+
[2026-04-25 20:25:21] Epoch 1 | Step 2730 | Loss: 1.5574 | LR: 1.00e-04
|
| 363 |
+
[2026-04-25 20:25:24] Epoch 1 | Step 2740 | Loss: 1.5581 | LR: 1.00e-04
|
| 364 |
+
[2026-04-25 20:25:26] Epoch 1 | Step 2750 | Loss: 1.5587 | LR: 1.00e-04
|
| 365 |
+
[2026-04-25 20:25:29] Epoch 1 | Step 2760 | Loss: 1.5583 | LR: 1.00e-04
|
| 366 |
+
[2026-04-25 20:25:31] Epoch 1 | Step 2770 | Loss: 1.5584 | LR: 1.00e-04
|
| 367 |
+
[2026-04-25 20:25:34] Epoch 1 | Step 2780 | Loss: 1.5593 | LR: 1.00e-04
|
| 368 |
+
[2026-04-25 20:25:36] Epoch 1 | Step 2790 | Loss: 1.5594 | LR: 1.00e-04
|
| 369 |
+
[2026-04-25 20:25:39] Epoch 1 | Step 2800 | Loss: 1.5594 | LR: 1.00e-04
|
| 370 |
+
[2026-04-25 20:25:41] Epoch 1 | Step 2810 | Loss: 1.5602 | LR: 1.00e-04
|
| 371 |
+
[2026-04-25 20:25:44] Epoch 1 | Step 2820 | Loss: 1.5605 | LR: 1.00e-04
|
| 372 |
+
[2026-04-25 20:25:46] Epoch 1 | Step 2830 | Loss: 1.5604 | LR: 1.00e-04
|
| 373 |
+
[2026-04-25 20:25:49] Epoch 1 | Step 2840 | Loss: 1.5615 | LR: 1.00e-04
|
| 374 |
+
[2026-04-25 20:25:51] Epoch 1 | Step 2850 | Loss: 1.5618 | LR: 1.00e-04
|
| 375 |
+
[2026-04-25 20:25:54] Epoch 1 | Step 2860 | Loss: 1.5620 | LR: 1.00e-04
|
| 376 |
+
[2026-04-25 20:25:56] Epoch 1 | Step 2870 | Loss: 1.5624 | LR: 1.00e-04
|
| 377 |
+
[2026-04-25 20:25:59] Epoch 1 | Step 2880 | Loss: 1.5624 | LR: 1.00e-04
|
| 378 |
+
[2026-04-25 20:26:02] Epoch 1 | Step 2890 | Loss: 1.5625 | LR: 1.00e-04
|
| 379 |
+
[2026-04-25 20:26:04] Epoch 1 | Step 2900 | Loss: 1.5620 | LR: 1.00e-04
|
| 380 |
+
[2026-04-25 20:26:07] Epoch 1 | Step 2910 | Loss: 1.5624 | LR: 1.00e-04
|
| 381 |
+
[2026-04-25 20:26:10] Epoch 1 | Step 2920 | Loss: 1.5628 | LR: 1.00e-04
|
| 382 |
+
[2026-04-25 20:26:12] Epoch 1 | Step 2930 | Loss: 1.5627 | LR: 1.00e-04
|
| 383 |
+
[2026-04-25 20:26:15] Epoch 1 | Step 2940 | Loss: 1.5626 | LR: 1.00e-04
|
| 384 |
+
[2026-04-25 20:26:17] Epoch 1 | Step 2950 | Loss: 1.5634 | LR: 1.00e-04
|
| 385 |
+
[2026-04-25 20:26:20] Epoch 1 | Step 2960 | Loss: 1.5637 | LR: 1.00e-04
|
| 386 |
+
[2026-04-25 20:26:23] Epoch 1 | Step 2970 | Loss: 1.5641 | LR: 1.00e-04
|
| 387 |
+
[2026-04-25 20:26:25] Epoch 1 | Step 2980 | Loss: 1.5642 | LR: 1.00e-04
|
| 388 |
+
[2026-04-25 20:26:28] Epoch 1 | Step 2990 | Loss: 1.5646 | LR: 1.00e-04
|
| 389 |
+
[2026-04-25 20:26:31] Epoch 1 | Step 3000 | Loss: 1.5647 | LR: 1.00e-04
|
| 390 |
+
[2026-04-25 20:26:33] Epoch 1 | Step 3010 | Loss: 1.5650 | LR: 1.00e-04
|
| 391 |
+
[2026-04-25 20:26:36] Epoch 1 | Step 3020 | Loss: 1.5650 | LR: 1.00e-04
|
| 392 |
+
[2026-04-25 20:26:38] Epoch 1 | Step 3030 | Loss: 1.5649 | LR: 1.00e-04
|
| 393 |
+
[2026-04-25 20:26:41] Epoch 1 | Step 3040 | Loss: 1.5644 | LR: 1.00e-04
|
| 394 |
+
[2026-04-25 20:26:43] Epoch 1 | Step 3050 | Loss: 1.5641 | LR: 1.00e-04
|
| 395 |
+
[2026-04-25 20:26:46] Epoch 1 | Step 3060 | Loss: 1.5644 | LR: 1.00e-04
|
| 396 |
+
[2026-04-25 20:26:48] Epoch 1 | Step 3070 | Loss: 1.5644 | LR: 1.00e-04
|
| 397 |
+
[2026-04-25 20:26:51] Epoch 1 | Step 3080 | Loss: 1.5647 | LR: 1.00e-04
|
| 398 |
+
[2026-04-25 20:26:54] Epoch 1 | Step 3090 | Loss: 1.5645 | LR: 1.00e-04
|
| 399 |
+
[2026-04-25 20:26:56] Epoch 1 | Step 3100 | Loss: 1.5646 | LR: 1.00e-04
|
| 400 |
+
[2026-04-25 20:26:58] Epoch 1 | Step 3110 | Loss: 1.5645 | LR: 1.00e-04
|
| 401 |
+
[2026-04-25 20:27:01] Epoch 1 | Step 3120 | Loss: 1.5655 | LR: 1.00e-04
|
| 402 |
+
[2026-04-25 20:27:04] Epoch 1 | Step 3130 | Loss: 1.5655 | LR: 1.00e-04
|
| 403 |
+
[2026-04-25 20:27:06] Epoch 1 | Step 3140 | Loss: 1.5659 | LR: 1.00e-04
|
| 404 |
+
[2026-04-25 20:27:09] Epoch 1 | Step 3150 | Loss: 1.5664 | LR: 1.00e-04
|
| 405 |
+
[2026-04-25 20:27:11] Epoch 1 | Step 3160 | Loss: 1.5666 | LR: 1.00e-04
|
| 406 |
+
[2026-04-25 20:27:14] Epoch 1 | Step 3170 | Loss: 1.5669 | LR: 1.00e-04
|
| 407 |
+
[2026-04-25 20:27:16] Epoch 1 | Step 3180 | Loss: 1.5673 | LR: 1.00e-04
|
| 408 |
+
[2026-04-25 20:27:19] Epoch 1 | Step 3190 | Loss: 1.5670 | LR: 1.00e-04
|
| 409 |
+
[2026-04-25 20:27:21] Epoch 1 | Step 3200 | Loss: 1.5671 | LR: 1.00e-04
|
| 410 |
+
[2026-04-25 20:27:24] Epoch 1 | Step 3210 | Loss: 1.5670 | LR: 1.00e-04
|
| 411 |
+
[2026-04-25 20:27:26] Epoch 1 | Step 3220 | Loss: 1.5668 | LR: 1.00e-04
|
| 412 |
+
[2026-04-25 20:27:29] Epoch 1 | Step 3230 | Loss: 1.5675 | LR: 1.00e-04
|
| 413 |
+
[2026-04-25 20:27:31] Epoch 1 | Step 3240 | Loss: 1.5673 | LR: 1.00e-04
|
| 414 |
+
[2026-04-25 20:27:33] Epoch 1 | Step 3250 | Loss: 1.5676 | LR: 1.00e-04
|
| 415 |
+
[2026-04-25 20:27:36] Epoch 1 | Step 3260 | Loss: 1.5680 | LR: 1.00e-04
|
| 416 |
+
[2026-04-25 20:27:38] Epoch 1 | Step 3270 | Loss: 1.5682 | LR: 1.00e-04
|
| 417 |
+
[2026-04-25 20:27:41] Epoch 1 | Step 3280 | Loss: 1.5678 | LR: 1.00e-04
|
| 418 |
+
[2026-04-25 20:27:44] Epoch 1 | Step 3290 | Loss: 1.5681 | LR: 1.00e-04
|
| 419 |
+
[2026-04-25 20:27:46] Epoch 1 | Step 3300 | Loss: 1.5686 | LR: 1.00e-04
|
| 420 |
+
[2026-04-25 20:27:49] Epoch 1 | Step 3310 | Loss: 1.5687 | LR: 1.00e-04
|
| 421 |
+
[2026-04-25 20:27:51] Epoch 1 | Step 3320 | Loss: 1.5691 | LR: 1.00e-04
|
| 422 |
+
[2026-04-25 20:27:54] Epoch 1 | Step 3330 | Loss: 1.5691 | LR: 1.00e-04
|
| 423 |
+
[2026-04-25 20:27:57] Epoch 1 | Step 3340 | Loss: 1.5695 | LR: 1.00e-04
|
| 424 |
+
[2026-04-25 20:27:59] Epoch 1 | Step 3350 | Loss: 1.5691 | LR: 1.00e-04
|
| 425 |
+
[2026-04-25 20:28:02] Epoch 1 | Step 3360 | Loss: 1.5694 | LR: 1.00e-04
|
| 426 |
+
[2026-04-25 20:28:04] Epoch 1 | Step 3370 | Loss: 1.5699 | LR: 1.00e-04
|
| 427 |
+
[2026-04-25 20:28:07] Epoch 1 | Step 3380 | Loss: 1.5699 | LR: 1.00e-04
|
| 428 |
+
[2026-04-25 20:28:10] Epoch 1 | Step 3390 | Loss: 1.5703 | LR: 1.00e-04
|
| 429 |
+
[2026-04-25 20:28:12] Epoch 1 | Step 3400 | Loss: 1.5709 | LR: 1.00e-04
|
| 430 |
+
[2026-04-25 20:28:15] Epoch 1 | Step 3410 | Loss: 1.5709 | LR: 1.00e-04
|
| 431 |
+
[2026-04-25 20:28:18] Epoch 1 | Step 3420 | Loss: 1.5706 | LR: 1.00e-04
|
| 432 |
+
[2026-04-25 20:28:20] Epoch 1 | Step 3430 | Loss: 1.5710 | LR: 1.00e-04
|
| 433 |
+
[2026-04-25 20:28:23] Epoch 1 | Step 3440 | Loss: 1.5715 | LR: 1.00e-04
|
| 434 |
+
[2026-04-25 20:28:25] Epoch 1 | Step 3450 | Loss: 1.5715 | LR: 1.00e-04
|
| 435 |
+
[2026-04-25 20:28:28] Epoch 1 | Step 3460 | Loss: 1.5717 | LR: 1.00e-04
|
| 436 |
+
[2026-04-25 20:28:31] Epoch 1 | Step 3470 | Loss: 1.5719 | LR: 1.00e-04
|
| 437 |
+
[2026-04-25 20:28:33] Epoch 1 | Step 3480 | Loss: 1.5719 | LR: 1.00e-04
|
| 438 |
+
[2026-04-25 20:28:35] Epoch 1 | Step 3490 | Loss: 1.5718 | LR: 1.00e-04
|
| 439 |
+
[2026-04-25 20:28:38] Epoch 1 | Step 3500 | Loss: 1.5716 | LR: 1.00e-04
|
| 440 |
+
[2026-04-25 20:28:41] Epoch 1 | Step 3510 | Loss: 1.5723 | LR: 1.00e-04
|
| 441 |
+
[2026-04-25 20:28:43] Epoch 1 | Step 3520 | Loss: 1.5721 | LR: 1.00e-04
|
| 442 |
+
[2026-04-25 20:28:46] Epoch 1 | Step 3530 | Loss: 1.5726 | LR: 1.00e-04
|
| 443 |
+
[2026-04-25 20:28:48] Epoch 1 | Step 3540 | Loss: 1.5724 | LR: 1.00e-04
|
| 444 |
+
[2026-04-25 20:28:51] Epoch 1 | Step 3550 | Loss: 1.5723 | LR: 1.00e-04
|
| 445 |
+
[2026-04-25 20:28:54] Epoch 1 | Step 3560 | Loss: 1.5724 | LR: 1.00e-04
|
| 446 |
+
[2026-04-25 20:28:56] Epoch 1 | Step 3570 | Loss: 1.5723 | LR: 1.00e-04
|
| 447 |
+
[2026-04-25 20:28:59] Epoch 1 | Step 3580 | Loss: 1.5725 | LR: 1.00e-04
|
| 448 |
+
[2026-04-25 20:29:01] Epoch 1 | Step 3590 | Loss: 1.5727 | LR: 1.00e-04
|
| 449 |
+
[2026-04-25 20:29:04] Epoch 1 | Step 3600 | Loss: 1.5727 | LR: 1.00e-04
|
| 450 |
+
[2026-04-25 20:29:06] Epoch 1 | Step 3610 | Loss: 1.5727 | LR: 1.00e-04
|
| 451 |
+
[2026-04-25 20:29:09] Epoch 1 | Step 3620 | Loss: 1.5729 | LR: 1.00e-04
|
| 452 |
+
[2026-04-25 20:29:11] Epoch 1 | Step 3630 | Loss: 1.5735 | LR: 1.00e-04
|
| 453 |
+
[2026-04-25 20:29:14] Epoch 1 | Step 3640 | Loss: 1.5739 | LR: 1.00e-04
|
| 454 |
+
[2026-04-25 20:29:16] Epoch 1 | Step 3650 | Loss: 1.5743 | LR: 1.00e-04
|
| 455 |
+
[2026-04-25 20:29:19] Epoch 1 | Step 3660 | Loss: 1.5740 | LR: 1.00e-04
|
| 456 |
+
[2026-04-25 20:29:21] Epoch 1 | Step 3670 | Loss: 1.5740 | LR: 1.00e-04
|
| 457 |
+
[2026-04-25 20:29:24] Epoch 1 | Step 3680 | Loss: 1.5743 | LR: 1.00e-04
|
| 458 |
+
[2026-04-25 20:29:26] Epoch 1 | Step 3690 | Loss: 1.5743 | LR: 1.00e-04
|
| 459 |
+
[2026-04-25 20:29:29] Epoch 1 | Step 3700 | Loss: 1.5742 | LR: 1.00e-04
|
| 460 |
+
[2026-04-25 20:29:31] Epoch 1 | Step 3710 | Loss: 1.5744 | LR: 1.00e-04
|
| 461 |
+
[2026-04-25 20:29:34] Epoch 1 | Step 3720 | Loss: 1.5747 | LR: 1.00e-04
|
| 462 |
+
[2026-04-25 20:29:36] Epoch 1 | Step 3730 | Loss: 1.5748 | LR: 1.00e-04
|
| 463 |
+
[2026-04-25 20:29:39] Epoch 1 | Step 3740 | Loss: 1.5751 | LR: 1.00e-04
|
| 464 |
+
[2026-04-25 20:29:41] Epoch 1 | Step 3750 | Loss: 1.5749 | LR: 1.00e-04
|
| 465 |
+
[2026-04-25 20:29:44] Epoch 1 | Step 3760 | Loss: 1.5752 | LR: 1.00e-04
|
| 466 |
+
[2026-04-25 20:29:47] Epoch 1 | Step 3770 | Loss: 1.5755 | LR: 1.00e-04
|
| 467 |
+
[2026-04-25 20:29:49] Epoch 1 | Step 3780 | Loss: 1.5758 | LR: 1.00e-04
|
| 468 |
+
[2026-04-25 20:29:52] Epoch 1 | Step 3790 | Loss: 1.5760 | LR: 1.00e-04
|
| 469 |
+
[2026-04-25 20:29:54] Epoch 1 | Step 3800 | Loss: 1.5764 | LR: 1.00e-04
|
| 470 |
+
[2026-04-25 20:29:57] Epoch 1 | Step 3810 | Loss: 1.5759 | LR: 1.00e-04
|
| 471 |
+
[2026-04-25 20:29:59] Epoch 1 | Step 3820 | Loss: 1.5757 | LR: 1.00e-04
|
| 472 |
+
[2026-04-25 20:30:02] Epoch 1 | Step 3830 | Loss: 1.5759 | LR: 1.00e-04
|
| 473 |
+
[2026-04-25 20:30:04] Epoch 1 | Step 3840 | Loss: 1.5763 | LR: 1.00e-04
|
| 474 |
+
[2026-04-25 20:30:07] Epoch 1 | Step 3850 | Loss: 1.5760 | LR: 1.00e-04
|
| 475 |
+
[2026-04-25 20:30:10] Epoch 1 | Step 3860 | Loss: 1.5761 | LR: 1.00e-04
|
| 476 |
+
[2026-04-25 20:30:12] Epoch 1 | Step 3870 | Loss: 1.5763 | LR: 1.00e-04
|
| 477 |
+
[2026-04-25 20:30:15] Epoch 1 | Step 3880 | Loss: 1.5761 | LR: 1.00e-04
|
| 478 |
+
[2026-04-25 20:30:18] Epoch 1 | Step 3890 | Loss: 1.5762 | LR: 1.00e-04
|
| 479 |
+
[2026-04-25 20:30:20] Epoch 1 | Step 3900 | Loss: 1.5763 | LR: 1.00e-04
|
| 480 |
+
[2026-04-25 20:30:23] Epoch 1 | Step 3910 | Loss: 1.5766 | LR: 1.00e-04
|
| 481 |
+
[2026-04-25 20:30:25] Epoch 1 | Step 3920 | Loss: 1.5770 | LR: 1.00e-04
|
| 482 |
+
[2026-04-25 20:30:28] Epoch 1 | Step 3930 | Loss: 1.5771 | LR: 1.00e-04
|
| 483 |
+
[2026-04-25 20:30:30] Epoch 1 | Step 3940 | Loss: 1.5772 | LR: 1.00e-04
|
| 484 |
+
[2026-04-25 20:30:33] Epoch 1 | Step 3950 | Loss: 1.5771 | LR: 1.00e-04
|
| 485 |
+
[2026-04-25 20:30:36] Epoch 1 | Step 3960 | Loss: 1.5774 | LR: 1.00e-04
|
| 486 |
+
[2026-04-25 20:30:38] Epoch 1 | Step 3970 | Loss: 1.5774 | LR: 9.99e-05
|
| 487 |
+
[2026-04-25 20:30:41] Epoch 1 | Step 3980 | Loss: 1.5776 | LR: 9.99e-05
|
| 488 |
+
[2026-04-25 20:30:43] Epoch 1 | Step 3990 | Loss: 1.5776 | LR: 9.97e-05
|
| 489 |
+
[2026-04-25 20:30:46] Epoch 1 | Step 4000 | Loss: 1.5778 | LR: 9.95e-05
|
| 490 |
+
[2026-04-25 20:30:46] Validation | Batch 10/84 | Loss: 1.5538
|
| 491 |
+
[2026-04-25 20:30:47] Validation | Batch 20/84 | Loss: 1.5837
|
| 492 |
+
[2026-04-25 20:30:47] Validation | Batch 30/84 | Loss: 1.6910
|
| 493 |
+
[2026-04-25 20:30:48] Validation | Batch 40/84 | Loss: 1.6895
|
| 494 |
+
[2026-04-25 20:30:48] Validation | Batch 50/84 | Loss: 1.6701
|
| 495 |
+
[2026-04-25 20:30:49] Validation | Batch 60/84 | Loss: 1.6431
|
| 496 |
+
[2026-04-25 20:30:49] Validation | Batch 70/84 | Loss: 1.6177
|
| 497 |
+
[2026-04-25 20:30:49] Validation | Batch 80/84 | Loss: 1.6208
|
| 498 |
+
[2026-04-25 20:30:50] Validation | Batch 84/84 | Loss: 1.6048
|
| 499 |
+
[2026-04-25 20:30:50] Validation | Loss: 1.6048 | PPL: 5.10 | Time: 3.82s
|
| 500 |
+
[2026-04-25 20:30:53] New best model saved! Val loss: 1.6048
|
| 501 |
+
[2026-04-25 20:30:55] Epoch 1 | Step 4010 | Loss: 1.5777 | LR: 9.93e-05
|
| 502 |
+
[2026-04-25 20:30:57] Epoch 1 | Step 4020 | Loss: 1.5780 | LR: 9.90e-05
|
| 503 |
+
[2026-04-25 20:31:00] Epoch 1 | Step 4030 | Loss: 1.5778 | LR: 9.87e-05
|
| 504 |
+
[2026-04-25 20:31:02] Epoch 1 | Step 4040 | Loss: 1.5774 | LR: 9.84e-05
|
| 505 |
+
[2026-04-25 20:31:05] Epoch 1 | Step 4050 | Loss: 1.5774 | LR: 9.80e-05
|
| 506 |
+
[2026-04-25 20:31:07] Epoch 1 | Step 4060 | Loss: 1.5768 | LR: 9.75e-05
|
| 507 |
+
[2026-04-25 20:31:10] Epoch 1 | Step 4070 | Loss: 1.5771 | LR: 9.70e-05
|
| 508 |
+
[2026-04-25 20:31:12] Epoch 1 | Step 4080 | Loss: 1.5774 | LR: 9.65e-05
|
| 509 |
+
[2026-04-25 20:31:15] Epoch 1 | Step 4090 | Loss: 1.5776 | LR: 9.59e-05
|
| 510 |
+
[2026-04-25 20:31:17] Epoch 1 | Step 4100 | Loss: 1.5777 | LR: 9.53e-05
|
| 511 |
+
[2026-04-25 20:31:20] Epoch 1 | Step 4110 | Loss: 1.5778 | LR: 9.46e-05
|
| 512 |
+
[2026-04-25 20:31:22] Epoch 1 | Step 4120 | Loss: 1.5781 | LR: 9.40e-05
|
| 513 |
+
[2026-04-25 20:31:25] Epoch 1 | Step 4130 | Loss: 1.5780 | LR: 9.32e-05
|
| 514 |
+
[2026-04-25 20:31:28] Epoch 1 | Step 4140 | Loss: 1.5782 | LR: 9.24e-05
|
| 515 |
+
[2026-04-25 20:31:31] Epoch 1 | Step 4150 | Loss: 1.5789 | LR: 9.16e-05
|
| 516 |
+
[2026-04-25 20:31:33] Epoch 1 | Step 4160 | Loss: 1.5795 | LR: 9.08e-05
|
| 517 |
+
[2026-04-25 20:31:36] Epoch 1 | Step 4170 | Loss: 1.5796 | LR: 8.99e-05
|
| 518 |
+
[2026-04-25 20:31:38] Epoch 1 | Step 4180 | Loss: 1.5796 | LR: 8.90e-05
|
| 519 |
+
[2026-04-25 20:31:41] Epoch 1 | Step 4190 | Loss: 1.5796 | LR: 8.80e-05
|
| 520 |
+
[2026-04-25 20:31:44] Epoch 1 | Step 4200 | Loss: 1.5803 | LR: 8.70e-05
|
| 521 |
+
[2026-04-25 20:31:46] Epoch 1 | Step 4210 | Loss: 1.5801 | LR: 8.60e-05
|
| 522 |
+
[2026-04-25 20:31:49] Epoch 1 | Step 4220 | Loss: 1.5806 | LR: 8.50e-05
|
| 523 |
+
[2026-04-25 20:31:52] Epoch 1 | Step 4230 | Loss: 1.5808 | LR: 8.39e-05
|
| 524 |
+
[2026-04-25 20:31:54] Epoch 1 | Step 4240 | Loss: 1.5812 | LR: 8.28e-05
|
| 525 |
+
[2026-04-25 20:31:57] Epoch 1 | Step 4250 | Loss: 1.5813 | LR: 8.16e-05
|
| 526 |
+
[2026-04-25 20:32:00] Epoch 1 | Step 4260 | Loss: 1.5809 | LR: 8.05e-05
|
| 527 |
+
[2026-04-25 20:32:02] Epoch 1 | Step 4270 | Loss: 1.5812 | LR: 7.93e-05
|
| 528 |
+
[2026-04-25 20:32:04] Epoch 1 | Step 4280 | Loss: 1.5811 | LR: 7.81e-05
|
| 529 |
+
[2026-04-25 20:32:07] Epoch 1 | Step 4290 | Loss: 1.5810 | LR: 7.68e-05
|
| 530 |
+
[2026-04-25 20:32:10] Epoch 1 | Step 4300 | Loss: 1.5810 | LR: 7.56e-05
|
| 531 |
+
[2026-04-25 20:32:12] Epoch 1 | Step 4310 | Loss: 1.5812 | LR: 7.43e-05
|
| 532 |
+
[2026-04-25 20:32:15] Epoch 1 | Step 4320 | Loss: 1.5812 | LR: 7.30e-05
|
| 533 |
+
[2026-04-25 20:32:17] Epoch 1 | Step 4330 | Loss: 1.5812 | LR: 7.16e-05
|
| 534 |
+
[2026-04-25 20:32:20] Epoch 1 | Step 4340 | Loss: 1.5808 | LR: 7.03e-05
|
| 535 |
+
[2026-04-25 20:32:22] Epoch 1 | Step 4350 | Loss: 1.5806 | LR: 6.90e-05
|
| 536 |
+
[2026-04-25 20:32:25] Epoch 1 | Step 4360 | Loss: 1.5804 | LR: 6.76e-05
|
| 537 |
+
[2026-04-25 20:32:27] Epoch 1 | Step 4370 | Loss: 1.5804 | LR: 6.62e-05
|
| 538 |
+
[2026-04-25 20:32:30] Epoch 1 | Step 4380 | Loss: 1.5802 | LR: 6.48e-05
|
| 539 |
+
[2026-04-25 20:32:32] Epoch 1 | Step 4390 | Loss: 1.5803 | LR: 6.34e-05
|
| 540 |
+
[2026-04-25 20:32:35] Epoch 1 | Step 4400 | Loss: 1.5800 | LR: 6.20e-05
|
| 541 |
+
[2026-04-25 20:32:37] Epoch 1 | Step 4410 | Loss: 1.5795 | LR: 6.06e-05
|
| 542 |
+
[2026-04-25 20:32:40] Epoch 1 | Step 4420 | Loss: 1.5798 | LR: 5.92e-05
|
| 543 |
+
[2026-04-25 20:32:42] Epoch 1 | Step 4430 | Loss: 1.5796 | LR: 5.78e-05
|
| 544 |
+
[2026-04-25 20:32:45] Epoch 1 | Step 4440 | Loss: 1.5799 | LR: 5.63e-05
|
| 545 |
+
[2026-04-25 20:32:47] Epoch 1 | Step 4450 | Loss: 1.5797 | LR: 5.49e-05
|
| 546 |
+
[2026-04-25 20:32:50] Epoch 1 | Step 4460 | Loss: 1.5801 | LR: 5.35e-05
|
| 547 |
+
[2026-04-25 20:32:53] Epoch 1 | Step 4470 | Loss: 1.5797 | LR: 5.20e-05
|
| 548 |
+
[2026-04-25 20:32:55] Epoch 1 | Step 4480 | Loss: 1.5792 | LR: 5.06e-05
|
| 549 |
+
[2026-04-25 20:32:58] Epoch 1 | Step 4490 | Loss: 1.5790 | LR: 4.92e-05
|
| 550 |
+
[2026-04-25 20:33:00] Epoch 1 | Step 4500 | Loss: 1.5789 | LR: 4.78e-05
|
| 551 |
+
[2026-04-25 20:33:03] Epoch 1 | Step 4510 | Loss: 1.5783 | LR: 4.64e-05
|
| 552 |
+
[2026-04-25 20:33:05] Epoch 1 | Step 4520 | Loss: 1.5778 | LR: 4.50e-05
|
| 553 |
+
[2026-04-25 20:33:08] Epoch 1 | Step 4530 | Loss: 1.5774 | LR: 4.36e-05
|
| 554 |
+
[2026-04-25 20:33:11] Epoch 1 | Step 4540 | Loss: 1.5771 | LR: 4.22e-05
|
| 555 |
+
[2026-04-25 20:33:14] Epoch 1 | Step 4550 | Loss: 1.5766 | LR: 4.08e-05
|
| 556 |
+
[2026-04-25 20:33:16] Epoch 1 | Step 4560 | Loss: 1.5764 | LR: 3.95e-05
|
| 557 |
+
[2026-04-25 20:33:19] Epoch 1 | Step 4570 | Loss: 1.5762 | LR: 3.82e-05
|
| 558 |
+
[2026-04-25 20:33:21] Epoch 1 | Step 4580 | Loss: 1.5759 | LR: 3.68e-05
|
| 559 |
+
[2026-04-25 20:33:24] Epoch 1 | Step 4590 | Loss: 1.5756 | LR: 3.55e-05
|
| 560 |
+
[2026-04-25 20:33:26] Epoch 1 | Step 4600 | Loss: 1.5753 | LR: 3.43e-05
|
| 561 |
+
[2026-04-25 20:33:29] Epoch 1 | Step 4610 | Loss: 1.5747 | LR: 3.30e-05
|
| 562 |
+
[2026-04-25 20:33:31] Epoch 1 | Step 4620 | Loss: 1.5747 | LR: 3.18e-05
|
| 563 |
+
[2026-04-25 20:33:34] Epoch 1 | Step 4630 | Loss: 1.5744 | LR: 3.05e-05
|
| 564 |
+
[2026-04-25 20:33:36] Epoch 1 | Step 4640 | Loss: 1.5742 | LR: 2.94e-05
|
| 565 |
+
[2026-04-25 20:33:39] Epoch 1 | Step 4650 | Loss: 1.5740 | LR: 2.82e-05
|
| 566 |
+
[2026-04-25 20:33:41] Epoch 1 | Step 4660 | Loss: 1.5736 | LR: 2.71e-05
|
| 567 |
+
[2026-04-25 20:33:44] Epoch 1 | Step 4670 | Loss: 1.5732 | LR: 2.60e-05
|
| 568 |
+
[2026-04-25 20:33:46] Epoch 1 | Step 4680 | Loss: 1.5731 | LR: 2.49e-05
|
| 569 |
+
[2026-04-25 20:33:49] Epoch 1 | Step 4690 | Loss: 1.5728 | LR: 2.38e-05
|
| 570 |
+
[2026-04-25 20:33:51] Epoch 1 | Step 4700 | Loss: 1.5726 | LR: 2.28e-05
|
| 571 |
+
[2026-04-25 20:33:54] Epoch 1 | Step 4710 | Loss: 1.5722 | LR: 2.18e-05
|
| 572 |
+
[2026-04-25 20:33:57] Epoch 1 | Step 4720 | Loss: 1.5719 | LR: 2.09e-05
|
| 573 |
+
[2026-04-25 20:33:59] Epoch 1 | Step 4730 | Loss: 1.5718 | LR: 2.00e-05
|
| 574 |
+
[2026-04-25 20:34:01] Epoch 1 | Step 4740 | Loss: 1.5713 | LR: 1.91e-05
|
| 575 |
+
[2026-04-25 20:34:04] Epoch 1 | Step 4750 | Loss: 1.5712 | LR: 1.82e-05
|
| 576 |
+
[2026-04-25 20:34:06] Epoch 1 | Step 4760 | Loss: 1.5708 | LR: 1.74e-05
|
| 577 |
+
[2026-04-25 20:34:09] Epoch 1 | Step 4770 | Loss: 1.5702 | LR: 1.67e-05
|
| 578 |
+
[2026-04-25 20:34:11] Epoch 1 | Step 4780 | Loss: 1.5702 | LR: 1.59e-05
|
| 579 |
+
[2026-04-25 20:34:14] Epoch 1 | Step 4790 | Loss: 1.5699 | LR: 1.52e-05
|
| 580 |
+
[2026-04-25 20:34:17] Epoch 1 | Step 4800 | Loss: 1.5694 | LR: 1.46e-05
|
| 581 |
+
[2026-04-25 20:34:19] Epoch 1 | Step 4810 | Loss: 1.5688 | LR: 1.40e-05
|
| 582 |
+
[2026-04-25 20:34:22] Epoch 1 | Step 4820 | Loss: 1.5683 | LR: 1.34e-05
|
| 583 |
+
[2026-04-25 20:34:24] Epoch 1 | Step 4830 | Loss: 1.5677 | LR: 1.29e-05
|
| 584 |
+
[2026-04-25 20:34:27] Epoch 1 | Step 4840 | Loss: 1.5674 | LR: 1.24e-05
|
| 585 |
+
[2026-04-25 20:34:30] Epoch 1 | Step 4850 | Loss: 1.5676 | LR: 1.20e-05
|
| 586 |
+
[2026-04-25 20:34:32] Epoch 1 | Step 4860 | Loss: 1.5675 | LR: 1.16e-05
|
| 587 |
+
[2026-04-25 20:34:35] Epoch 1 | Step 4870 | Loss: 1.5674 | LR: 1.12e-05
|
| 588 |
+
[2026-04-25 20:34:37] Epoch 1 | Step 4880 | Loss: 1.5671 | LR: 1.09e-05
|
| 589 |
+
[2026-04-25 20:34:40] Epoch 1 | Step 4890 | Loss: 1.5666 | LR: 1.06e-05
|
| 590 |
+
[2026-04-25 20:34:43] Epoch 1 | Step 4900 | Loss: 1.5664 | LR: 1.04e-05
|
| 591 |
+
[2026-04-25 20:34:45] Epoch 1 | Step 4910 | Loss: 1.5660 | LR: 1.03e-05
|
| 592 |
+
[2026-04-25 20:34:48] Epoch 1 | Step 4920 | Loss: 1.5658 | LR: 1.01e-05
|
| 593 |
+
[2026-04-25 20:34:50] Epoch 1 | Step 4930 | Loss: 1.5655 | LR: 1.00e-05
|
| 594 |
+
[2026-04-25 20:34:52] Epoch 1 | Step 4940 | Loss: 1.5652 | LR: 1.00e-05
|
| 595 |
+
[2026-04-25 20:34:55] Epoch 1 | Step 4950 | Loss: 1.5649 | LR: 1.00e-05
|
| 596 |
+
[2026-04-25 20:34:58] Epoch 1 | Step 4960 | Loss: 1.5648 | LR: 1.00e-05
|
| 597 |
+
[2026-04-25 20:35:00] Epoch 1 | Step 4970 | Loss: 1.5643 | LR: 1.00e-05
|
| 598 |
+
[2026-04-25 20:35:02] Epoch 1 | Step 4980 | Loss: 1.5642 | LR: 1.00e-05
|
| 599 |
+
[2026-04-25 20:35:05] Epoch 1 | Step 4990 | Loss: 1.5636 | LR: 1.00e-05
|
| 600 |
+
[2026-04-25 20:35:08] Epoch 1 | Step 5000 | Loss: 1.5636 | LR: 1.00e-05
|
| 601 |
+
[2026-04-25 20:35:10] Epoch 1 | Step 5010 | Loss: 1.5631 | LR: 1.00e-05
|
| 602 |
+
[2026-04-25 20:35:12] Epoch 1 | Step 5020 | Loss: 1.5626 | LR: 1.00e-05
|
| 603 |
+
[2026-04-25 20:35:15] Epoch 1 | Step 5030 | Loss: 1.5623 | LR: 1.00e-05
|
| 604 |
+
[2026-04-25 20:35:17] Epoch 1 | Step 5040 | Loss: 1.5618 | LR: 1.00e-05
|
| 605 |
+
[2026-04-25 20:35:20] Epoch 1 | Step 5050 | Loss: 1.5615 | LR: 1.00e-05
|
| 606 |
+
[2026-04-25 20:35:22] Epoch 1 | Step 5060 | Loss: 1.5612 | LR: 1.00e-05
|
| 607 |
+
[2026-04-25 20:35:25] Epoch 1 | Step 5070 | Loss: 1.5609 | LR: 1.00e-05
|
| 608 |
+
[2026-04-25 20:35:27] Epoch 1 | Step 5080 | Loss: 1.5606 | LR: 1.00e-05
|
| 609 |
+
[2026-04-25 20:35:30] Epoch 1 | Step 5090 | Loss: 1.5604 | LR: 1.00e-05
|
| 610 |
+
[2026-04-25 20:35:33] Epoch 1 | Step 5100 | Loss: 1.5599 | LR: 1.00e-05
|
| 611 |
+
[2026-04-25 20:35:35] Epoch 1 | Step 5110 | Loss: 1.5597 | LR: 1.00e-05
|
| 612 |
+
[2026-04-25 20:35:38] Epoch 1 | Step 5120 | Loss: 1.5597 | LR: 1.00e-05
|
| 613 |
+
[2026-04-25 20:35:41] Epoch 1 | Step 5130 | Loss: 1.5593 | LR: 1.00e-05
|
| 614 |
+
[2026-04-25 20:35:43] Epoch 1 | Step 5140 | Loss: 1.5590 | LR: 1.00e-05
|
| 615 |
+
[2026-04-25 20:35:46] Epoch 1 | Step 5150 | Loss: 1.5586 | LR: 1.00e-05
|
| 616 |
+
[2026-04-25 20:35:48] Epoch 1 | Step 5160 | Loss: 1.5579 | LR: 1.00e-05
|
| 617 |
+
[2026-04-25 20:35:51] Epoch 1 | Step 5170 | Loss: 1.5578 | LR: 1.00e-05
|
| 618 |
+
[2026-04-25 20:35:53] Epoch 1 | Step 5180 | Loss: 1.5575 | LR: 1.00e-05
|
| 619 |
+
[2026-04-25 20:35:56] Epoch 1 | Step 5190 | Loss: 1.5573 | LR: 1.00e-05
|
| 620 |
+
[2026-04-25 20:35:59] Epoch 1 | Step 5200 | Loss: 1.5570 | LR: 1.00e-05
|
| 621 |
+
[2026-04-25 20:36:01] Epoch 1 | Step 5210 | Loss: 1.5567 | LR: 1.00e-05
|
| 622 |
+
[2026-04-25 20:36:04] Epoch 1 | Step 5220 | Loss: 1.5564 | LR: 1.00e-05
|
| 623 |
+
[2026-04-25 20:36:07] Epoch 1 | Step 5230 | Loss: 1.5563 | LR: 1.00e-05
|
| 624 |
+
[2026-04-25 20:36:09] Epoch 1 | Step 5240 | Loss: 1.5560 | LR: 1.00e-05
|
| 625 |
+
[2026-04-25 20:36:12] Epoch 1 | Step 5250 | Loss: 1.5560 | LR: 1.00e-05
|
| 626 |
+
[2026-04-25 20:36:14] Epoch 1 | Step 5260 | Loss: 1.5556 | LR: 1.00e-05
|
| 627 |
+
[2026-04-25 20:36:17] Epoch 1 | Step 5270 | Loss: 1.5553 | LR: 1.00e-05
|
| 628 |
+
[2026-04-25 20:36:20] Epoch 1 | Step 5280 | Loss: 1.5548 | LR: 1.00e-05
|
| 629 |
+
[2026-04-25 20:36:22] Epoch 1 | Step 5290 | Loss: 1.5543 | LR: 1.00e-05
|
| 630 |
+
[2026-04-25 20:36:24] Epoch 1 | Step 5300 | Loss: 1.5541 | LR: 1.00e-05
|
| 631 |
+
[2026-04-25 20:36:27] Epoch 1 | Step 5310 | Loss: 1.5539 | LR: 1.00e-05
|
| 632 |
+
[2026-04-25 20:36:29] Epoch 1 | Step 5320 | Loss: 1.5534 | LR: 1.00e-05
|
| 633 |
+
[2026-04-25 20:36:32] Epoch 1 | Step 5330 | Loss: 1.5532 | LR: 1.00e-05
|
| 634 |
+
[2026-04-25 20:36:34] Epoch 1 | Step 5340 | Loss: 1.5528 | LR: 1.00e-05
|
| 635 |
+
[2026-04-25 20:36:37] Epoch 1 | Step 5350 | Loss: 1.5526 | LR: 1.00e-05
|
| 636 |
+
[2026-04-25 20:36:39] Epoch 1 | Step 5360 | Loss: 1.5526 | LR: 1.00e-05
|
| 637 |
+
[2026-04-25 20:36:42] Epoch 1 | Step 5370 | Loss: 1.5523 | LR: 1.00e-05
|
| 638 |
+
[2026-04-25 20:36:44] Epoch 1 | Step 5380 | Loss: 1.5520 | LR: 1.00e-05
|
| 639 |
+
[2026-04-25 20:36:47] Epoch 1 | Step 5390 | Loss: 1.5516 | LR: 1.00e-05
|
| 640 |
+
[2026-04-25 20:36:49] Epoch 1 | Step 5400 | Loss: 1.5511 | LR: 1.00e-05
|
| 641 |
+
[2026-04-25 20:36:52] Epoch 1 | Step 5410 | Loss: 1.5509 | LR: 1.00e-05
|
| 642 |
+
[2026-04-25 20:36:55] Epoch 1 | Step 5420 | Loss: 1.5505 | LR: 1.00e-05
|
| 643 |
+
[2026-04-25 20:36:57] Epoch 1 | Step 5430 | Loss: 1.5502 | LR: 1.00e-05
|
| 644 |
+
[2026-04-25 20:36:59] Epoch 1 | Step 5440 | Loss: 1.5501 | LR: 1.00e-05
|
| 645 |
+
[2026-04-25 20:37:02] Epoch 1 | Step 5450 | Loss: 1.5501 | LR: 1.00e-05
|
| 646 |
+
[2026-04-25 20:37:05] Epoch 1 | Step 5460 | Loss: 1.5498 | LR: 1.00e-05
|
| 647 |
+
[2026-04-25 20:37:07] Epoch 1 | Step 5470 | Loss: 1.5495 | LR: 1.00e-05
|
| 648 |
+
[2026-04-25 20:37:10] Epoch 1 | Step 5480 | Loss: 1.5492 | LR: 1.00e-05
|
| 649 |
+
[2026-04-25 20:37:12] Epoch 1 | Step 5490 | Loss: 1.5491 | LR: 1.00e-05
|
| 650 |
+
[2026-04-25 20:37:15] Epoch 1 | Step 5500 | Loss: 1.5488 | LR: 1.00e-05
|
| 651 |
+
[2026-04-25 20:37:17] Epoch 1 | Step 5510 | Loss: 1.5488 | LR: 1.00e-05
|
| 652 |
+
[2026-04-25 20:37:20] Epoch 1 | Step 5520 | Loss: 1.5484 | LR: 1.00e-05
|
| 653 |
+
[2026-04-25 20:37:22] Epoch 1 | Step 5530 | Loss: 1.5481 | LR: 1.00e-05
|
| 654 |
+
[2026-04-25 20:37:25] Epoch 1 | Step 5540 | Loss: 1.5475 | LR: 1.00e-05
|
| 655 |
+
[2026-04-25 20:37:27] Epoch 1 | Step 5550 | Loss: 1.5473 | LR: 1.00e-05
|
| 656 |
+
[2026-04-25 20:37:30] Epoch 1 | Step 5560 | Loss: 1.5468 | LR: 1.00e-05
|
| 657 |
+
[2026-04-25 20:37:32] Epoch 1 | Step 5570 | Loss: 1.5468 | LR: 1.00e-05
|
| 658 |
+
[2026-04-25 20:37:35] Epoch 1 | Step 5580 | Loss: 1.5464 | LR: 1.00e-05
|
| 659 |
+
[2026-04-25 20:37:38] Epoch 1 | Step 5590 | Loss: 1.5458 | LR: 1.00e-05
|
| 660 |
+
[2026-04-25 20:37:40] Epoch 1 | Step 5600 | Loss: 1.5459 | LR: 1.00e-05
|
| 661 |
+
[2026-04-25 20:37:43] Epoch 1 | Step 5610 | Loss: 1.5459 | LR: 1.00e-05
|
| 662 |
+
[2026-04-25 20:37:45] Epoch 1 | Step 5620 | Loss: 1.5456 | LR: 1.00e-05
|
| 663 |
+
[2026-04-25 20:37:48] Epoch 1 | Step 5630 | Loss: 1.5454 | LR: 1.00e-05
|
| 664 |
+
[2026-04-25 20:37:50] Epoch 1 | Step 5640 | Loss: 1.5452 | LR: 1.00e-05
|
| 665 |
+
[2026-04-25 20:37:53] Epoch 1 | Step 5650 | Loss: 1.5449 | LR: 1.00e-05
|
| 666 |
+
[2026-04-25 20:37:55] Epoch 1 | Step 5660 | Loss: 1.5444 | LR: 1.00e-05
|
| 667 |
+
[2026-04-25 20:37:58] Epoch 1 | Step 5670 | Loss: 1.5442 | LR: 1.00e-05
|
| 668 |
+
[2026-04-25 20:38:01] Epoch 1 | Step 5680 | Loss: 1.5437 | LR: 1.00e-05
|
| 669 |
+
[2026-04-25 20:38:03] Epoch 1 | Step 5690 | Loss: 1.5436 | LR: 1.00e-05
|
| 670 |
+
[2026-04-25 20:38:05] Epoch 1 | Step 5700 | Loss: 1.5432 | LR: 1.00e-05
|
| 671 |
+
[2026-04-25 20:38:08] Epoch 1 | Step 5710 | Loss: 1.5430 | LR: 1.00e-05
|
| 672 |
+
[2026-04-25 20:38:11] Epoch 1 | Step 5720 | Loss: 1.5428 | LR: 1.00e-05
|
| 673 |
+
[2026-04-25 20:38:13] Epoch 1 | Step 5730 | Loss: 1.5425 | LR: 1.00e-05
|
| 674 |
+
[2026-04-25 20:38:16] Epoch 1 | Step 5740 | Loss: 1.5424 | LR: 1.00e-05
|
| 675 |
+
[2026-04-25 20:38:19] Epoch 1 | Step 5750 | Loss: 1.5422 | LR: 1.00e-05
|
| 676 |
+
[2026-04-25 20:38:21] Epoch 1 | Step 5760 | Loss: 1.5420 | LR: 1.00e-05
|
| 677 |
+
[2026-04-25 20:38:24] Epoch 1 | Step 5770 | Loss: 1.5418 | LR: 1.00e-05
|
| 678 |
+
[2026-04-25 20:38:26] Epoch 1 | Step 5780 | Loss: 1.5414 | LR: 1.00e-05
|
| 679 |
+
[2026-04-25 20:38:29] Epoch 1 | Step 5790 | Loss: 1.5414 | LR: 1.00e-05
|
| 680 |
+
[2026-04-25 20:38:31] Epoch 1 | Step 5800 | Loss: 1.5416 | LR: 1.00e-05
|
| 681 |
+
[2026-04-25 20:38:34] Epoch 1 | Step 5810 | Loss: 1.5414 | LR: 1.00e-05
|
| 682 |
+
[2026-04-25 20:38:37] Epoch 1 | Step 5820 | Loss: 1.5410 | LR: 1.00e-05
|
| 683 |
+
[2026-04-25 20:38:39] Epoch 1 | Step 5830 | Loss: 1.5406 | LR: 1.00e-05
|
| 684 |
+
[2026-04-25 20:38:41] Epoch 1 | Step 5840 | Loss: 1.5405 | LR: 1.00e-05
|
| 685 |
+
[2026-04-25 20:38:44] Epoch 1 | Step 5850 | Loss: 1.5404 | LR: 1.00e-05
|
| 686 |
+
[2026-04-25 20:38:46] Epoch 1 | Step 5860 | Loss: 1.5402 | LR: 1.00e-05
|
| 687 |
+
[2026-04-25 20:38:49] Epoch 1 | Step 5870 | Loss: 1.5400 | LR: 1.00e-05
|
| 688 |
+
[2026-04-25 20:38:52] Epoch 1 | Step 5880 | Loss: 1.5400 | LR: 1.00e-05
|
| 689 |
+
[2026-04-25 20:38:54] Epoch 1 | Step 5890 | Loss: 1.5398 | LR: 1.00e-05
|
| 690 |
+
[2026-04-25 20:38:57] Epoch 1 | Step 5900 | Loss: 1.5394 | LR: 1.00e-05
|
| 691 |
+
[2026-04-25 20:39:00] Epoch 1 | Step 5910 | Loss: 1.5393 | LR: 1.00e-05
|
| 692 |
+
[2026-04-25 20:39:02] Epoch 1 | Step 5920 | Loss: 1.5389 | LR: 1.00e-05
|
| 693 |
+
[2026-04-25 20:39:05] Epoch 1 | Step 5930 | Loss: 1.5389 | LR: 1.00e-05
|
| 694 |
+
[2026-04-25 20:39:08] Epoch 1 | Step 5940 | Loss: 1.5385 | LR: 1.00e-05
|
| 695 |
+
[2026-04-25 20:39:10] Epoch 1 | Step 5950 | Loss: 1.5385 | LR: 1.00e-05
|
| 696 |
+
[2026-04-25 20:39:13] Epoch 1 | Step 5960 | Loss: 1.5383 | LR: 1.00e-05
|
| 697 |
+
[2026-04-25 20:39:15] Epoch 1 | Step 5970 | Loss: 1.5382 | LR: 1.00e-05
|
| 698 |
+
[2026-04-25 20:39:18] Epoch 1 | Step 5980 | Loss: 1.5378 | LR: 1.00e-05
|
| 699 |
+
[2026-04-25 20:39:21] Epoch 1 | Step 5990 | Loss: 1.5379 | LR: 1.00e-05
|
| 700 |
+
[2026-04-25 20:39:23] Epoch 1 | Step 6000 | Loss: 1.5376 | LR: 1.00e-05
|
| 701 |
+
[2026-04-25 20:39:24] Validation | Batch 10/84 | Loss: 1.3357
|
| 702 |
+
[2026-04-25 20:39:24] Validation | Batch 20/84 | Loss: 1.3663
|
| 703 |
+
[2026-04-25 20:39:25] Validation | Batch 30/84 | Loss: 1.4614
|
| 704 |
+
[2026-04-25 20:39:25] Validation | Batch 40/84 | Loss: 1.4637
|
| 705 |
+
[2026-04-25 20:39:25] Validation | Batch 50/84 | Loss: 1.4406
|
| 706 |
+
[2026-04-25 20:39:26] Validation | Batch 60/84 | Loss: 1.4091
|
| 707 |
+
[2026-04-25 20:39:26] Validation | Batch 70/84 | Loss: 1.3889
|
| 708 |
+
[2026-04-25 20:39:27] Validation | Batch 80/84 | Loss: 1.3947
|
| 709 |
+
[2026-04-25 20:39:27] Validation | Batch 84/84 | Loss: 1.3849
|
| 710 |
+
[2026-04-25 20:39:27] Validation | Loss: 1.3849 | PPL: 4.09 | Time: 3.80s
|
| 711 |
+
[2026-04-25 20:39:30] New best model saved! Val loss: 1.3849
|
| 712 |
+
[2026-04-25 20:39:32] Epoch 1 | Step 6010 | Loss: 1.5375 | LR: 1.00e-05
|
| 713 |
+
[2026-04-25 20:39:35] Epoch 1 | Step 6020 | Loss: 1.5371 | LR: 1.00e-05
|
| 714 |
+
[2026-04-25 20:39:37] Epoch 1 | Step 6030 | Loss: 1.5371 | LR: 1.00e-05
|
| 715 |
+
[2026-04-25 20:39:40] Epoch 1 | Step 6040 | Loss: 1.5369 | LR: 1.00e-05
|
| 716 |
+
[2026-04-25 20:39:43] Epoch 1 | Step 6050 | Loss: 1.5369 | LR: 1.00e-05
|
| 717 |
+
[2026-04-25 20:39:46] Epoch 1 | Step 6060 | Loss: 1.5366 | LR: 1.00e-05
|
| 718 |
+
[2026-04-25 20:39:48] Epoch 1 | Step 6070 | Loss: 1.5362 | LR: 1.00e-05
|
| 719 |
+
[2026-04-25 20:39:51] Epoch 1 | Step 6080 | Loss: 1.5362 | LR: 1.00e-05
|
| 720 |
+
[2026-04-25 20:39:53] Epoch 1 | Step 6090 | Loss: 1.5361 | LR: 1.00e-05
|
| 721 |
+
[2026-04-25 20:39:56] Epoch 1 | Step 6100 | Loss: 1.5361 | LR: 1.00e-05
|
| 722 |
+
[2026-04-25 20:39:59] Epoch 1 | Step 6110 | Loss: 1.5360 | LR: 1.00e-05
|
| 723 |
+
[2026-04-25 20:40:01] Epoch 1 | Step 6120 | Loss: 1.5357 | LR: 1.00e-05
|
| 724 |
+
[2026-04-25 20:40:04] Epoch 1 | Step 6130 | Loss: 1.5353 | LR: 1.00e-05
|
| 725 |
+
[2026-04-25 20:40:06] Epoch 1 | Step 6140 | Loss: 1.5346 | LR: 1.00e-05
|
| 726 |
+
[2026-04-25 20:40:09] Epoch 1 | Step 6150 | Loss: 1.5344 | LR: 1.00e-05
|
| 727 |
+
[2026-04-25 20:40:11] Epoch 1 | Step 6160 | Loss: 1.5341 | LR: 1.00e-05
|
| 728 |
+
[2026-04-25 20:40:14] Epoch 1 | Step 6170 | Loss: 1.5341 | LR: 1.00e-05
|
| 729 |
+
[2026-04-25 20:40:16] Epoch 1 | Step 6180 | Loss: 1.5337 | LR: 1.00e-05
|
| 730 |
+
[2026-04-25 20:40:19] Epoch 1 | Step 6190 | Loss: 1.5333 | LR: 1.00e-05
|
| 731 |
+
[2026-04-25 20:40:22] Epoch 1 | Step 6200 | Loss: 1.5331 | LR: 1.00e-05
|
| 732 |
+
[2026-04-25 20:40:24] Epoch 1 | Step 6210 | Loss: 1.5330 | LR: 1.00e-05
|
| 733 |
+
[2026-04-25 20:40:27] Epoch 1 | Step 6220 | Loss: 1.5329 | LR: 1.00e-05
|
| 734 |
+
[2026-04-25 20:40:30] Epoch 1 | Step 6230 | Loss: 1.5326 | LR: 1.00e-05
|
| 735 |
+
[2026-04-25 20:40:32] Epoch 1 | Step 6240 | Loss: 1.5324 | LR: 1.00e-05
|
| 736 |
+
[2026-04-25 20:40:35] Epoch 1 | Step 6250 | Loss: 1.5319 | LR: 1.00e-05
|
| 737 |
+
[2026-04-25 20:40:37] Epoch 1 | Step 6260 | Loss: 1.5319 | LR: 1.00e-05
|
| 738 |
+
[2026-04-25 20:40:40] Epoch 1 | Step 6270 | Loss: 1.5316 | LR: 1.00e-05
|
| 739 |
+
[2026-04-25 20:40:42] Epoch 1 | Step 6280 | Loss: 1.5310 | LR: 1.00e-05
|
| 740 |
+
[2026-04-25 20:40:45] Epoch 1 | Step 6290 | Loss: 1.5308 | LR: 1.00e-05
|
| 741 |
+
[2026-04-25 20:40:47] Epoch 1 | Step 6300 | Loss: 1.5306 | LR: 1.00e-05
|
| 742 |
+
[2026-04-25 20:40:50] Epoch 1 | Step 6310 | Loss: 1.5306 | LR: 1.00e-05
|
| 743 |
+
[2026-04-25 20:40:52] Epoch 1 | Step 6320 | Loss: 1.5304 | LR: 1.00e-05
|
| 744 |
+
[2026-04-25 20:40:55] Epoch 1 | Step 6330 | Loss: 1.5306 | LR: 1.00e-05
|
| 745 |
+
[2026-04-25 20:40:58] Epoch 1 | Step 6340 | Loss: 1.5306 | LR: 1.00e-05
|
| 746 |
+
[2026-04-25 20:41:00] Epoch 1 | Step 6350 | Loss: 1.5305 | LR: 1.00e-05
|
| 747 |
+
[2026-04-25 20:41:03] Epoch 1 | Step 6360 | Loss: 1.5305 | LR: 1.00e-05
|
| 748 |
+
[2026-04-25 20:41:05] Epoch 1 | Step 6370 | Loss: 1.5302 | LR: 1.00e-05
|
| 749 |
+
[2026-04-25 20:41:08] Epoch 1 | Step 6380 | Loss: 1.5301 | LR: 1.00e-05
|
| 750 |
+
[2026-04-25 20:41:10] Epoch 1 | Step 6390 | Loss: 1.5297 | LR: 1.00e-05
|
| 751 |
+
[2026-04-25 20:41:13] Epoch 1 | Step 6400 | Loss: 1.5294 | LR: 1.00e-05
|
| 752 |
+
[2026-04-25 20:41:15] Epoch 1 | Step 6410 | Loss: 1.5292 | LR: 1.00e-05
|
| 753 |
+
[2026-04-25 20:41:17] Epoch 1 | Step 6420 | Loss: 1.5289 | LR: 1.00e-05
|
| 754 |
+
[2026-04-25 20:41:20] Epoch 1 | Step 6430 | Loss: 1.5286 | LR: 1.00e-05
|
| 755 |
+
[2026-04-25 20:41:22] Epoch 1 | Step 6440 | Loss: 1.5285 | LR: 1.00e-05
|
| 756 |
+
[2026-04-25 20:41:25] Epoch 1 | Step 6450 | Loss: 1.5283 | LR: 1.00e-05
|
| 757 |
+
[2026-04-25 20:41:27] Epoch 1 | Step 6460 | Loss: 1.5277 | LR: 1.00e-05
|
| 758 |
+
[2026-04-25 20:41:30] Epoch 1 | Step 6470 | Loss: 1.5275 | LR: 1.00e-05
|
| 759 |
+
[2026-04-25 20:41:32] Epoch 1 | Step 6480 | Loss: 1.5275 | LR: 1.00e-05
|
| 760 |
+
[2026-04-25 20:41:35] Epoch 1 | Step 6490 | Loss: 1.5276 | LR: 1.00e-05
|
| 761 |
+
[2026-04-25 20:41:37] Epoch 1 | Step 6500 | Loss: 1.5271 | LR: 1.00e-05
|
| 762 |
+
[2026-04-25 20:41:40] Epoch 1 | Step 6510 | Loss: 1.5267 | LR: 1.00e-05
|
| 763 |
+
[2026-04-25 20:41:42] Epoch 1 | Step 6520 | Loss: 1.5264 | LR: 1.00e-05
|
| 764 |
+
[2026-04-25 20:41:45] Epoch 1 | Step 6530 | Loss: 1.5260 | LR: 1.00e-05
|
| 765 |
+
[2026-04-25 20:41:47] Epoch 1 | Step 6540 | Loss: 1.5257 | LR: 1.00e-05
|
| 766 |
+
[2026-04-25 20:41:50] Epoch 1 | Step 6550 | Loss: 1.5254 | LR: 1.00e-05
|
| 767 |
+
[2026-04-25 20:41:52] Epoch 1 | Step 6560 | Loss: 1.5251 | LR: 1.00e-05
|
| 768 |
+
[2026-04-25 20:41:55] Epoch 1 | Step 6570 | Loss: 1.5250 | LR: 1.00e-05
|
| 769 |
+
[2026-04-25 20:41:57] Epoch 1 | Step 6580 | Loss: 1.5248 | LR: 1.00e-05
|
| 770 |
+
[2026-04-25 20:42:00] Epoch 1 | Step 6590 | Loss: 1.5244 | LR: 1.00e-05
|
| 771 |
+
[2026-04-25 20:42:02] Epoch 1 | Step 6600 | Loss: 1.5242 | LR: 1.00e-05
|
| 772 |
+
[2026-04-25 20:42:05] Epoch 1 | Step 6610 | Loss: 1.5240 | LR: 1.00e-05
|
| 773 |
+
[2026-04-25 20:42:08] Epoch 1 | Step 6620 | Loss: 1.5238 | LR: 1.00e-05
|
| 774 |
+
[2026-04-25 20:42:10] Epoch 1 | Step 6630 | Loss: 1.5234 | LR: 1.00e-05
|
| 775 |
+
[2026-04-25 20:42:13] Epoch 1 | Step 6640 | Loss: 1.5232 | LR: 1.00e-05
|
| 776 |
+
[2026-04-25 20:42:15] Epoch 1 | Step 6650 | Loss: 1.5231 | LR: 1.00e-05
|
| 777 |
+
[2026-04-25 20:42:17] Epoch 1 | Step 6660 | Loss: 1.5227 | LR: 1.00e-05
|
| 778 |
+
[2026-04-25 20:42:20] Epoch 1 | Step 6670 | Loss: 1.5226 | LR: 1.00e-05
|
| 779 |
+
[2026-04-25 20:42:23] Epoch 1 | Step 6680 | Loss: 1.5224 | LR: 1.00e-05
|
| 780 |
+
[2026-04-25 20:42:25] Epoch 1 | Step 6690 | Loss: 1.5223 | LR: 1.00e-05
|
| 781 |
+
[2026-04-25 20:42:28] Epoch 1 | Step 6700 | Loss: 1.5221 | LR: 1.00e-05
|
| 782 |
+
[2026-04-25 20:42:30] Epoch 1 | Step 6710 | Loss: 1.5220 | LR: 1.00e-05
|
| 783 |
+
[2026-04-25 20:42:33] Epoch 1 | Step 6720 | Loss: 1.5218 | LR: 1.00e-05
|
| 784 |
+
[2026-04-25 20:42:35] Epoch 1 | Step 6730 | Loss: 1.5218 | LR: 1.00e-05
|
| 785 |
+
[2026-04-25 20:42:38] Epoch 1 | Step 6740 | Loss: 1.5215 | LR: 1.00e-05
|
| 786 |
+
[2026-04-25 20:42:40] Epoch 1 | Step 6750 | Loss: 1.5212 | LR: 1.00e-05
|
| 787 |
+
[2026-04-25 20:42:43] Epoch 1 | Step 6760 | Loss: 1.5211 | LR: 1.00e-05
|
| 788 |
+
[2026-04-25 20:42:45] Epoch 1 | Step 6770 | Loss: 1.5208 | LR: 1.00e-05
|
| 789 |
+
[2026-04-25 20:42:48] Epoch 1 | Step 6780 | Loss: 1.5207 | LR: 1.00e-05
|
| 790 |
+
[2026-04-25 20:42:50] Epoch 1 | Step 6790 | Loss: 1.5207 | LR: 1.00e-05
|
| 791 |
+
[2026-04-25 20:42:53] Epoch 1 | Step 6800 | Loss: 1.5206 | LR: 1.00e-05
|
| 792 |
+
[2026-04-25 20:42:55] Epoch 1 | Step 6810 | Loss: 1.5205 | LR: 1.00e-05
|
| 793 |
+
[2026-04-25 20:42:58] Epoch 1 | Step 6820 | Loss: 1.5205 | LR: 1.00e-05
|
| 794 |
+
[2026-04-25 20:43:01] Epoch 1 | Step 6830 | Loss: 1.5205 | LR: 1.00e-05
|
| 795 |
+
[2026-04-25 20:43:03] Epoch 1 | Step 6840 | Loss: 1.5205 | LR: 1.00e-05
|
| 796 |
+
[2026-04-25 20:43:06] Epoch 1 | Step 6850 | Loss: 1.5203 | LR: 1.00e-05
|
| 797 |
+
[2026-04-25 20:43:08] Epoch 1 | Step 6860 | Loss: 1.5201 | LR: 1.00e-05
|
| 798 |
+
[2026-04-25 20:43:11] Epoch 1 | Step 6870 | Loss: 1.5199 | LR: 1.00e-05
|
| 799 |
+
[2026-04-25 20:43:13] Epoch 1 | Step 6880 | Loss: 1.5196 | LR: 1.00e-05
|
| 800 |
+
[2026-04-25 20:43:16] Epoch 1 | Step 6890 | Loss: 1.5197 | LR: 1.00e-05
|
| 801 |
+
[2026-04-25 20:43:19] Epoch 1 | Step 6900 | Loss: 1.5195 | LR: 1.00e-05
|
| 802 |
+
[2026-04-25 20:43:21] Epoch 1 | Step 6910 | Loss: 1.5190 | LR: 1.00e-05
|
| 803 |
+
[2026-04-25 20:43:24] Epoch 1 | Step 6920 | Loss: 1.5189 | LR: 1.00e-05
|
| 804 |
+
[2026-04-25 20:43:26] Epoch 1 | Step 6930 | Loss: 1.5190 | LR: 1.00e-05
|
| 805 |
+
[2026-04-25 20:43:29] Epoch 1 | Step 6940 | Loss: 1.5187 | LR: 1.00e-05
|
| 806 |
+
[2026-04-25 20:43:31] Epoch 1 | Step 6950 | Loss: 1.5184 | LR: 1.00e-05
|
| 807 |
+
[2026-04-25 20:43:34] Epoch 1 | Step 6960 | Loss: 1.5183 | LR: 1.00e-05
|
| 808 |
+
[2026-04-25 20:43:36] Epoch 1 | Step 6970 | Loss: 1.5180 | LR: 1.00e-05
|
| 809 |
+
[2026-04-25 20:43:39] Epoch 1 | Step 6980 | Loss: 1.5179 | LR: 1.00e-05
|
| 810 |
+
[2026-04-25 20:43:41] Epoch 1 | Step 6990 | Loss: 1.5174 | LR: 1.00e-05
|
| 811 |
+
[2026-04-25 20:43:44] Epoch 1 | Step 7000 | Loss: 1.5172 | LR: 1.00e-05
|
| 812 |
+
[2026-04-25 20:43:46] Epoch 1 | Step 7010 | Loss: 1.5169 | LR: 1.00e-05
|
| 813 |
+
[2026-04-25 20:43:49] Epoch 1 | Step 7020 | Loss: 1.5169 | LR: 1.00e-05
|
| 814 |
+
[2026-04-25 20:43:51] Epoch 1 | Step 7030 | Loss: 1.5167 | LR: 1.00e-05
|
| 815 |
+
[2026-04-25 20:43:54] Epoch 1 | Step 7040 | Loss: 1.5167 | LR: 1.00e-05
|
| 816 |
+
[2026-04-25 20:43:56] Epoch 1 | Step 7050 | Loss: 1.5164 | LR: 1.00e-05
|
| 817 |
+
[2026-04-25 20:43:59] Epoch 1 | Step 7060 | Loss: 1.5162 | LR: 1.00e-05
|
| 818 |
+
[2026-04-25 20:44:01] Epoch 1 | Step 7070 | Loss: 1.5163 | LR: 1.00e-05
|
| 819 |
+
[2026-04-25 20:44:04] Epoch 1 | Step 7080 | Loss: 1.5159 | LR: 1.00e-05
|
| 820 |
+
[2026-04-25 20:44:06] Epoch 1 | Step 7090 | Loss: 1.5159 | LR: 1.00e-05
|
| 821 |
+
[2026-04-25 20:44:09] Epoch 1 | Step 7100 | Loss: 1.5155 | LR: 1.00e-05
|
| 822 |
+
[2026-04-25 20:44:11] Epoch 1 | Step 7110 | Loss: 1.5152 | LR: 1.00e-05
|
| 823 |
+
[2026-04-25 20:44:14] Epoch 1 | Step 7120 | Loss: 1.5153 | LR: 1.00e-05
|
| 824 |
+
[2026-04-25 20:44:17] Epoch 1 | Step 7130 | Loss: 1.5149 | LR: 1.00e-05
|
| 825 |
+
[2026-04-25 20:44:19] Epoch 1 | Step 7140 | Loss: 1.5146 | LR: 1.00e-05
|
| 826 |
+
[2026-04-25 20:44:21] Epoch 1 | Step 7150 | Loss: 1.5147 | LR: 1.00e-05
|
| 827 |
+
[2026-04-25 20:44:24] Epoch 1 | Step 7160 | Loss: 1.5143 | LR: 1.00e-05
|
| 828 |
+
[2026-04-25 20:44:27] Epoch 1 | Step 7170 | Loss: 1.5141 | LR: 1.00e-05
|
| 829 |
+
[2026-04-25 20:44:29] Epoch 1 | Step 7180 | Loss: 1.5141 | LR: 1.00e-05
|
| 830 |
+
[2026-04-25 20:44:32] Epoch 1 | Step 7190 | Loss: 1.5141 | LR: 1.00e-05
|
| 831 |
+
[2026-04-25 20:44:35] Epoch 1 | Step 7200 | Loss: 1.5139 | LR: 1.00e-05
|
| 832 |
+
[2026-04-25 20:44:37] Epoch 1 | Step 7210 | Loss: 1.5135 | LR: 1.00e-05
|
| 833 |
+
[2026-04-25 20:44:40] Epoch 1 | Step 7220 | Loss: 1.5135 | LR: 1.00e-05
|
| 834 |
+
[2026-04-25 20:44:43] Epoch 1 | Step 7230 | Loss: 1.5134 | LR: 1.00e-05
|
| 835 |
+
[2026-04-25 20:44:45] Epoch 1 | Step 7240 | Loss: 1.5132 | LR: 1.00e-05
|
| 836 |
+
[2026-04-25 20:44:48] Epoch 1 | Step 7250 | Loss: 1.5131 | LR: 1.00e-05
|
| 837 |
+
[2026-04-25 20:44:50] Epoch 1 | Step 7260 | Loss: 1.5128 | LR: 1.00e-05
|
| 838 |
+
[2026-04-25 20:44:53] Epoch 1 | Step 7270 | Loss: 1.5128 | LR: 1.00e-05
|
| 839 |
+
[2026-04-25 20:44:55] Epoch 1 | Step 7280 | Loss: 1.5128 | LR: 1.00e-05
|
| 840 |
+
[2026-04-25 20:44:58] Epoch 1 | Step 7290 | Loss: 1.5124 | LR: 1.00e-05
|
| 841 |
+
[2026-04-25 20:45:00] Epoch 1 | Step 7300 | Loss: 1.5122 | LR: 1.00e-05
|
| 842 |
+
[2026-04-25 20:45:03] Epoch 1 | Step 7310 | Loss: 1.5119 | LR: 1.00e-05
|
| 843 |
+
[2026-04-25 20:45:06] Epoch 1 | Step 7320 | Loss: 1.5115 | LR: 1.00e-05
|
| 844 |
+
[2026-04-25 20:45:08] Epoch 1 | Step 7330 | Loss: 1.5114 | LR: 1.00e-05
|
| 845 |
+
[2026-04-25 20:45:11] Epoch 1 | Step 7340 | Loss: 1.5114 | LR: 1.00e-05
|
| 846 |
+
[2026-04-25 20:45:13] Epoch 1 | Step 7350 | Loss: 1.5114 | LR: 1.00e-05
|
| 847 |
+
[2026-04-25 20:45:16] Epoch 1 | Step 7360 | Loss: 1.5110 | LR: 1.00e-05
|
| 848 |
+
[2026-04-25 20:45:19] Epoch 1 | Step 7370 | Loss: 1.5106 | LR: 1.00e-05
|
| 849 |
+
[2026-04-25 20:45:21] Epoch 1 | Step 7380 | Loss: 1.5103 | LR: 1.00e-05
|
| 850 |
+
[2026-04-25 20:45:24] Epoch 1 | Step 7390 | Loss: 1.5100 | LR: 1.00e-05
|
| 851 |
+
[2026-04-25 20:45:26] Epoch 1 | Step 7400 | Loss: 1.5097 | LR: 1.00e-05
|
| 852 |
+
[2026-04-25 20:45:29] Epoch 1 | Step 7410 | Loss: 1.5098 | LR: 1.00e-05
|
| 853 |
+
[2026-04-25 20:45:31] Epoch 1 | Step 7420 | Loss: 1.5097 | LR: 1.00e-05
|
| 854 |
+
[2026-04-25 20:45:34] Epoch 1 | Step 7430 | Loss: 1.5094 | LR: 1.00e-05
|
| 855 |
+
[2026-04-25 20:45:36] Epoch 1 | Step 7440 | Loss: 1.5093 | LR: 1.00e-05
|
| 856 |
+
[2026-04-25 20:45:39] Epoch 1 | Step 7450 | Loss: 1.5090 | LR: 1.00e-05
|
| 857 |
+
[2026-04-25 20:45:41] Epoch 1 | Step 7460 | Loss: 1.5088 | LR: 1.00e-05
|
| 858 |
+
[2026-04-25 20:45:44] Epoch 1 | Step 7470 | Loss: 1.5087 | LR: 1.00e-05
|
| 859 |
+
[2026-04-25 20:45:46] Epoch 1 | Step 7480 | Loss: 1.5085 | LR: 1.00e-05
|
| 860 |
+
[2026-04-25 20:45:49] Epoch 1 | Step 7490 | Loss: 1.5085 | LR: 1.00e-05
|
| 861 |
+
[2026-04-25 20:45:51] Epoch 1 | Step 7500 | Loss: 1.5085 | LR: 1.00e-05
|
| 862 |
+
[2026-04-25 20:45:54] Epoch 1 | Step 7510 | Loss: 1.5084 | LR: 1.00e-05
|
| 863 |
+
[2026-04-25 20:45:56] Epoch 1 | Step 7520 | Loss: 1.5082 | LR: 1.00e-05
|
| 864 |
+
[2026-04-25 20:45:58] Epoch 1 | Step 7530 | Loss: 1.5080 | LR: 1.00e-05
|
| 865 |
+
[2026-04-25 20:46:01] Epoch 1 | Step 7540 | Loss: 1.5078 | LR: 1.00e-05
|
| 866 |
+
[2026-04-25 20:46:03] Epoch 1 | Step 7550 | Loss: 1.5077 | LR: 1.00e-05
|
| 867 |
+
[2026-04-25 20:46:06] Epoch 1 | Step 7560 | Loss: 1.5075 | LR: 1.00e-05
|
| 868 |
+
[2026-04-25 20:46:08] Epoch 1 | Step 7570 | Loss: 1.5074 | LR: 1.00e-05
|
| 869 |
+
[2026-04-25 20:46:11] Epoch 1 | Step 7580 | Loss: 1.5071 | LR: 1.00e-05
|
| 870 |
+
[2026-04-25 20:46:13] Epoch 1 | Step 7590 | Loss: 1.5069 | LR: 1.00e-05
|
| 871 |
+
[2026-04-25 20:46:16] Epoch 1 | Step 7600 | Loss: 1.5068 | LR: 1.00e-05
|
| 872 |
+
[2026-04-25 20:46:18] Epoch 1 | Step 7610 | Loss: 1.5065 | LR: 1.00e-05
|
| 873 |
+
[2026-04-25 20:46:21] Epoch 1 | Step 7620 | Loss: 1.5062 | LR: 1.00e-05
|
| 874 |
+
[2026-04-25 20:46:23] Epoch 1 | Step 7630 | Loss: 1.5060 | LR: 1.00e-05
|
| 875 |
+
[2026-04-25 20:46:26] Epoch 1 | Step 7640 | Loss: 1.5058 | LR: 1.00e-05
|
| 876 |
+
[2026-04-25 20:46:29] Epoch 1 | Step 7650 | Loss: 1.5054 | LR: 1.00e-05
|
| 877 |
+
[2026-04-25 20:46:31] Epoch 1 | Step 7660 | Loss: 1.5052 | LR: 1.00e-05
|
| 878 |
+
[2026-04-25 20:46:34] Epoch 1 | Step 7670 | Loss: 1.5049 | LR: 1.00e-05
|
| 879 |
+
[2026-04-25 20:46:36] Epoch 1 | Step 7680 | Loss: 1.5047 | LR: 1.00e-05
|
| 880 |
+
[2026-04-25 20:46:39] Epoch 1 | Step 7690 | Loss: 1.5048 | LR: 1.00e-05
|
| 881 |
+
[2026-04-25 20:46:42] Epoch 1 | Step 7700 | Loss: 1.5044 | LR: 1.00e-05
|
| 882 |
+
[2026-04-25 20:46:44] Epoch 1 | Step 7710 | Loss: 1.5040 | LR: 1.00e-05
|
| 883 |
+
[2026-04-25 20:46:47] Epoch 1 | Step 7720 | Loss: 1.5041 | LR: 1.00e-05
|
| 884 |
+
[2026-04-25 20:46:49] Epoch 1 | Step 7730 | Loss: 1.5041 | LR: 1.00e-05
|
| 885 |
+
[2026-04-25 20:46:52] Epoch 1 | Step 7740 | Loss: 1.5041 | LR: 1.00e-05
|
| 886 |
+
[2026-04-25 20:46:55] Epoch 1 | Step 7750 | Loss: 1.5040 | LR: 1.00e-05
|
| 887 |
+
[2026-04-25 20:46:57] Epoch 1 | Step 7760 | Loss: 1.5037 | LR: 1.00e-05
|
| 888 |
+
[2026-04-25 20:47:00] Epoch 1 | Step 7770 | Loss: 1.5035 | LR: 1.00e-05
|
| 889 |
+
[2026-04-25 20:47:02] Epoch 1 | Step 7780 | Loss: 1.5032 | LR: 1.00e-05
|
| 890 |
+
[2026-04-25 20:47:05] Epoch 1 | Step 7790 | Loss: 1.5031 | LR: 1.00e-05
|
| 891 |
+
[2026-04-25 20:47:07] Epoch 1 | Step 7800 | Loss: 1.5028 | LR: 1.00e-05
|
| 892 |
+
[2026-04-25 20:47:10] Epoch 1 | Step 7810 | Loss: 1.5029 | LR: 1.00e-05
|
| 893 |
+
[2026-04-25 20:47:12] Epoch 1 | Step 7820 | Loss: 1.5028 | LR: 1.00e-05
|
| 894 |
+
[2026-04-25 20:47:15] Epoch 1 | Step 7830 | Loss: 1.5027 | LR: 1.00e-05
|
| 895 |
+
[2026-04-25 20:47:18] Epoch 1 | Step 7840 | Loss: 1.5024 | LR: 1.00e-05
|
| 896 |
+
[2026-04-25 20:47:20] Epoch 1 | Step 7850 | Loss: 1.5020 | LR: 1.00e-05
|
| 897 |
+
[2026-04-25 20:47:23] Epoch 1 | Step 7860 | Loss: 1.5019 | LR: 1.00e-05
|
| 898 |
+
[2026-04-25 20:47:25] Epoch 1 | Step 7870 | Loss: 1.5016 | LR: 1.00e-05
|
| 899 |
+
[2026-04-25 20:47:28] Epoch 1 | Step 7880 | Loss: 1.5016 | LR: 1.00e-05
|
| 900 |
+
[2026-04-25 20:47:31] Epoch 1 | Step 7890 | Loss: 1.5013 | LR: 1.00e-05
|
| 901 |
+
[2026-04-25 20:47:34] Epoch 1 | Step 7900 | Loss: 1.5012 | LR: 1.00e-05
|
| 902 |
+
[2026-04-25 20:47:37] Epoch 1 | Step 7910 | Loss: 1.5013 | LR: 1.00e-05
|
| 903 |
+
[2026-04-25 20:47:39] Epoch 1 | Step 7920 | Loss: 1.5011 | LR: 1.00e-05
|
| 904 |
+
[2026-04-25 20:47:42] Epoch 1 | Step 7930 | Loss: 1.5011 | LR: 1.00e-05
|
| 905 |
+
[2026-04-25 20:47:44] Epoch 1 | Step 7940 | Loss: 1.5010 | LR: 1.00e-05
|
| 906 |
+
[2026-04-25 20:47:47] Epoch 1 | Step 7950 | Loss: 1.5010 | LR: 1.00e-05
|
| 907 |
+
[2026-04-25 20:47:49] Epoch 1 | Step 7960 | Loss: 1.5009 | LR: 1.00e-05
|
| 908 |
+
[2026-04-25 20:47:52] Epoch 1 | Step 7970 | Loss: 1.5008 | LR: 1.00e-05
|
| 909 |
+
[2026-04-25 20:47:54] Epoch 1 | Step 7980 | Loss: 1.5005 | LR: 1.00e-05
|
| 910 |
+
[2026-04-25 20:47:57] Epoch 1 | Step 7990 | Loss: 1.5003 | LR: 1.00e-05
|
| 911 |
+
[2026-04-25 20:47:59] Epoch 1 | Step 8000 | Loss: 1.5001 | LR: 1.00e-05
|
| 912 |
+
[2026-04-25 20:48:00] Validation | Batch 10/84 | Loss: 1.3175
|
| 913 |
+
[2026-04-25 20:48:00] Validation | Batch 20/84 | Loss: 1.3445
|
| 914 |
+
[2026-04-25 20:48:01] Validation | Batch 30/84 | Loss: 1.4386
|
| 915 |
+
[2026-04-25 20:48:01] Validation | Batch 40/84 | Loss: 1.4415
|
| 916 |
+
[2026-04-25 20:48:01] Validation | Batch 50/84 | Loss: 1.4190
|
| 917 |
+
[2026-04-25 20:48:02] Validation | Batch 60/84 | Loss: 1.3877
|
| 918 |
+
[2026-04-25 20:48:02] Validation | Batch 70/84 | Loss: 1.3681
|
| 919 |
+
[2026-04-25 20:48:03] Validation | Batch 80/84 | Loss: 1.3739
|
| 920 |
+
[2026-04-25 20:48:03] Validation | Batch 84/84 | Loss: 1.3637
|
| 921 |
+
[2026-04-25 20:48:03] Validation | Loss: 1.3637 | PPL: 4.00 | Time: 3.78s
|
| 922 |
+
[2026-04-25 20:48:06] New best model saved! Val loss: 1.3637
|
| 923 |
+
[2026-04-25 20:48:09] Epoch 1 | Step 8010 | Loss: 1.4999 | LR: 1.00e-05
|
| 924 |
+
[2026-04-25 20:48:11] Epoch 1 | Step 8020 | Loss: 1.4995 | LR: 1.00e-05
|
| 925 |
+
[2026-04-25 20:48:14] Epoch 1 | Step 8030 | Loss: 1.4992 | LR: 1.00e-05
|
| 926 |
+
[2026-04-25 20:48:17] Epoch 1 | Step 8040 | Loss: 1.4993 | LR: 1.00e-05
|
| 927 |
+
[2026-04-25 20:48:19] Epoch 1 | Step 8050 | Loss: 1.4990 | LR: 1.00e-05
|
| 928 |
+
[2026-04-25 20:48:22] Epoch 1 | Step 8060 | Loss: 1.4988 | LR: 1.00e-05
|
| 929 |
+
[2026-04-25 20:48:25] Epoch 1 | Step 8070 | Loss: 1.4986 | LR: 1.00e-05
|
| 930 |
+
[2026-04-25 20:48:27] Epoch 1 | Step 8080 | Loss: 1.4985 | LR: 1.00e-05
|
| 931 |
+
[2026-04-25 20:48:30] Epoch 1 | Step 8090 | Loss: 1.4982 | LR: 1.00e-05
|
| 932 |
+
[2026-04-25 20:48:33] Epoch 1 | Step 8100 | Loss: 1.4980 | LR: 1.00e-05
|
| 933 |
+
[2026-04-25 20:48:35] Epoch 1 | Step 8110 | Loss: 1.4981 | LR: 1.00e-05
|
| 934 |
+
[2026-04-25 20:48:38] Epoch 1 | Step 8120 | Loss: 1.4979 | LR: 1.00e-05
|
| 935 |
+
[2026-04-25 20:48:40] Epoch 1 | Step 8130 | Loss: 1.4977 | LR: 1.00e-05
|
| 936 |
+
[2026-04-25 20:48:43] Epoch 1 | Step 8140 | Loss: 1.4977 | LR: 1.00e-05
|
| 937 |
+
[2026-04-25 20:48:45] Epoch 1 | Step 8150 | Loss: 1.4976 | LR: 1.00e-05
|
| 938 |
+
[2026-04-25 20:48:47] Epoch 1 | Step 8160 | Loss: 1.4972 | LR: 1.00e-05
|
| 939 |
+
[2026-04-25 20:48:50] Epoch 1 | Step 8170 | Loss: 1.4971 | LR: 1.00e-05
|
| 940 |
+
[2026-04-25 20:48:53] Epoch 1 | Step 8180 | Loss: 1.4970 | LR: 1.00e-05
|
| 941 |
+
[2026-04-25 20:48:55] Epoch 1 | Step 8190 | Loss: 1.4967 | LR: 1.00e-05
|
| 942 |
+
[2026-04-25 20:48:58] Epoch 1 | Step 8200 | Loss: 1.4967 | LR: 1.00e-05
|
| 943 |
+
[2026-04-25 20:49:00] Epoch 1 | Step 8210 | Loss: 1.4965 | LR: 1.00e-05
|
| 944 |
+
[2026-04-25 20:49:03] Epoch 1 | Step 8220 | Loss: 1.4965 | LR: 1.00e-05
|
| 945 |
+
[2026-04-25 20:49:05] Epoch 1 | Step 8230 | Loss: 1.4964 | LR: 1.00e-05
|
| 946 |
+
[2026-04-25 20:49:08] Epoch 1 | Step 8240 | Loss: 1.4964 | LR: 1.00e-05
|
| 947 |
+
[2026-04-25 20:49:10] Epoch 1 | Step 8250 | Loss: 1.4962 | LR: 1.00e-05
|
| 948 |
+
[2026-04-25 20:49:13] Epoch 1 | Step 8260 | Loss: 1.4962 | LR: 1.00e-05
|
| 949 |
+
[2026-04-25 20:49:15] Epoch 1 | Step 8270 | Loss: 1.4962 | LR: 1.00e-05
|
| 950 |
+
[2026-04-25 20:49:18] Epoch 1 | Step 8280 | Loss: 1.4962 | LR: 1.00e-05
|
| 951 |
+
[2026-04-25 20:49:21] Epoch 1 | Step 8290 | Loss: 1.4962 | LR: 1.00e-05
|
| 952 |
+
[2026-04-25 20:49:23] Epoch 1 | Step 8300 | Loss: 1.4958 | LR: 1.00e-05
|
| 953 |
+
[2026-04-25 20:49:25] Epoch 1 | Step 8310 | Loss: 1.4958 | LR: 1.00e-05
|
| 954 |
+
[2026-04-25 20:49:28] Epoch 1 | Step 8320 | Loss: 1.4957 | LR: 1.00e-05
|
| 955 |
+
[2026-04-25 20:49:30] Epoch 1 | Step 8330 | Loss: 1.4957 | LR: 1.00e-05
|
| 956 |
+
[2026-04-25 20:49:33] Epoch 1 | Step 8340 | Loss: 1.4955 | LR: 1.00e-05
|
| 957 |
+
[2026-04-25 20:49:35] Epoch 1 | Step 8350 | Loss: 1.4953 | LR: 1.00e-05
|
| 958 |
+
[2026-04-25 20:49:38] Epoch 1 | Step 8360 | Loss: 1.4951 | LR: 1.00e-05
|
| 959 |
+
[2026-04-25 20:49:40] Epoch 1 | Step 8370 | Loss: 1.4950 | LR: 1.00e-05
|
| 960 |
+
[2026-04-25 20:49:43] Epoch 1 | Step 8380 | Loss: 1.4948 | LR: 1.00e-05
|
| 961 |
+
[2026-04-25 20:49:45] Epoch 1 | Step 8390 | Loss: 1.4948 | LR: 1.00e-05
|
| 962 |
+
[2026-04-25 20:49:48] Epoch 1 | Step 8400 | Loss: 1.4946 | LR: 1.00e-05
|
| 963 |
+
[2026-04-25 20:49:50] Epoch 1 | Step 8410 | Loss: 1.4945 | LR: 1.00e-05
|
| 964 |
+
[2026-04-25 20:49:53] Epoch 1 | Step 8420 | Loss: 1.4945 | LR: 1.00e-05
|
| 965 |
+
[2026-04-25 20:49:55] Epoch 1 | Step 8430 | Loss: 1.4942 | LR: 1.00e-05
|
| 966 |
+
[2026-04-25 20:49:58] Epoch 1 | Step 8440 | Loss: 1.4941 | LR: 1.00e-05
|
| 967 |
+
[2026-04-25 20:50:00] Epoch 1 | Step 8450 | Loss: 1.4940 | LR: 1.00e-05
|
| 968 |
+
[2026-04-25 20:50:03] Epoch 1 | Step 8460 | Loss: 1.4939 | LR: 1.00e-05
|
| 969 |
+
[2026-04-25 20:50:05] Epoch 1 | Step 8470 | Loss: 1.4937 | LR: 1.00e-05
|
| 970 |
+
[2026-04-25 20:50:08] Epoch 1 | Step 8480 | Loss: 1.4935 | LR: 1.00e-05
|
| 971 |
+
[2026-04-25 20:50:10] Epoch 1 | Step 8490 | Loss: 1.4932 | LR: 1.00e-05
|
| 972 |
+
[2026-04-25 20:50:13] Epoch 1 | Step 8500 | Loss: 1.4931 | LR: 1.00e-05
|
| 973 |
+
[2026-04-25 20:50:15] Epoch 1 | Step 8510 | Loss: 1.4929 | LR: 1.00e-05
|
| 974 |
+
[2026-04-25 20:50:18] Epoch 1 | Step 8520 | Loss: 1.4929 | LR: 1.00e-05
|
| 975 |
+
[2026-04-25 20:50:20] Epoch 1 | Step 8530 | Loss: 1.4927 | LR: 1.00e-05
|
| 976 |
+
[2026-04-25 20:50:23] Epoch 1 | Step 8540 | Loss: 1.4928 | LR: 1.00e-05
|
| 977 |
+
[2026-04-25 20:50:26] Epoch 1 | Step 8550 | Loss: 1.4926 | LR: 1.00e-05
|
| 978 |
+
[2026-04-25 20:50:28] Epoch 1 | Step 8560 | Loss: 1.4925 | LR: 1.00e-05
|
| 979 |
+
[2026-04-25 20:50:30] Epoch 1 | Step 8570 | Loss: 1.4924 | LR: 1.00e-05
|
| 980 |
+
[2026-04-25 20:50:33] Epoch 1 | Step 8580 | Loss: 1.4921 | LR: 1.00e-05
|
| 981 |
+
[2026-04-25 20:50:36] Epoch 1 | Step 8590 | Loss: 1.4918 | LR: 1.00e-05
|
| 982 |
+
[2026-04-25 20:50:38] Epoch 1 | Step 8600 | Loss: 1.4916 | LR: 1.00e-05
|
| 983 |
+
[2026-04-25 20:50:41] Epoch 1 | Step 8610 | Loss: 1.4916 | LR: 1.00e-05
|
| 984 |
+
[2026-04-25 20:50:43] Epoch 1 | Step 8620 | Loss: 1.4914 | LR: 1.00e-05
|
| 985 |
+
[2026-04-25 20:50:46] Epoch 1 | Step 8630 | Loss: 1.4910 | LR: 1.00e-05
|
| 986 |
+
[2026-04-25 20:50:48] Epoch 1 | Step 8640 | Loss: 1.4911 | LR: 1.00e-05
|
| 987 |
+
[2026-04-25 20:50:51] Epoch 1 | Step 8650 | Loss: 1.4910 | LR: 1.00e-05
|
| 988 |
+
[2026-04-25 20:50:53] Epoch 1 | Step 8660 | Loss: 1.4907 | LR: 1.00e-05
|
| 989 |
+
[2026-04-25 20:50:56] Epoch 1 | Step 8670 | Loss: 1.4907 | LR: 1.00e-05
|
| 990 |
+
[2026-04-25 20:50:58] Epoch 1 | Step 8680 | Loss: 1.4907 | LR: 1.00e-05
|
| 991 |
+
[2026-04-25 20:51:01] Epoch 1 | Step 8690 | Loss: 1.4903 | LR: 1.00e-05
|
| 992 |
+
[2026-04-25 20:51:03] Epoch 1 | Step 8700 | Loss: 1.4902 | LR: 1.00e-05
|
| 993 |
+
[2026-04-25 20:51:06] Epoch 1 | Step 8710 | Loss: 1.4899 | LR: 1.00e-05
|
| 994 |
+
[2026-04-25 20:51:09] Epoch 1 | Step 8720 | Loss: 1.4897 | LR: 1.00e-05
|
| 995 |
+
[2026-04-25 20:51:11] Epoch 1 | Step 8730 | Loss: 1.4896 | LR: 1.00e-05
|
| 996 |
+
[2026-04-25 20:51:14] Epoch 1 | Step 8740 | Loss: 1.4897 | LR: 1.00e-05
|
| 997 |
+
[2026-04-25 20:51:16] Epoch 1 | Step 8750 | Loss: 1.4895 | LR: 1.00e-05
|
| 998 |
+
[2026-04-25 20:51:19] Epoch 1 | Step 8760 | Loss: 1.4894 | LR: 1.00e-05
|
| 999 |
+
[2026-04-25 20:51:21] Epoch 1 | Step 8770 | Loss: 1.4891 | LR: 1.00e-05
|
| 1000 |
+
[2026-04-25 20:51:24] Epoch 1 | Step 8780 | Loss: 1.4889 | LR: 1.00e-05
|
| 1001 |
+
[2026-04-25 20:51:27] Epoch 1 | Step 8790 | Loss: 1.4888 | LR: 1.00e-05
|
| 1002 |
+
[2026-04-25 20:51:29] Epoch 1 | Step 8800 | Loss: 1.4885 | LR: 1.00e-05
|
| 1003 |
+
[2026-04-25 20:51:32] Epoch 1 | Step 8810 | Loss: 1.4884 | LR: 1.00e-05
|
| 1004 |
+
[2026-04-25 20:51:35] Epoch 1 | Step 8820 | Loss: 1.4881 | LR: 1.00e-05
|
| 1005 |
+
[2026-04-25 20:51:37] Epoch 1 | Step 8830 | Loss: 1.4881 | LR: 1.00e-05
|
| 1006 |
+
[2026-04-25 20:51:40] Epoch 1 | Step 8840 | Loss: 1.4879 | LR: 1.00e-05
|
| 1007 |
+
[2026-04-25 20:51:42] Epoch 1 | Step 8850 | Loss: 1.4878 | LR: 1.00e-05
|
| 1008 |
+
[2026-04-25 20:51:45] Epoch 1 | Step 8860 | Loss: 1.4877 | LR: 1.00e-05
|
| 1009 |
+
[2026-04-25 20:51:47] Epoch 1 | Step 8870 | Loss: 1.4877 | LR: 1.00e-05
|
| 1010 |
+
[2026-04-25 20:51:50] Epoch 1 | Step 8880 | Loss: 1.4876 | LR: 1.00e-05
|
| 1011 |
+
[2026-04-25 20:51:52] Epoch 1 | Step 8890 | Loss: 1.4874 | LR: 1.00e-05
|
| 1012 |
+
[2026-04-25 20:51:55] Epoch 1 | Step 8900 | Loss: 1.4869 | LR: 1.00e-05
|
| 1013 |
+
[2026-04-25 20:51:57] Epoch 1 | Step 8910 | Loss: 1.4871 | LR: 1.00e-05
|
| 1014 |
+
[2026-04-25 20:52:00] Epoch 1 | Step 8920 | Loss: 1.4866 | LR: 1.00e-05
|
| 1015 |
+
[2026-04-25 20:52:02] Epoch 1 | Step 8930 | Loss: 1.4864 | LR: 1.00e-05
|
| 1016 |
+
[2026-04-25 20:52:05] Epoch 1 | Step 8940 | Loss: 1.4864 | LR: 1.00e-05
|
| 1017 |
+
[2026-04-25 20:52:07] Epoch 1 | Step 8950 | Loss: 1.4864 | LR: 1.00e-05
|
| 1018 |
+
[2026-04-25 20:52:10] Epoch 1 | Step 8960 | Loss: 1.4863 | LR: 1.00e-05
|
| 1019 |
+
[2026-04-25 20:52:13] Epoch 1 | Step 8970 | Loss: 1.4862 | LR: 1.00e-05
|
| 1020 |
+
[2026-04-25 20:52:15] Epoch 1 | Step 8980 | Loss: 1.4860 | LR: 1.00e-05
|
| 1021 |
+
[2026-04-25 20:52:18] Epoch 1 | Step 8990 | Loss: 1.4858 | LR: 1.00e-05
|
| 1022 |
+
[2026-04-25 20:52:20] Epoch 1 | Step 9000 | Loss: 1.4857 | LR: 1.00e-05
|
| 1023 |
+
[2026-04-25 20:52:23] Epoch 1 | Step 9010 | Loss: 1.4857 | LR: 1.00e-05
|
| 1024 |
+
[2026-04-25 20:52:26] Epoch 1 | Step 9020 | Loss: 1.4857 | LR: 1.00e-05
|
| 1025 |
+
[2026-04-25 20:52:28] Epoch 1 | Step 9030 | Loss: 1.4856 | LR: 1.00e-05
|
| 1026 |
+
[2026-04-25 20:52:30] Epoch 1 | Step 9040 | Loss: 1.4854 | LR: 1.00e-05
|
| 1027 |
+
[2026-04-25 20:52:33] Epoch 1 | Step 9050 | Loss: 1.4852 | LR: 1.00e-05
|
| 1028 |
+
[2026-04-25 20:52:35] Epoch 1 | Step 9060 | Loss: 1.4852 | LR: 1.00e-05
|
| 1029 |
+
[2026-04-25 20:52:38] Epoch 1 | Step 9070 | Loss: 1.4849 | LR: 1.00e-05
|
| 1030 |
+
[2026-04-25 20:52:40] Epoch 1 | Step 9080 | Loss: 1.4848 | LR: 1.00e-05
|
| 1031 |
+
[2026-04-25 20:52:43] Epoch 1 | Step 9090 | Loss: 1.4846 | LR: 1.00e-05
|
| 1032 |
+
[2026-04-25 20:52:45] Epoch 1 | Step 9100 | Loss: 1.4846 | LR: 1.00e-05
|
| 1033 |
+
[2026-04-25 20:52:48] Epoch 1 | Step 9110 | Loss: 1.4845 | LR: 1.00e-05
|
| 1034 |
+
[2026-04-25 20:52:50] Epoch 1 | Step 9120 | Loss: 1.4845 | LR: 1.00e-05
|
| 1035 |
+
[2026-04-25 20:52:53] Epoch 1 | Step 9130 | Loss: 1.4843 | LR: 1.00e-05
|
| 1036 |
+
[2026-04-25 20:52:55] Epoch 1 | Step 9140 | Loss: 1.4842 | LR: 1.00e-05
|
| 1037 |
+
[2026-04-25 20:52:57] Epoch 1 | Step 9150 | Loss: 1.4842 | LR: 1.00e-05
|
| 1038 |
+
[2026-04-25 20:53:00] Epoch 1 | Step 9160 | Loss: 1.4840 | LR: 1.00e-05
|
| 1039 |
+
[2026-04-25 20:53:03] Epoch 1 | Step 9170 | Loss: 1.4836 | LR: 1.00e-05
|
| 1040 |
+
[2026-04-25 20:53:05] Epoch 1 | Step 9180 | Loss: 1.4835 | LR: 1.00e-05
|
| 1041 |
+
[2026-04-25 20:53:08] Epoch 1 | Step 9190 | Loss: 1.4831 | LR: 1.00e-05
|
| 1042 |
+
[2026-04-25 20:53:10] Epoch 1 | Step 9200 | Loss: 1.4830 | LR: 1.00e-05
|
| 1043 |
+
[2026-04-25 20:53:13] Epoch 1 | Step 9210 | Loss: 1.4829 | LR: 1.00e-05
|
| 1044 |
+
[2026-04-25 20:53:16] Epoch 1 | Step 9220 | Loss: 1.4827 | LR: 1.00e-05
|
| 1045 |
+
[2026-04-25 20:53:18] Epoch 1 | Step 9230 | Loss: 1.4826 | LR: 1.00e-05
|
| 1046 |
+
[2026-04-25 20:53:21] Epoch 1 | Step 9240 | Loss: 1.4822 | LR: 1.00e-05
|
| 1047 |
+
[2026-04-25 20:53:23] Epoch 1 | Step 9250 | Loss: 1.4820 | LR: 1.00e-05
|
| 1048 |
+
[2026-04-25 20:53:26] Epoch 1 | Step 9260 | Loss: 1.4817 | LR: 1.00e-05
|
| 1049 |
+
[2026-04-25 20:53:29] Epoch 1 | Step 9270 | Loss: 1.4816 | LR: 1.00e-05
|
| 1050 |
+
[2026-04-25 20:53:31] Epoch 1 | Step 9280 | Loss: 1.4815 | LR: 1.00e-05
|
| 1051 |
+
[2026-04-25 20:53:34] Epoch 1 | Step 9290 | Loss: 1.4814 | LR: 1.00e-05
|
| 1052 |
+
[2026-04-25 20:53:36] Epoch 1 | Step 9300 | Loss: 1.4813 | LR: 1.00e-05
|
| 1053 |
+
[2026-04-25 20:53:39] Epoch 1 | Step 9310 | Loss: 1.4812 | LR: 1.00e-05
|
| 1054 |
+
[2026-04-25 20:53:41] Epoch 1 | Step 9320 | Loss: 1.4811 | LR: 1.00e-05
|
| 1055 |
+
[2026-04-25 20:53:44] Epoch 1 | Step 9330 | Loss: 1.4810 | LR: 1.00e-05
|
| 1056 |
+
[2026-04-25 20:53:46] Epoch 1 | Step 9340 | Loss: 1.4807 | LR: 1.00e-05
|
| 1057 |
+
[2026-04-25 20:53:49] Epoch 1 | Step 9350 | Loss: 1.4806 | LR: 1.00e-05
|
| 1058 |
+
[2026-04-25 20:53:51] Epoch 1 | Step 9360 | Loss: 1.4805 | LR: 1.00e-05
|
| 1059 |
+
[2026-04-25 20:53:54] Epoch 1 | Step 9370 | Loss: 1.4804 | LR: 1.00e-05
|
| 1060 |
+
[2026-04-25 20:53:56] Epoch 1 | Step 9380 | Loss: 1.4803 | LR: 1.00e-05
|
| 1061 |
+
[2026-04-25 20:53:59] Epoch 1 | Step 9390 | Loss: 1.4799 | LR: 1.00e-05
|
| 1062 |
+
[2026-04-25 20:54:01] Epoch 1 | Step 9400 | Loss: 1.4800 | LR: 1.00e-05
|
| 1063 |
+
[2026-04-25 20:54:04] Epoch 1 | Step 9410 | Loss: 1.4799 | LR: 1.00e-05
|
| 1064 |
+
[2026-04-25 20:54:07] Epoch 1 | Step 9420 | Loss: 1.4800 | LR: 1.00e-05
|
| 1065 |
+
[2026-04-25 20:54:09] Epoch 1 | Step 9430 | Loss: 1.4799 | LR: 1.00e-05
|
| 1066 |
+
[2026-04-25 20:54:12] Epoch 1 | Step 9440 | Loss: 1.4798 | LR: 1.00e-05
|
| 1067 |
+
[2026-04-25 20:54:14] Epoch 1 | Step 9450 | Loss: 1.4798 | LR: 1.00e-05
|
| 1068 |
+
[2026-04-25 20:54:17] Epoch 1 | Step 9460 | Loss: 1.4796 | LR: 1.00e-05
|
| 1069 |
+
[2026-04-25 20:54:19] Epoch 1 | Step 9470 | Loss: 1.4793 | LR: 1.00e-05
|
| 1070 |
+
[2026-04-25 20:54:22] Epoch 1 | Step 9480 | Loss: 1.4790 | LR: 1.00e-05
|
| 1071 |
+
[2026-04-25 20:54:24] Epoch 1 | Step 9490 | Loss: 1.4790 | LR: 1.00e-05
|
| 1072 |
+
[2026-04-25 20:54:27] Epoch 1 | Step 9500 | Loss: 1.4788 | LR: 1.00e-05
|
| 1073 |
+
[2026-04-25 20:54:30] Epoch 1 | Step 9510 | Loss: 1.4787 | LR: 1.00e-05
|
| 1074 |
+
[2026-04-25 20:54:32] Epoch 1 | Step 9520 | Loss: 1.4785 | LR: 1.00e-05
|
| 1075 |
+
[2026-04-25 20:54:35] Epoch 1 | Step 9530 | Loss: 1.4785 | LR: 1.00e-05
|
| 1076 |
+
[2026-04-25 20:54:37] Epoch 1 | Step 9540 | Loss: 1.4782 | LR: 1.00e-05
|
| 1077 |
+
[2026-04-25 20:54:40] Epoch 1 | Step 9550 | Loss: 1.4781 | LR: 1.00e-05
|
| 1078 |
+
[2026-04-25 20:54:42] Epoch 1 | Step 9560 | Loss: 1.4781 | LR: 1.00e-05
|
| 1079 |
+
[2026-04-25 20:54:45] Epoch 1 | Step 9570 | Loss: 1.4781 | LR: 1.00e-05
|
| 1080 |
+
[2026-04-25 20:54:47] Epoch 1 | Step 9580 | Loss: 1.4781 | LR: 1.00e-05
|
| 1081 |
+
[2026-04-25 20:54:50] Epoch 1 | Step 9590 | Loss: 1.4780 | LR: 1.00e-05
|
| 1082 |
+
[2026-04-25 20:54:53] Epoch 1 | Step 9600 | Loss: 1.4778 | LR: 1.00e-05
|
| 1083 |
+
[2026-04-25 20:54:56] Epoch 1 | Step 9610 | Loss: 1.4776 | LR: 1.00e-05
|
| 1084 |
+
[2026-04-25 20:54:58] Epoch 1 | Step 9620 | Loss: 1.4776 | LR: 1.00e-05
|
| 1085 |
+
[2026-04-25 20:55:01] Epoch 1 | Step 9630 | Loss: 1.4776 | LR: 1.00e-05
|
| 1086 |
+
[2026-04-25 20:55:04] Epoch 1 | Step 9640 | Loss: 1.4775 | LR: 1.00e-05
|
| 1087 |
+
[2026-04-25 20:55:07] Epoch 1 | Step 9650 | Loss: 1.4773 | LR: 1.00e-05
|
| 1088 |
+
[2026-04-25 20:55:09] Epoch 1 | Step 9660 | Loss: 1.4772 | LR: 1.00e-05
|
| 1089 |
+
[2026-04-25 20:55:12] Epoch 1 | Step 9670 | Loss: 1.4772 | LR: 1.00e-05
|
| 1090 |
+
[2026-04-25 20:55:14] Epoch 1 | Step 9680 | Loss: 1.4770 | LR: 1.00e-05
|
| 1091 |
+
[2026-04-25 20:55:17] Epoch 1 | Step 9690 | Loss: 1.4769 | LR: 1.00e-05
|
| 1092 |
+
[2026-04-25 20:55:19] Epoch 1 | Step 9700 | Loss: 1.4768 | LR: 1.00e-05
|
| 1093 |
+
[2026-04-25 20:55:22] Epoch 1 | Step 9710 | Loss: 1.4767 | LR: 1.00e-05
|
| 1094 |
+
[2026-04-25 20:55:24] Epoch 1 | Step 9720 | Loss: 1.4766 | LR: 1.00e-05
|
| 1095 |
+
[2026-04-25 20:55:27] Epoch 1 | Step 9730 | Loss: 1.4766 | LR: 1.00e-05
|
| 1096 |
+
[2026-04-25 20:55:29] Epoch 1 | Step 9740 | Loss: 1.4765 | LR: 1.00e-05
|
| 1097 |
+
[2026-04-25 20:55:32] Epoch 1 | Step 9750 | Loss: 1.4763 | LR: 1.00e-05
|
| 1098 |
+
[2026-04-25 20:55:34] Epoch 1 | Step 9760 | Loss: 1.4762 | LR: 1.00e-05
|
| 1099 |
+
[2026-04-25 20:55:37] Epoch 1 | Step 9770 | Loss: 1.4761 | LR: 1.00e-05
|
| 1100 |
+
[2026-04-25 20:55:39] Epoch 1 | Step 9780 | Loss: 1.4759 | LR: 1.00e-05
|
| 1101 |
+
[2026-04-25 20:55:42] Epoch 1 | Step 9790 | Loss: 1.4757 | LR: 1.00e-05
|
| 1102 |
+
[2026-04-25 20:55:45] Epoch 1 | Step 9800 | Loss: 1.4757 | LR: 1.00e-05
|
| 1103 |
+
[2026-04-25 20:55:47] Epoch 1 | Step 9810 | Loss: 1.4755 | LR: 1.00e-05
|
| 1104 |
+
[2026-04-25 20:55:50] Epoch 1 | Step 9820 | Loss: 1.4754 | LR: 1.00e-05
|
| 1105 |
+
[2026-04-25 20:55:53] Epoch 1 | Step 9830 | Loss: 1.4753 | LR: 1.00e-05
|
| 1106 |
+
[2026-04-25 20:55:55] Epoch 1 | Step 9840 | Loss: 1.4754 | LR: 1.00e-05
|
| 1107 |
+
[2026-04-25 20:55:58] Epoch 1 | Step 9850 | Loss: 1.4752 | LR: 1.00e-05
|
| 1108 |
+
[2026-04-25 20:56:00] Epoch 1 | Step 9860 | Loss: 1.4749 | LR: 1.00e-05
|
| 1109 |
+
[2026-04-25 20:56:03] Epoch 1 | Step 9870 | Loss: 1.4749 | LR: 1.00e-05
|
| 1110 |
+
[2026-04-25 20:56:05] Epoch 1 | Step 9880 | Loss: 1.4748 | LR: 1.00e-05
|
| 1111 |
+
[2026-04-25 20:56:08] Epoch 1 completed in 2548.63s | Loss: 1.4748
|
| 1112 |
+
[2026-04-25 20:56:08]
|
| 1113 |
+
Training completed!
|
| 1114 |
+
[2026-04-25 20:56:10] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-4/model_final.pt
|
lr_sweep/pythia_1b_lr_1e-5/.hydra/config.yaml
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
model:
|
| 2 |
+
name: EleutherAI/pythia-1b
|
| 3 |
+
checkpoint_path: null
|
| 4 |
+
from_scratch: false
|
| 5 |
+
training:
|
| 6 |
+
epochs: 1
|
| 7 |
+
batch_size: 4
|
| 8 |
+
eval_batch_size: 12
|
| 9 |
+
gradient_accumulation_steps: 4
|
| 10 |
+
lr: 1.0e-05
|
| 11 |
+
weight_decay: 0.1
|
| 12 |
+
betas:
|
| 13 |
+
- 0.9
|
| 14 |
+
- 0.95
|
| 15 |
+
eps: 1.0e-08
|
| 16 |
+
lr_scheduler: wsd
|
| 17 |
+
warmup_ratio: 0.1
|
| 18 |
+
decay_ratio: 0.2
|
| 19 |
+
warmup_steps: 100
|
| 20 |
+
min_lr_ratio: 0.1
|
| 21 |
+
max_grad_norm: 1.0
|
| 22 |
+
use_amp: true
|
| 23 |
+
resume: false
|
| 24 |
+
resume_checkpoint: null
|
| 25 |
+
data:
|
| 26 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 27 |
+
max_context_len: 4096
|
| 28 |
+
max_target_len: 256
|
| 29 |
+
num_workers: 4
|
| 30 |
+
pin_memory: true
|
| 31 |
+
max_train_samples: null
|
| 32 |
+
max_val_samples: 2000
|
| 33 |
+
logging:
|
| 34 |
+
log_interval: 10
|
| 35 |
+
save_interval: 0
|
| 36 |
+
eval_interval: 2000
|
| 37 |
+
save_every_epoch: false
|
| 38 |
+
tracking:
|
| 39 |
+
enabled: true
|
| 40 |
+
backend: wandb
|
| 41 |
+
project: code-completion_lr-sweep
|
| 42 |
+
run_name: pythia_1b_lr_1e-5
|
| 43 |
+
entity: null
|
| 44 |
+
base_url: https://wandb.platun0v.ru
|
| 45 |
+
local_dir: ${paths.output_dir}
|
| 46 |
+
paths:
|
| 47 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 48 |
+
seed: 42
|
| 49 |
+
device: cuda
|
lr_sweep/pythia_1b_lr_1e-5/.hydra/hydra.yaml
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
hydra:
|
| 2 |
+
run:
|
| 3 |
+
dir: ${paths.output_dir}
|
| 4 |
+
sweep:
|
| 5 |
+
dir: outputs/multirun/${now:%Y-%m-%d}/${now:%H-%M-%S}
|
| 6 |
+
subdir: ${hydra.job.num}
|
| 7 |
+
launcher:
|
| 8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
| 9 |
+
sweeper:
|
| 10 |
+
_target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
|
| 11 |
+
max_batch_size: null
|
| 12 |
+
params: null
|
| 13 |
+
help:
|
| 14 |
+
app_name: ${hydra.job.name}
|
| 15 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
| 16 |
+
|
| 17 |
+
'
|
| 18 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
| 19 |
+
|
| 20 |
+
Use --hydra-help to view Hydra specific help
|
| 21 |
+
|
| 22 |
+
'
|
| 23 |
+
template: '${hydra.help.header}
|
| 24 |
+
|
| 25 |
+
== Configuration groups ==
|
| 26 |
+
|
| 27 |
+
Compose your configuration from those groups (group=option)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
$APP_CONFIG_GROUPS
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
== Config ==
|
| 34 |
+
|
| 35 |
+
Override anything in the config (foo.bar=value)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
$CONFIG
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
${hydra.help.footer}
|
| 42 |
+
|
| 43 |
+
'
|
| 44 |
+
hydra_help:
|
| 45 |
+
template: 'Hydra (${hydra.runtime.version})
|
| 46 |
+
|
| 47 |
+
See https://hydra.cc for more info.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
== Flags ==
|
| 51 |
+
|
| 52 |
+
$FLAGS_HELP
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
== Configuration groups ==
|
| 56 |
+
|
| 57 |
+
Compose your configuration from those groups (For example, append hydra/job_logging=disabled
|
| 58 |
+
to command line)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
$HYDRA_CONFIG_GROUPS
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
Use ''--cfg hydra'' to Show the Hydra config.
|
| 65 |
+
|
| 66 |
+
'
|
| 67 |
+
hydra_help: ???
|
| 68 |
+
hydra_logging:
|
| 69 |
+
version: 1
|
| 70 |
+
formatters:
|
| 71 |
+
simple:
|
| 72 |
+
format: '[%(asctime)s][HYDRA] %(message)s'
|
| 73 |
+
handlers:
|
| 74 |
+
console:
|
| 75 |
+
class: logging.StreamHandler
|
| 76 |
+
formatter: simple
|
| 77 |
+
stream: ext://sys.stdout
|
| 78 |
+
root:
|
| 79 |
+
level: INFO
|
| 80 |
+
handlers:
|
| 81 |
+
- console
|
| 82 |
+
loggers:
|
| 83 |
+
logging_example:
|
| 84 |
+
level: DEBUG
|
| 85 |
+
disable_existing_loggers: false
|
| 86 |
+
job_logging:
|
| 87 |
+
version: 1
|
| 88 |
+
formatters:
|
| 89 |
+
simple:
|
| 90 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
| 91 |
+
handlers:
|
| 92 |
+
console:
|
| 93 |
+
class: logging.StreamHandler
|
| 94 |
+
formatter: simple
|
| 95 |
+
stream: ext://sys.stdout
|
| 96 |
+
file:
|
| 97 |
+
class: logging.FileHandler
|
| 98 |
+
formatter: simple
|
| 99 |
+
filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
|
| 100 |
+
root:
|
| 101 |
+
level: INFO
|
| 102 |
+
handlers:
|
| 103 |
+
- console
|
| 104 |
+
- file
|
| 105 |
+
disable_existing_loggers: false
|
| 106 |
+
env: {}
|
| 107 |
+
mode: RUN
|
| 108 |
+
searchpath: []
|
| 109 |
+
callbacks: {}
|
| 110 |
+
output_subdir: .hydra
|
| 111 |
+
overrides:
|
| 112 |
+
hydra:
|
| 113 |
+
- hydra.mode=RUN
|
| 114 |
+
task:
|
| 115 |
+
- tracking=wandb
|
| 116 |
+
- tracking.project=code-completion_lr-sweep
|
| 117 |
+
- tracking.run_name=pythia_1b_lr_1e-5
|
| 118 |
+
- training.lr=1e-5
|
| 119 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 120 |
+
- model=pythia_1b
|
| 121 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 122 |
+
job:
|
| 123 |
+
name: train
|
| 124 |
+
chdir: false
|
| 125 |
+
override_dirname: data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full,model=pythia_1b,paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5,tracking.project=code-completion_lr-sweep,tracking.run_name=pythia_1b_lr_1e-5,tracking=wandb,training.lr=1e-5
|
| 126 |
+
id: ???
|
| 127 |
+
num: ???
|
| 128 |
+
config_name: config
|
| 129 |
+
env_set: {}
|
| 130 |
+
env_copy: []
|
| 131 |
+
config:
|
| 132 |
+
override_dirname:
|
| 133 |
+
kv_sep: '='
|
| 134 |
+
item_sep: ','
|
| 135 |
+
exclude_keys: []
|
| 136 |
+
runtime:
|
| 137 |
+
version: 1.3.2
|
| 138 |
+
version_base: '1.3'
|
| 139 |
+
cwd: /workspace/byte-llms-code/code_completion_exp/train_pythia
|
| 140 |
+
config_sources:
|
| 141 |
+
- path: hydra.conf
|
| 142 |
+
schema: pkg
|
| 143 |
+
provider: hydra
|
| 144 |
+
- path: /workspace/byte-llms-code/code_completion_exp/train_pythia/configs
|
| 145 |
+
schema: file
|
| 146 |
+
provider: main
|
| 147 |
+
- path: ''
|
| 148 |
+
schema: structured
|
| 149 |
+
provider: schema
|
| 150 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 151 |
+
choices:
|
| 152 |
+
paths: default
|
| 153 |
+
tracking: wandb
|
| 154 |
+
logging: default
|
| 155 |
+
data: default
|
| 156 |
+
training: default
|
| 157 |
+
model: pythia_1b
|
| 158 |
+
hydra/env: default
|
| 159 |
+
hydra/callbacks: null
|
| 160 |
+
hydra/job_logging: default
|
| 161 |
+
hydra/hydra_logging: default
|
| 162 |
+
hydra/hydra_help: default
|
| 163 |
+
hydra/help: default
|
| 164 |
+
hydra/sweeper: basic
|
| 165 |
+
hydra/launcher: basic
|
| 166 |
+
hydra/output: default
|
| 167 |
+
verbose: false
|
lr_sweep/pythia_1b_lr_1e-5/.hydra/overrides.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
- tracking=wandb
|
| 2 |
+
- tracking.project=code-completion_lr-sweep
|
| 3 |
+
- tracking.run_name=pythia_1b_lr_1e-5
|
| 4 |
+
- training.lr=1e-5
|
| 5 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 6 |
+
- model=pythia_1b
|
| 7 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
lr_sweep/pythia_1b_lr_1e-5/train.log
ADDED
|
@@ -0,0 +1,1259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 17:51:23,039][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 2 |
+
[2026-04-25 17:51:23] CUDA_VISIBLE_DEVICES: 2,3
|
| 3 |
+
[2026-04-25 17:51:23] Number of processes: 2
|
| 4 |
+
[2026-04-25 17:51:23] Process index: 0
|
| 5 |
+
[2026-04-25 17:51:23] Mixed precision: bf16
|
| 6 |
+
[2026-04-25 17:51:23] ============================================================
|
| 7 |
+
[2026-04-25 17:51:23] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 8 |
+
[2026-04-25 17:51:23] ============================================================
|
| 9 |
+
[2026-04-25 17:51:23] Config:
|
| 10 |
+
model:
|
| 11 |
+
name: EleutherAI/pythia-1b
|
| 12 |
+
checkpoint_path: null
|
| 13 |
+
from_scratch: false
|
| 14 |
+
training:
|
| 15 |
+
epochs: 1
|
| 16 |
+
batch_size: 4
|
| 17 |
+
eval_batch_size: 12
|
| 18 |
+
gradient_accumulation_steps: 4
|
| 19 |
+
lr: 1.0e-05
|
| 20 |
+
weight_decay: 0.1
|
| 21 |
+
betas:
|
| 22 |
+
- 0.9
|
| 23 |
+
- 0.95
|
| 24 |
+
eps: 1.0e-08
|
| 25 |
+
lr_scheduler: wsd
|
| 26 |
+
warmup_ratio: 0.1
|
| 27 |
+
decay_ratio: 0.2
|
| 28 |
+
warmup_steps: 100
|
| 29 |
+
min_lr_ratio: 0.1
|
| 30 |
+
max_grad_norm: 1.0
|
| 31 |
+
use_amp: true
|
| 32 |
+
resume: false
|
| 33 |
+
resume_checkpoint: null
|
| 34 |
+
data:
|
| 35 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 36 |
+
max_context_len: 4096
|
| 37 |
+
max_target_len: 256
|
| 38 |
+
num_workers: 4
|
| 39 |
+
pin_memory: true
|
| 40 |
+
max_train_samples: 20000
|
| 41 |
+
max_val_samples: 2000
|
| 42 |
+
logging:
|
| 43 |
+
log_interval: 10
|
| 44 |
+
save_interval: 3000
|
| 45 |
+
eval_interval: 1000
|
| 46 |
+
save_every_epoch: true
|
| 47 |
+
tracking:
|
| 48 |
+
enabled: true
|
| 49 |
+
backend: wandb
|
| 50 |
+
project: code-completion_lr-sweep
|
| 51 |
+
run_name: pythia_1b_lr_1e-5
|
| 52 |
+
entity: null
|
| 53 |
+
base_url: https://wandb.platun0v.ru
|
| 54 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 55 |
+
paths:
|
| 56 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 57 |
+
seed: 42
|
| 58 |
+
device: cuda
|
| 59 |
+
|
| 60 |
+
[2026-04-25 17:51:25] Initializing tokenizer...
|
| 61 |
+
[2026-04-25 17:51:26] Loading model...
|
| 62 |
+
[2026-04-25 17:51:30] Loaded pretrained: EleutherAI/pythia-1b
|
| 63 |
+
[2026-04-25 17:51:30] Total params: 1,011,781,632
|
| 64 |
+
[2026-04-25 17:51:30] Trainable params: 1,011,781,632
|
| 65 |
+
[2026-04-25 17:51:30] Creating dataloaders...
|
| 66 |
+
[2026-04-25 17:51:30] Train dataset size: 20000
|
| 67 |
+
[2026-04-25 17:51:30] Train batches per epoch (before DDP split): 5000
|
| 68 |
+
[2026-04-25 17:51:30] Validation dataset size: 2000
|
| 69 |
+
[2026-04-25 17:51:30] Validation batches: 167
|
| 70 |
+
[2026-04-25 17:51:30] Creating optimizer...
|
| 71 |
+
[2026-04-25 17:51:30] Total steps: 625, Steps per epoch: 2500
|
| 72 |
+
[2026-04-25 17:51:30] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 73 |
+
[2026-04-25 17:51:32] Train batches per epoch (after DDP split): 2500
|
| 74 |
+
[2026-04-25 17:51:32] Starting training...
|
| 75 |
+
[2026-04-25 17:51:32]
|
| 76 |
+
============================================================
|
| 77 |
+
[2026-04-25 17:51:32] EPOCH 1/1
|
| 78 |
+
[2026-04-25 17:51:32] ============================================================
|
| 79 |
+
[2026-04-25 17:51:35] Epoch 1 | Step 10 | Loss: 2.4974 | LR: 3.90e-06
|
| 80 |
+
[2026-04-25 17:51:38] Epoch 1 | Step 20 | Loss: 2.2461 | LR: 6.81e-06
|
| 81 |
+
[2026-04-25 17:51:41] Epoch 1 | Step 30 | Loss: 1.9776 | LR: 9.71e-06
|
| 82 |
+
[2026-04-25 17:51:43] Epoch 1 | Step 40 | Loss: 1.8005 | LR: 1.00e-05
|
| 83 |
+
[2026-04-25 17:51:46] Epoch 1 | Step 50 | Loss: 1.6747 | LR: 1.00e-05
|
| 84 |
+
[2026-04-25 17:51:48] Epoch 1 | Step 60 | Loss: 1.5843 | LR: 1.00e-05
|
| 85 |
+
[2026-04-25 17:51:51] Epoch 1 | Step 70 | Loss: 1.5115 | LR: 1.00e-05
|
| 86 |
+
[2026-04-25 17:51:53] Epoch 1 | Step 80 | Loss: 1.4551 | LR: 1.00e-05
|
| 87 |
+
[2026-04-25 17:51:56] Epoch 1 | Step 90 | Loss: 1.3999 | LR: 1.00e-05
|
| 88 |
+
[2026-04-25 17:51:58] Epoch 1 | Step 100 | Loss: 1.3846 | LR: 1.00e-05
|
| 89 |
+
[2026-04-25 17:52:01] Epoch 1 | Step 110 | Loss: 1.3649 | LR: 1.00e-05
|
| 90 |
+
[2026-04-25 17:52:03] Epoch 1 | Step 120 | Loss: 1.3474 | LR: 1.00e-05
|
| 91 |
+
[2026-04-25 17:52:06] Epoch 1 | Step 130 | Loss: 1.3130 | LR: 1.00e-05
|
| 92 |
+
[2026-04-25 17:52:08] Epoch 1 | Step 140 | Loss: 1.2949 | LR: 1.00e-05
|
| 93 |
+
[2026-04-25 17:52:11] Epoch 1 | Step 150 | Loss: 1.2813 | LR: 1.00e-05
|
| 94 |
+
[2026-04-25 17:52:13] Epoch 1 | Step 160 | Loss: 1.2615 | LR: 1.00e-05
|
| 95 |
+
[2026-04-25 17:52:16] Epoch 1 | Step 170 | Loss: 1.2478 | LR: 1.00e-05
|
| 96 |
+
[2026-04-25 17:52:18] Epoch 1 | Step 180 | Loss: 1.2421 | LR: 1.00e-05
|
| 97 |
+
[2026-04-25 17:52:21] Epoch 1 | Step 190 | Loss: 1.2342 | LR: 1.00e-05
|
| 98 |
+
[2026-04-25 17:52:24] Epoch 1 | Step 200 | Loss: 1.2227 | LR: 1.00e-05
|
| 99 |
+
[2026-04-25 17:52:26] Epoch 1 | Step 210 | Loss: 1.2143 | LR: 1.00e-05
|
| 100 |
+
[2026-04-25 17:52:29] Epoch 1 | Step 220 | Loss: 1.2103 | LR: 1.00e-05
|
| 101 |
+
[2026-04-25 17:52:31] Epoch 1 | Step 230 | Loss: 1.2054 | LR: 1.00e-05
|
| 102 |
+
[2026-04-25 17:52:34] Epoch 1 | Step 240 | Loss: 1.1962 | LR: 1.00e-05
|
| 103 |
+
[2026-04-25 17:52:36] Epoch 1 | Step 250 | Loss: 1.1868 | LR: 1.00e-05
|
| 104 |
+
[2026-04-25 17:52:39] Epoch 1 | Step 260 | Loss: 1.1883 | LR: 9.44e-06
|
| 105 |
+
[2026-04-25 17:52:42] Epoch 1 | Step 270 | Loss: 1.1847 | LR: 7.91e-06
|
| 106 |
+
[2026-04-25 17:52:44] Epoch 1 | Step 280 | Loss: 1.1804 | LR: 5.78e-06
|
| 107 |
+
[2026-04-25 17:52:47] Epoch 1 | Step 290 | Loss: 1.1755 | LR: 3.58e-06
|
| 108 |
+
[2026-04-25 17:52:49] Epoch 1 | Step 300 | Loss: 1.1688 | LR: 1.86e-06
|
| 109 |
+
[2026-04-25 17:52:52] Epoch 1 | Step 310 | Loss: 1.1661 | LR: 1.04e-06
|
| 110 |
+
[2026-04-25 17:52:55] Epoch 1 | Step 320 | Loss: 1.1683 | LR: 1.00e-06
|
| 111 |
+
[2026-04-25 17:52:57] Epoch 1 | Step 330 | Loss: 1.1641 | LR: 1.00e-06
|
| 112 |
+
[2026-04-25 17:52:59] Epoch 1 | Step 340 | Loss: 1.1625 | LR: 1.00e-06
|
| 113 |
+
[2026-04-25 17:53:02] Epoch 1 | Step 350 | Loss: 1.1564 | LR: 1.00e-06
|
| 114 |
+
[2026-04-25 17:53:04] Epoch 1 | Step 360 | Loss: 1.1521 | LR: 1.00e-06
|
| 115 |
+
[2026-04-25 17:53:07] Epoch 1 | Step 370 | Loss: 1.1474 | LR: 1.00e-06
|
| 116 |
+
[2026-04-25 17:53:10] Epoch 1 | Step 380 | Loss: 1.1489 | LR: 1.00e-06
|
| 117 |
+
[2026-04-25 17:53:12] Epoch 1 | Step 390 | Loss: 1.1455 | LR: 1.00e-06
|
| 118 |
+
[2026-04-25 17:53:14] Epoch 1 | Step 400 | Loss: 1.1420 | LR: 1.00e-06
|
| 119 |
+
[2026-04-25 17:53:17] Epoch 1 | Step 410 | Loss: 1.1431 | LR: 1.00e-06
|
| 120 |
+
[2026-04-25 17:53:20] Epoch 1 | Step 420 | Loss: 1.1391 | LR: 1.00e-06
|
| 121 |
+
[2026-04-25 17:53:22] Epoch 1 | Step 430 | Loss: 1.1357 | LR: 1.00e-06
|
| 122 |
+
[2026-04-25 17:53:25] Epoch 1 | Step 440 | Loss: 1.1338 | LR: 1.00e-06
|
| 123 |
+
[2026-04-25 17:53:27] Epoch 1 | Step 450 | Loss: 1.1328 | LR: 1.00e-06
|
| 124 |
+
[2026-04-25 17:53:30] Epoch 1 | Step 460 | Loss: 1.1333 | LR: 1.00e-06
|
| 125 |
+
[2026-04-25 17:53:32] Epoch 1 | Step 470 | Loss: 1.1346 | LR: 1.00e-06
|
| 126 |
+
[2026-04-25 17:53:35] Epoch 1 | Step 480 | Loss: 1.1344 | LR: 1.00e-06
|
| 127 |
+
[2026-04-25 17:53:37] Epoch 1 | Step 490 | Loss: 1.1360 | LR: 1.00e-06
|
| 128 |
+
[2026-04-25 17:53:40] Epoch 1 | Step 500 | Loss: 1.1334 | LR: 1.00e-06
|
| 129 |
+
[2026-04-25 17:53:42] Epoch 1 | Step 510 | Loss: 1.1307 | LR: 1.00e-06
|
| 130 |
+
[2026-04-25 17:53:45] Epoch 1 | Step 520 | Loss: 1.1298 | LR: 1.00e-06
|
| 131 |
+
[2026-04-25 17:53:48] Epoch 1 | Step 530 | Loss: 1.1278 | LR: 1.00e-06
|
| 132 |
+
[2026-04-25 17:53:50] Epoch 1 | Step 540 | Loss: 1.1289 | LR: 1.00e-06
|
| 133 |
+
[2026-04-25 17:53:53] Epoch 1 | Step 550 | Loss: 1.1277 | LR: 1.00e-06
|
| 134 |
+
[2026-04-25 17:53:55] Epoch 1 | Step 560 | Loss: 1.1266 | LR: 1.00e-06
|
| 135 |
+
[2026-04-25 17:53:58] Epoch 1 | Step 570 | Loss: 1.1254 | LR: 1.00e-06
|
| 136 |
+
[2026-04-25 17:54:01] Epoch 1 | Step 580 | Loss: 1.1248 | LR: 1.00e-06
|
| 137 |
+
[2026-04-25 17:54:03] Epoch 1 | Step 590 | Loss: 1.1261 | LR: 1.00e-06
|
| 138 |
+
[2026-04-25 17:54:05] Epoch 1 | Step 600 | Loss: 1.1263 | LR: 1.00e-06
|
| 139 |
+
[2026-04-25 17:54:08] Epoch 1 | Step 610 | Loss: 1.1270 | LR: 1.00e-06
|
| 140 |
+
[2026-04-25 17:54:11] Epoch 1 | Step 620 | Loss: 1.1250 | LR: 1.00e-06
|
| 141 |
+
[2026-04-25 17:54:12] Epoch 1 completed in 159.64s | Loss: 1.1239
|
| 142 |
+
[2026-04-25 17:54:18] Checkpoint saved: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/checkpoints/checkpoint_step_625.pt
|
| 143 |
+
[2026-04-25 17:54:25]
|
| 144 |
+
Training completed!
|
| 145 |
+
[2026-04-25 17:54:27] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/model_final.pt
|
| 146 |
+
[2026-04-25 18:06:08,363][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 147 |
+
[2026-04-25 18:06:08] CUDA_VISIBLE_DEVICES: 2,3
|
| 148 |
+
[2026-04-25 18:06:08] Number of processes: 2
|
| 149 |
+
[2026-04-25 18:06:08] Process index: 0
|
| 150 |
+
[2026-04-25 18:06:08] Mixed precision: bf16
|
| 151 |
+
[2026-04-25 18:06:08] ============================================================
|
| 152 |
+
[2026-04-25 18:06:08] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 153 |
+
[2026-04-25 18:06:08] ============================================================
|
| 154 |
+
[2026-04-25 18:06:08] Config:
|
| 155 |
+
model:
|
| 156 |
+
name: EleutherAI/pythia-1b
|
| 157 |
+
checkpoint_path: null
|
| 158 |
+
from_scratch: false
|
| 159 |
+
training:
|
| 160 |
+
epochs: 1
|
| 161 |
+
batch_size: 4
|
| 162 |
+
eval_batch_size: 12
|
| 163 |
+
gradient_accumulation_steps: 4
|
| 164 |
+
lr: 1.0e-05
|
| 165 |
+
weight_decay: 0.1
|
| 166 |
+
betas:
|
| 167 |
+
- 0.9
|
| 168 |
+
- 0.95
|
| 169 |
+
eps: 1.0e-08
|
| 170 |
+
lr_scheduler: wsd
|
| 171 |
+
warmup_ratio: 0.1
|
| 172 |
+
decay_ratio: 0.2
|
| 173 |
+
warmup_steps: 100
|
| 174 |
+
min_lr_ratio: 0.1
|
| 175 |
+
max_grad_norm: 1.0
|
| 176 |
+
use_amp: true
|
| 177 |
+
resume: false
|
| 178 |
+
resume_checkpoint: null
|
| 179 |
+
data:
|
| 180 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 181 |
+
max_context_len: 4096
|
| 182 |
+
max_target_len: 256
|
| 183 |
+
num_workers: 4
|
| 184 |
+
pin_memory: true
|
| 185 |
+
max_train_samples: null
|
| 186 |
+
max_val_samples: 2000
|
| 187 |
+
logging:
|
| 188 |
+
log_interval: 10
|
| 189 |
+
save_interval: 0
|
| 190 |
+
eval_interval: 2000
|
| 191 |
+
save_every_epoch: false
|
| 192 |
+
tracking:
|
| 193 |
+
enabled: true
|
| 194 |
+
backend: wandb
|
| 195 |
+
project: code-completion_lr-sweep
|
| 196 |
+
run_name: pythia_1b_lr_1e-5
|
| 197 |
+
entity: null
|
| 198 |
+
base_url: https://wandb.platun0v.ru
|
| 199 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 200 |
+
paths:
|
| 201 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 202 |
+
seed: 42
|
| 203 |
+
device: cuda
|
| 204 |
+
|
| 205 |
+
[2026-04-25 18:06:10] Initializing tokenizer...
|
| 206 |
+
[2026-04-25 18:06:11] Loading model...
|
| 207 |
+
[2026-04-25 18:06:14] Loaded pretrained: EleutherAI/pythia-1b
|
| 208 |
+
[2026-04-25 18:06:14] Total params: 1,011,781,632
|
| 209 |
+
[2026-04-25 18:06:14] Trainable params: 1,011,781,632
|
| 210 |
+
[2026-04-25 18:06:14] Creating dataloaders...
|
| 211 |
+
[2026-04-25 18:06:14] Train dataset size: 316397
|
| 212 |
+
[2026-04-25 18:06:14] Train batches per epoch (before DDP split): 79100
|
| 213 |
+
[2026-04-25 18:06:14] Validation dataset size: 2000
|
| 214 |
+
[2026-04-25 18:06:14] Validation batches: 167
|
| 215 |
+
[2026-04-25 18:06:14] Creating optimizer...
|
| 216 |
+
[2026-04-25 18:06:14] Total steps: 9887, Steps per epoch: 39550
|
| 217 |
+
[2026-04-25 18:06:14] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 218 |
+
[2026-04-25 18:06:15] Train batches per epoch (after DDP split): 39550
|
| 219 |
+
[2026-04-25 18:06:15] Starting training...
|
| 220 |
+
[2026-04-25 18:06:15]
|
| 221 |
+
============================================================
|
| 222 |
+
[2026-04-25 18:06:15] EPOCH 1/1
|
| 223 |
+
[2026-04-25 18:06:15] ============================================================
|
| 224 |
+
[2026-04-25 18:06:18] Epoch 1 | Step 10 | Loss: 2.6460 | LR: 1.18e-06
|
| 225 |
+
[2026-04-25 18:06:21] Epoch 1 | Step 20 | Loss: 2.6636 | LR: 1.36e-06
|
| 226 |
+
[2026-04-25 18:06:23] Epoch 1 | Step 30 | Loss: 2.5925 | LR: 1.55e-06
|
| 227 |
+
[2026-04-25 18:06:26] Epoch 1 | Step 40 | Loss: 2.5481 | LR: 1.73e-06
|
| 228 |
+
[2026-04-25 18:06:29] Epoch 1 | Step 50 | Loss: 2.4824 | LR: 1.91e-06
|
| 229 |
+
[2026-04-25 18:06:31] Epoch 1 | Step 60 | Loss: 2.4141 | LR: 2.09e-06
|
| 230 |
+
[2026-04-25 18:06:34] Epoch 1 | Step 70 | Loss: 2.3474 | LR: 2.28e-06
|
| 231 |
+
[2026-04-25 18:06:36] Epoch 1 | Step 80 | Loss: 2.3035 | LR: 2.46e-06
|
| 232 |
+
[2026-04-25 18:06:39] Epoch 1 | Step 90 | Loss: 2.2299 | LR: 2.64e-06
|
| 233 |
+
[2026-04-25 18:06:41] Epoch 1 | Step 100 | Loss: 2.1684 | LR: 2.82e-06
|
| 234 |
+
[2026-04-25 18:06:44] Epoch 1 | Step 110 | Loss: 2.1276 | LR: 3.00e-06
|
| 235 |
+
[2026-04-25 18:06:46] Epoch 1 | Step 120 | Loss: 2.0775 | LR: 3.19e-06
|
| 236 |
+
[2026-04-25 18:06:49] Epoch 1 | Step 130 | Loss: 2.0371 | LR: 3.37e-06
|
| 237 |
+
[2026-04-25 18:06:51] Epoch 1 | Step 140 | Loss: 1.9966 | LR: 3.55e-06
|
| 238 |
+
[2026-04-25 18:06:54] Epoch 1 | Step 150 | Loss: 1.9444 | LR: 3.73e-06
|
| 239 |
+
[2026-04-25 18:06:56] Epoch 1 | Step 160 | Loss: 1.9014 | LR: 3.91e-06
|
| 240 |
+
[2026-04-25 18:06:59] Epoch 1 | Step 170 | Loss: 1.8649 | LR: 4.10e-06
|
| 241 |
+
[2026-04-25 18:07:01] Epoch 1 | Step 180 | Loss: 1.8227 | LR: 4.28e-06
|
| 242 |
+
[2026-04-25 18:07:03] Epoch 1 | Step 190 | Loss: 1.7938 | LR: 4.46e-06
|
| 243 |
+
[2026-04-25 18:07:06] Epoch 1 | Step 200 | Loss: 1.7650 | LR: 4.64e-06
|
| 244 |
+
[2026-04-25 18:07:08] Epoch 1 | Step 210 | Loss: 1.7455 | LR: 4.83e-06
|
| 245 |
+
[2026-04-25 18:07:11] Epoch 1 | Step 220 | Loss: 1.7219 | LR: 5.01e-06
|
| 246 |
+
[2026-04-25 18:07:13] Epoch 1 | Step 230 | Loss: 1.6927 | LR: 5.19e-06
|
| 247 |
+
[2026-04-25 18:07:16] Epoch 1 | Step 240 | Loss: 1.6687 | LR: 5.37e-06
|
| 248 |
+
[2026-04-25 18:07:18] Epoch 1 | Step 250 | Loss: 1.6482 | LR: 5.55e-06
|
| 249 |
+
[2026-04-25 18:07:21] Epoch 1 | Step 260 | Loss: 1.6359 | LR: 5.74e-06
|
| 250 |
+
[2026-04-25 18:07:23] Epoch 1 | Step 270 | Loss: 1.6153 | LR: 5.92e-06
|
| 251 |
+
[2026-04-25 18:07:26] Epoch 1 | Step 280 | Loss: 1.5946 | LR: 6.10e-06
|
| 252 |
+
[2026-04-25 18:07:28] Epoch 1 | Step 290 | Loss: 1.5788 | LR: 6.28e-06
|
| 253 |
+
[2026-04-25 18:07:31] Epoch 1 | Step 300 | Loss: 1.5643 | LR: 6.47e-06
|
| 254 |
+
[2026-04-25 18:07:33] Epoch 1 | Step 310 | Loss: 1.5481 | LR: 6.65e-06
|
| 255 |
+
[2026-04-25 18:07:36] Epoch 1 | Step 320 | Loss: 1.5323 | LR: 6.83e-06
|
| 256 |
+
[2026-04-25 18:07:38] Epoch 1 | Step 330 | Loss: 1.5169 | LR: 7.01e-06
|
| 257 |
+
[2026-04-25 18:07:41] Epoch 1 | Step 340 | Loss: 1.5035 | LR: 7.19e-06
|
| 258 |
+
[2026-04-25 18:07:43] Epoch 1 | Step 350 | Loss: 1.4927 | LR: 7.38e-06
|
| 259 |
+
[2026-04-25 18:07:46] Epoch 1 | Step 360 | Loss: 1.4786 | LR: 7.56e-06
|
| 260 |
+
[2026-04-25 18:07:48] Epoch 1 | Step 370 | Loss: 1.4647 | LR: 7.74e-06
|
| 261 |
+
[2026-04-25 18:07:51] Epoch 1 | Step 380 | Loss: 1.4527 | LR: 7.92e-06
|
| 262 |
+
[2026-04-25 18:07:54] Epoch 1 | Step 390 | Loss: 1.4421 | LR: 8.11e-06
|
| 263 |
+
[2026-04-25 18:07:56] Epoch 1 | Step 400 | Loss: 1.4326 | LR: 8.29e-06
|
| 264 |
+
[2026-04-25 18:07:59] Epoch 1 | Step 410 | Loss: 1.4242 | LR: 8.47e-06
|
| 265 |
+
[2026-04-25 18:08:01] Epoch 1 | Step 420 | Loss: 1.4156 | LR: 8.65e-06
|
| 266 |
+
[2026-04-25 18:08:04] Epoch 1 | Step 430 | Loss: 1.4108 | LR: 8.83e-06
|
| 267 |
+
[2026-04-25 18:08:06] Epoch 1 | Step 440 | Loss: 1.4000 | LR: 9.02e-06
|
| 268 |
+
[2026-04-25 18:08:09] Epoch 1 | Step 450 | Loss: 1.3918 | LR: 9.20e-06
|
| 269 |
+
[2026-04-25 18:08:12] Epoch 1 | Step 460 | Loss: 1.3851 | LR: 9.38e-06
|
| 270 |
+
[2026-04-25 18:08:14] Epoch 1 | Step 470 | Loss: 1.3768 | LR: 9.56e-06
|
| 271 |
+
[2026-04-25 18:08:17] Epoch 1 | Step 480 | Loss: 1.3712 | LR: 9.74e-06
|
| 272 |
+
[2026-04-25 18:08:19] Epoch 1 | Step 490 | Loss: 1.3631 | LR: 9.93e-06
|
| 273 |
+
[2026-04-25 18:08:22] Epoch 1 | Step 500 | Loss: 1.3555 | LR: 1.00e-05
|
| 274 |
+
[2026-04-25 18:08:24] Epoch 1 | Step 510 | Loss: 1.3482 | LR: 1.00e-05
|
| 275 |
+
[2026-04-25 18:08:26] Epoch 1 | Step 520 | Loss: 1.3431 | LR: 1.00e-05
|
| 276 |
+
[2026-04-25 18:08:29] Epoch 1 | Step 530 | Loss: 1.3361 | LR: 1.00e-05
|
| 277 |
+
[2026-04-25 18:08:32] Epoch 1 | Step 540 | Loss: 1.3284 | LR: 1.00e-05
|
| 278 |
+
[2026-04-25 18:08:34] Epoch 1 | Step 550 | Loss: 1.3221 | LR: 1.00e-05
|
| 279 |
+
[2026-04-25 18:08:37] Epoch 1 | Step 560 | Loss: 1.3162 | LR: 1.00e-05
|
| 280 |
+
[2026-04-25 18:08:39] Epoch 1 | Step 570 | Loss: 1.3123 | LR: 1.00e-05
|
| 281 |
+
[2026-04-25 18:08:42] Epoch 1 | Step 580 | Loss: 1.3084 | LR: 1.00e-05
|
| 282 |
+
[2026-04-25 18:08:44] Epoch 1 | Step 590 | Loss: 1.3049 | LR: 1.00e-05
|
| 283 |
+
[2026-04-25 18:08:47] Epoch 1 | Step 600 | Loss: 1.3009 | LR: 1.00e-05
|
| 284 |
+
[2026-04-25 18:08:49] Epoch 1 | Step 610 | Loss: 1.2988 | LR: 1.00e-05
|
| 285 |
+
[2026-04-25 18:08:52] Epoch 1 | Step 620 | Loss: 1.2954 | LR: 1.00e-05
|
| 286 |
+
[2026-04-25 18:08:54] Epoch 1 | Step 630 | Loss: 1.2909 | LR: 1.00e-05
|
| 287 |
+
[2026-04-25 18:08:56] Epoch 1 | Step 640 | Loss: 1.2881 | LR: 1.00e-05
|
| 288 |
+
[2026-04-25 18:08:59] Epoch 1 | Step 650 | Loss: 1.2841 | LR: 1.00e-05
|
| 289 |
+
[2026-04-25 18:09:01] Epoch 1 | Step 660 | Loss: 1.2816 | LR: 1.00e-05
|
| 290 |
+
[2026-04-25 18:09:04] Epoch 1 | Step 670 | Loss: 1.2773 | LR: 1.00e-05
|
| 291 |
+
[2026-04-25 18:09:06] Epoch 1 | Step 680 | Loss: 1.2744 | LR: 1.00e-05
|
| 292 |
+
[2026-04-25 18:09:09] Epoch 1 | Step 690 | Loss: 1.2705 | LR: 1.00e-05
|
| 293 |
+
[2026-04-25 18:09:11] Epoch 1 | Step 700 | Loss: 1.2688 | LR: 1.00e-05
|
| 294 |
+
[2026-04-25 18:09:14] Epoch 1 | Step 710 | Loss: 1.2656 | LR: 1.00e-05
|
| 295 |
+
[2026-04-25 18:09:16] Epoch 1 | Step 720 | Loss: 1.2626 | LR: 1.00e-05
|
| 296 |
+
[2026-04-25 18:09:19] Epoch 1 | Step 730 | Loss: 1.2601 | LR: 1.00e-05
|
| 297 |
+
[2026-04-25 18:09:21] Epoch 1 | Step 740 | Loss: 1.2567 | LR: 1.00e-05
|
| 298 |
+
[2026-04-25 18:09:24] Epoch 1 | Step 750 | Loss: 1.2531 | LR: 1.00e-05
|
| 299 |
+
[2026-04-25 18:09:26] Epoch 1 | Step 760 | Loss: 1.2520 | LR: 1.00e-05
|
| 300 |
+
[2026-04-25 18:09:29] Epoch 1 | Step 770 | Loss: 1.2511 | LR: 1.00e-05
|
| 301 |
+
[2026-04-25 18:09:31] Epoch 1 | Step 780 | Loss: 1.2486 | LR: 1.00e-05
|
| 302 |
+
[2026-04-25 18:09:34] Epoch 1 | Step 790 | Loss: 1.2458 | LR: 1.00e-05
|
| 303 |
+
[2026-04-25 18:09:36] Epoch 1 | Step 800 | Loss: 1.2425 | LR: 1.00e-05
|
| 304 |
+
[2026-04-25 18:09:39] Epoch 1 | Step 810 | Loss: 1.2401 | LR: 1.00e-05
|
| 305 |
+
[2026-04-25 18:09:41] Epoch 1 | Step 820 | Loss: 1.2374 | LR: 1.00e-05
|
| 306 |
+
[2026-04-25 18:09:44] Epoch 1 | Step 830 | Loss: 1.2355 | LR: 1.00e-05
|
| 307 |
+
[2026-04-25 18:09:46] Epoch 1 | Step 840 | Loss: 1.2328 | LR: 1.00e-05
|
| 308 |
+
[2026-04-25 18:09:49] Epoch 1 | Step 850 | Loss: 1.2294 | LR: 1.00e-05
|
| 309 |
+
[2026-04-25 18:09:51] Epoch 1 | Step 860 | Loss: 1.2281 | LR: 1.00e-05
|
| 310 |
+
[2026-04-25 18:09:54] Epoch 1 | Step 870 | Loss: 1.2272 | LR: 1.00e-05
|
| 311 |
+
[2026-04-25 18:09:56] Epoch 1 | Step 880 | Loss: 1.2257 | LR: 1.00e-05
|
| 312 |
+
[2026-04-25 18:09:59] Epoch 1 | Step 890 | Loss: 1.2240 | LR: 1.00e-05
|
| 313 |
+
[2026-04-25 18:10:01] Epoch 1 | Step 900 | Loss: 1.2217 | LR: 1.00e-05
|
| 314 |
+
[2026-04-25 18:10:04] Epoch 1 | Step 910 | Loss: 1.2203 | LR: 1.00e-05
|
| 315 |
+
[2026-04-25 18:10:06] Epoch 1 | Step 920 | Loss: 1.2198 | LR: 1.00e-05
|
| 316 |
+
[2026-04-25 18:10:09] Epoch 1 | Step 930 | Loss: 1.2180 | LR: 1.00e-05
|
| 317 |
+
[2026-04-25 18:10:11] Epoch 1 | Step 940 | Loss: 1.2167 | LR: 1.00e-05
|
| 318 |
+
[2026-04-25 18:10:14] Epoch 1 | Step 950 | Loss: 1.2136 | LR: 1.00e-05
|
| 319 |
+
[2026-04-25 18:10:17] Epoch 1 | Step 960 | Loss: 1.2117 | LR: 1.00e-05
|
| 320 |
+
[2026-04-25 18:10:19] Epoch 1 | Step 970 | Loss: 1.2102 | LR: 1.00e-05
|
| 321 |
+
[2026-04-25 18:10:22] Epoch 1 | Step 980 | Loss: 1.2082 | LR: 1.00e-05
|
| 322 |
+
[2026-04-25 18:10:25] Epoch 1 | Step 990 | Loss: 1.2053 | LR: 1.00e-05
|
| 323 |
+
[2026-04-25 18:10:27] Epoch 1 | Step 1000 | Loss: 1.2034 | LR: 1.00e-05
|
| 324 |
+
[2026-04-25 18:10:30] Epoch 1 | Step 1010 | Loss: 1.2027 | LR: 1.00e-05
|
| 325 |
+
[2026-04-25 18:10:32] Epoch 1 | Step 1020 | Loss: 1.2012 | LR: 1.00e-05
|
| 326 |
+
[2026-04-25 18:10:34] Epoch 1 | Step 1030 | Loss: 1.2006 | LR: 1.00e-05
|
| 327 |
+
[2026-04-25 18:10:37] Epoch 1 | Step 1040 | Loss: 1.1980 | LR: 1.00e-05
|
| 328 |
+
[2026-04-25 18:10:39] Epoch 1 | Step 1050 | Loss: 1.1962 | LR: 1.00e-05
|
| 329 |
+
[2026-04-25 18:10:42] Epoch 1 | Step 1060 | Loss: 1.1930 | LR: 1.00e-05
|
| 330 |
+
[2026-04-25 18:10:44] Epoch 1 | Step 1070 | Loss: 1.1913 | LR: 1.00e-05
|
| 331 |
+
[2026-04-25 18:10:47] Epoch 1 | Step 1080 | Loss: 1.1911 | LR: 1.00e-05
|
| 332 |
+
[2026-04-25 18:10:49] Epoch 1 | Step 1090 | Loss: 1.1918 | LR: 1.00e-05
|
| 333 |
+
[2026-04-25 18:10:52] Epoch 1 | Step 1100 | Loss: 1.1904 | LR: 1.00e-05
|
| 334 |
+
[2026-04-25 18:10:54] Epoch 1 | Step 1110 | Loss: 1.1898 | LR: 1.00e-05
|
| 335 |
+
[2026-04-25 18:10:56] Epoch 1 | Step 1120 | Loss: 1.1893 | LR: 1.00e-05
|
| 336 |
+
[2026-04-25 18:10:59] Epoch 1 | Step 1130 | Loss: 1.1886 | LR: 1.00e-05
|
| 337 |
+
[2026-04-25 18:11:02] Epoch 1 | Step 1140 | Loss: 1.1871 | LR: 1.00e-05
|
| 338 |
+
[2026-04-25 18:11:05] Epoch 1 | Step 1150 | Loss: 1.1844 | LR: 1.00e-05
|
| 339 |
+
[2026-04-25 18:11:07] Epoch 1 | Step 1160 | Loss: 1.1840 | LR: 1.00e-05
|
| 340 |
+
[2026-04-25 18:11:09] Epoch 1 | Step 1170 | Loss: 1.1839 | LR: 1.00e-05
|
| 341 |
+
[2026-04-25 18:11:12] Epoch 1 | Step 1180 | Loss: 1.1826 | LR: 1.00e-05
|
| 342 |
+
[2026-04-25 18:11:15] Epoch 1 | Step 1190 | Loss: 1.1819 | LR: 1.00e-05
|
| 343 |
+
[2026-04-25 18:11:17] Epoch 1 | Step 1200 | Loss: 1.1801 | LR: 1.00e-05
|
| 344 |
+
[2026-04-25 18:11:20] Epoch 1 | Step 1210 | Loss: 1.1777 | LR: 1.00e-05
|
| 345 |
+
[2026-04-25 18:11:22] Epoch 1 | Step 1220 | Loss: 1.1754 | LR: 1.00e-05
|
| 346 |
+
[2026-04-25 18:11:25] Epoch 1 | Step 1230 | Loss: 1.1748 | LR: 1.00e-05
|
| 347 |
+
[2026-04-25 18:11:28] Epoch 1 | Step 1240 | Loss: 1.1738 | LR: 1.00e-05
|
| 348 |
+
[2026-04-25 18:11:30] Epoch 1 | Step 1250 | Loss: 1.1717 | LR: 1.00e-05
|
| 349 |
+
[2026-04-25 18:11:33] Epoch 1 | Step 1260 | Loss: 1.1706 | LR: 1.00e-05
|
| 350 |
+
[2026-04-25 18:11:35] Epoch 1 | Step 1270 | Loss: 1.1681 | LR: 1.00e-05
|
| 351 |
+
[2026-04-25 18:11:37] Epoch 1 | Step 1280 | Loss: 1.1678 | LR: 1.00e-05
|
| 352 |
+
[2026-04-25 18:11:40] Epoch 1 | Step 1290 | Loss: 1.1673 | LR: 1.00e-05
|
| 353 |
+
[2026-04-25 18:11:42] Epoch 1 | Step 1300 | Loss: 1.1655 | LR: 1.00e-05
|
| 354 |
+
[2026-04-25 18:11:45] Epoch 1 | Step 1310 | Loss: 1.1645 | LR: 1.00e-05
|
| 355 |
+
[2026-04-25 18:11:47] Epoch 1 | Step 1320 | Loss: 1.1639 | LR: 1.00e-05
|
| 356 |
+
[2026-04-25 18:11:50] Epoch 1 | Step 1330 | Loss: 1.1622 | LR: 1.00e-05
|
| 357 |
+
[2026-04-25 18:11:52] Epoch 1 | Step 1340 | Loss: 1.1614 | LR: 1.00e-05
|
| 358 |
+
[2026-04-25 18:11:55] Epoch 1 | Step 1350 | Loss: 1.1612 | LR: 1.00e-05
|
| 359 |
+
[2026-04-25 18:11:57] Epoch 1 | Step 1360 | Loss: 1.1602 | LR: 1.00e-05
|
| 360 |
+
[2026-04-25 18:12:00] Epoch 1 | Step 1370 | Loss: 1.1591 | LR: 1.00e-05
|
| 361 |
+
[2026-04-25 18:12:03] Epoch 1 | Step 1380 | Loss: 1.1592 | LR: 1.00e-05
|
| 362 |
+
[2026-04-25 18:12:05] Epoch 1 | Step 1390 | Loss: 1.1589 | LR: 1.00e-05
|
| 363 |
+
[2026-04-25 18:12:08] Epoch 1 | Step 1400 | Loss: 1.1582 | LR: 1.00e-05
|
| 364 |
+
[2026-04-25 18:12:10] Epoch 1 | Step 1410 | Loss: 1.1563 | LR: 1.00e-05
|
| 365 |
+
[2026-04-25 18:12:13] Epoch 1 | Step 1420 | Loss: 1.1555 | LR: 1.00e-05
|
| 366 |
+
[2026-04-25 18:12:15] Epoch 1 | Step 1430 | Loss: 1.1545 | LR: 1.00e-05
|
| 367 |
+
[2026-04-25 18:12:18] Epoch 1 | Step 1440 | Loss: 1.1532 | LR: 1.00e-05
|
| 368 |
+
[2026-04-25 18:12:20] Epoch 1 | Step 1450 | Loss: 1.1522 | LR: 1.00e-05
|
| 369 |
+
[2026-04-25 18:12:23] Epoch 1 | Step 1460 | Loss: 1.1504 | LR: 1.00e-05
|
| 370 |
+
[2026-04-25 18:12:25] Epoch 1 | Step 1470 | Loss: 1.1500 | LR: 1.00e-05
|
| 371 |
+
[2026-04-25 18:12:28] Epoch 1 | Step 1480 | Loss: 1.1496 | LR: 1.00e-05
|
| 372 |
+
[2026-04-25 18:12:30] Epoch 1 | Step 1490 | Loss: 1.1492 | LR: 1.00e-05
|
| 373 |
+
[2026-04-25 18:12:33] Epoch 1 | Step 1500 | Loss: 1.1478 | LR: 1.00e-05
|
| 374 |
+
[2026-04-25 18:12:36] Epoch 1 | Step 1510 | Loss: 1.1475 | LR: 1.00e-05
|
| 375 |
+
[2026-04-25 18:12:38] Epoch 1 | Step 1520 | Loss: 1.1470 | LR: 1.00e-05
|
| 376 |
+
[2026-04-25 18:12:40] Epoch 1 | Step 1530 | Loss: 1.1461 | LR: 1.00e-05
|
| 377 |
+
[2026-04-25 18:12:43] Epoch 1 | Step 1540 | Loss: 1.1459 | LR: 1.00e-05
|
| 378 |
+
[2026-04-25 18:12:45] Epoch 1 | Step 1550 | Loss: 1.1454 | LR: 1.00e-05
|
| 379 |
+
[2026-04-25 18:12:48] Epoch 1 | Step 1560 | Loss: 1.1443 | LR: 1.00e-05
|
| 380 |
+
[2026-04-25 18:12:50] Epoch 1 | Step 1570 | Loss: 1.1445 | LR: 1.00e-05
|
| 381 |
+
[2026-04-25 18:12:53] Epoch 1 | Step 1580 | Loss: 1.1432 | LR: 1.00e-05
|
| 382 |
+
[2026-04-25 18:12:55] Epoch 1 | Step 1590 | Loss: 1.1427 | LR: 1.00e-05
|
| 383 |
+
[2026-04-25 18:12:58] Epoch 1 | Step 1600 | Loss: 1.1422 | LR: 1.00e-05
|
| 384 |
+
[2026-04-25 18:13:00] Epoch 1 | Step 1610 | Loss: 1.1405 | LR: 1.00e-05
|
| 385 |
+
[2026-04-25 18:13:02] Epoch 1 | Step 1620 | Loss: 1.1392 | LR: 1.00e-05
|
| 386 |
+
[2026-04-25 18:13:05] Epoch 1 | Step 1630 | Loss: 1.1392 | LR: 1.00e-05
|
| 387 |
+
[2026-04-25 18:13:07] Epoch 1 | Step 1640 | Loss: 1.1384 | LR: 1.00e-05
|
| 388 |
+
[2026-04-25 18:13:10] Epoch 1 | Step 1650 | Loss: 1.1373 | LR: 1.00e-05
|
| 389 |
+
[2026-04-25 18:13:12] Epoch 1 | Step 1660 | Loss: 1.1363 | LR: 1.00e-05
|
| 390 |
+
[2026-04-25 18:13:14] Epoch 1 | Step 1670 | Loss: 1.1363 | LR: 1.00e-05
|
| 391 |
+
[2026-04-25 18:13:17] Epoch 1 | Step 1680 | Loss: 1.1360 | LR: 1.00e-05
|
| 392 |
+
[2026-04-25 18:13:20] Epoch 1 | Step 1690 | Loss: 1.1352 | LR: 1.00e-05
|
| 393 |
+
[2026-04-25 18:13:22] Epoch 1 | Step 1700 | Loss: 1.1334 | LR: 1.00e-05
|
| 394 |
+
[2026-04-25 18:13:25] Epoch 1 | Step 1710 | Loss: 1.1326 | LR: 1.00e-05
|
| 395 |
+
[2026-04-25 18:13:27] Epoch 1 | Step 1720 | Loss: 1.1316 | LR: 1.00e-05
|
| 396 |
+
[2026-04-25 18:13:29] Epoch 1 | Step 1730 | Loss: 1.1309 | LR: 1.00e-05
|
| 397 |
+
[2026-04-25 18:13:32] Epoch 1 | Step 1740 | Loss: 1.1307 | LR: 1.00e-05
|
| 398 |
+
[2026-04-25 18:13:34] Epoch 1 | Step 1750 | Loss: 1.1312 | LR: 1.00e-05
|
| 399 |
+
[2026-04-25 18:13:37] Epoch 1 | Step 1760 | Loss: 1.1301 | LR: 1.00e-05
|
| 400 |
+
[2026-04-25 18:13:39] Epoch 1 | Step 1770 | Loss: 1.1300 | LR: 1.00e-05
|
| 401 |
+
[2026-04-25 18:13:42] Epoch 1 | Step 1780 | Loss: 1.1292 | LR: 1.00e-05
|
| 402 |
+
[2026-04-25 18:13:44] Epoch 1 | Step 1790 | Loss: 1.1289 | LR: 1.00e-05
|
| 403 |
+
[2026-04-25 18:13:47] Epoch 1 | Step 1800 | Loss: 1.1278 | LR: 1.00e-05
|
| 404 |
+
[2026-04-25 18:13:50] Epoch 1 | Step 1810 | Loss: 1.1274 | LR: 1.00e-05
|
| 405 |
+
[2026-04-25 18:13:52] Epoch 1 | Step 1820 | Loss: 1.1276 | LR: 1.00e-05
|
| 406 |
+
[2026-04-25 18:13:55] Epoch 1 | Step 1830 | Loss: 1.1273 | LR: 1.00e-05
|
| 407 |
+
[2026-04-25 18:13:57] Epoch 1 | Step 1840 | Loss: 1.1271 | LR: 1.00e-05
|
| 408 |
+
[2026-04-25 18:14:00] Epoch 1 | Step 1850 | Loss: 1.1263 | LR: 1.00e-05
|
| 409 |
+
[2026-04-25 18:14:03] Epoch 1 | Step 1860 | Loss: 1.1259 | LR: 1.00e-05
|
| 410 |
+
[2026-04-25 18:14:05] Epoch 1 | Step 1870 | Loss: 1.1252 | LR: 1.00e-05
|
| 411 |
+
[2026-04-25 18:14:08] Epoch 1 | Step 1880 | Loss: 1.1244 | LR: 1.00e-05
|
| 412 |
+
[2026-04-25 18:14:10] Epoch 1 | Step 1890 | Loss: 1.1244 | LR: 1.00e-05
|
| 413 |
+
[2026-04-25 18:14:13] Epoch 1 | Step 1900 | Loss: 1.1237 | LR: 1.00e-05
|
| 414 |
+
[2026-04-25 18:14:15] Epoch 1 | Step 1910 | Loss: 1.1235 | LR: 1.00e-05
|
| 415 |
+
[2026-04-25 18:14:18] Epoch 1 | Step 1920 | Loss: 1.1235 | LR: 1.00e-05
|
| 416 |
+
[2026-04-25 18:14:20] Epoch 1 | Step 1930 | Loss: 1.1229 | LR: 1.00e-05
|
| 417 |
+
[2026-04-25 18:14:23] Epoch 1 | Step 1940 | Loss: 1.1220 | LR: 1.00e-05
|
| 418 |
+
[2026-04-25 18:14:25] Epoch 1 | Step 1950 | Loss: 1.1213 | LR: 1.00e-05
|
| 419 |
+
[2026-04-25 18:14:28] Epoch 1 | Step 1960 | Loss: 1.1209 | LR: 1.00e-05
|
| 420 |
+
[2026-04-25 18:14:30] Epoch 1 | Step 1970 | Loss: 1.1207 | LR: 1.00e-05
|
| 421 |
+
[2026-04-25 18:14:33] Epoch 1 | Step 1980 | Loss: 1.1208 | LR: 1.00e-05
|
| 422 |
+
[2026-04-25 18:14:35] Epoch 1 | Step 1990 | Loss: 1.1205 | LR: 1.00e-05
|
| 423 |
+
[2026-04-25 18:14:37] Epoch 1 | Step 2000 | Loss: 1.1201 | LR: 1.00e-05
|
| 424 |
+
[2026-04-25 18:14:38] Validation | Batch 10/84 | Loss: 1.0011
|
| 425 |
+
[2026-04-25 18:14:38] Validation | Batch 20/84 | Loss: 1.0029
|
| 426 |
+
[2026-04-25 18:14:39] Validation | Batch 30/84 | Loss: 1.0825
|
| 427 |
+
[2026-04-25 18:14:39] Validation | Batch 40/84 | Loss: 1.0824
|
| 428 |
+
[2026-04-25 18:14:39] Validation | Batch 50/84 | Loss: 1.0856
|
| 429 |
+
[2026-04-25 18:14:40] Validation | Batch 60/84 | Loss: 1.0582
|
| 430 |
+
[2026-04-25 18:14:41] Validation | Batch 70/84 | Loss: 1.0398
|
| 431 |
+
[2026-04-25 18:14:41] Validation | Batch 80/84 | Loss: 1.0455
|
| 432 |
+
[2026-04-25 18:14:41] Validation | Batch 84/84 | Loss: 1.0389
|
| 433 |
+
[2026-04-25 18:14:42] Validation | Loss: 1.0389 | PPL: 2.90 | Time: 3.90s
|
| 434 |
+
[2026-04-25 18:14:44] New best model saved! Val loss: 1.0389
|
| 435 |
+
[2026-04-25 18:14:46] Epoch 1 | Step 2010 | Loss: 1.1197 | LR: 1.00e-05
|
| 436 |
+
[2026-04-25 18:14:49] Epoch 1 | Step 2020 | Loss: 1.1193 | LR: 1.00e-05
|
| 437 |
+
[2026-04-25 18:14:51] Epoch 1 | Step 2030 | Loss: 1.1193 | LR: 1.00e-05
|
| 438 |
+
[2026-04-25 18:14:54] Epoch 1 | Step 2040 | Loss: 1.1188 | LR: 1.00e-05
|
| 439 |
+
[2026-04-25 18:14:56] Epoch 1 | Step 2050 | Loss: 1.1185 | LR: 1.00e-05
|
| 440 |
+
[2026-04-25 18:14:59] Epoch 1 | Step 2060 | Loss: 1.1177 | LR: 1.00e-05
|
| 441 |
+
[2026-04-25 18:15:02] Epoch 1 | Step 2070 | Loss: 1.1164 | LR: 1.00e-05
|
| 442 |
+
[2026-04-25 18:15:04] Epoch 1 | Step 2080 | Loss: 1.1156 | LR: 1.00e-05
|
| 443 |
+
[2026-04-25 18:15:07] Epoch 1 | Step 2090 | Loss: 1.1156 | LR: 1.00e-05
|
| 444 |
+
[2026-04-25 18:15:09] Epoch 1 | Step 2100 | Loss: 1.1154 | LR: 1.00e-05
|
| 445 |
+
[2026-04-25 18:15:13] Epoch 1 | Step 2110 | Loss: 1.1151 | LR: 1.00e-05
|
| 446 |
+
[2026-04-25 18:15:15] Epoch 1 | Step 2120 | Loss: 1.1144 | LR: 1.00e-05
|
| 447 |
+
[2026-04-25 18:15:18] Epoch 1 | Step 2130 | Loss: 1.1142 | LR: 1.00e-05
|
| 448 |
+
[2026-04-25 18:15:20] Epoch 1 | Step 2140 | Loss: 1.1136 | LR: 1.00e-05
|
| 449 |
+
[2026-04-25 18:15:23] Epoch 1 | Step 2150 | Loss: 1.1132 | LR: 1.00e-05
|
| 450 |
+
[2026-04-25 18:15:25] Epoch 1 | Step 2160 | Loss: 1.1132 | LR: 1.00e-05
|
| 451 |
+
[2026-04-25 18:15:27] Epoch 1 | Step 2170 | Loss: 1.1126 | LR: 1.00e-05
|
| 452 |
+
[2026-04-25 18:15:30] Epoch 1 | Step 2180 | Loss: 1.1119 | LR: 1.00e-05
|
| 453 |
+
[2026-04-25 18:15:32] Epoch 1 | Step 2190 | Loss: 1.1118 | LR: 1.00e-05
|
| 454 |
+
[2026-04-25 18:15:35] Epoch 1 | Step 2200 | Loss: 1.1113 | LR: 1.00e-05
|
| 455 |
+
[2026-04-25 18:15:37] Epoch 1 | Step 2210 | Loss: 1.1108 | LR: 1.00e-05
|
| 456 |
+
[2026-04-25 18:15:39] Epoch 1 | Step 2220 | Loss: 1.1111 | LR: 1.00e-05
|
| 457 |
+
[2026-04-25 18:15:42] Epoch 1 | Step 2230 | Loss: 1.1115 | LR: 1.00e-05
|
| 458 |
+
[2026-04-25 18:15:44] Epoch 1 | Step 2240 | Loss: 1.1118 | LR: 1.00e-05
|
| 459 |
+
[2026-04-25 18:15:47] Epoch 1 | Step 2250 | Loss: 1.1120 | LR: 1.00e-05
|
| 460 |
+
[2026-04-25 18:15:49] Epoch 1 | Step 2260 | Loss: 1.1115 | LR: 1.00e-05
|
| 461 |
+
[2026-04-25 18:15:52] Epoch 1 | Step 2270 | Loss: 1.1114 | LR: 1.00e-05
|
| 462 |
+
[2026-04-25 18:15:54] Epoch 1 | Step 2280 | Loss: 1.1113 | LR: 1.00e-05
|
| 463 |
+
[2026-04-25 18:15:57] Epoch 1 | Step 2290 | Loss: 1.1118 | LR: 1.00e-05
|
| 464 |
+
[2026-04-25 18:15:59] Epoch 1 | Step 2300 | Loss: 1.1116 | LR: 1.00e-05
|
| 465 |
+
[2026-04-25 18:16:02] Epoch 1 | Step 2310 | Loss: 1.1111 | LR: 1.00e-05
|
| 466 |
+
[2026-04-25 18:16:04] Epoch 1 | Step 2320 | Loss: 1.1110 | LR: 1.00e-05
|
| 467 |
+
[2026-04-25 18:16:06] Epoch 1 | Step 2330 | Loss: 1.1106 | LR: 1.00e-05
|
| 468 |
+
[2026-04-25 18:16:09] Epoch 1 | Step 2340 | Loss: 1.1101 | LR: 1.00e-05
|
| 469 |
+
[2026-04-25 18:16:11] Epoch 1 | Step 2350 | Loss: 1.1095 | LR: 1.00e-05
|
| 470 |
+
[2026-04-25 18:16:14] Epoch 1 | Step 2360 | Loss: 1.1094 | LR: 1.00e-05
|
| 471 |
+
[2026-04-25 18:16:16] Epoch 1 | Step 2370 | Loss: 1.1091 | LR: 1.00e-05
|
| 472 |
+
[2026-04-25 18:16:19] Epoch 1 | Step 2380 | Loss: 1.1084 | LR: 1.00e-05
|
| 473 |
+
[2026-04-25 18:16:21] Epoch 1 | Step 2390 | Loss: 1.1084 | LR: 1.00e-05
|
| 474 |
+
[2026-04-25 18:16:24] Epoch 1 | Step 2400 | Loss: 1.1077 | LR: 1.00e-05
|
| 475 |
+
[2026-04-25 18:16:26] Epoch 1 | Step 2410 | Loss: 1.1077 | LR: 1.00e-05
|
| 476 |
+
[2026-04-25 18:16:29] Epoch 1 | Step 2420 | Loss: 1.1074 | LR: 1.00e-05
|
| 477 |
+
[2026-04-25 18:16:31] Epoch 1 | Step 2430 | Loss: 1.1074 | LR: 1.00e-05
|
| 478 |
+
[2026-04-25 18:16:34] Epoch 1 | Step 2440 | Loss: 1.1069 | LR: 1.00e-05
|
| 479 |
+
[2026-04-25 18:16:36] Epoch 1 | Step 2450 | Loss: 1.1064 | LR: 1.00e-05
|
| 480 |
+
[2026-04-25 18:16:38] Epoch 1 | Step 2460 | Loss: 1.1061 | LR: 1.00e-05
|
| 481 |
+
[2026-04-25 18:16:41] Epoch 1 | Step 2470 | Loss: 1.1060 | LR: 1.00e-05
|
| 482 |
+
[2026-04-25 18:16:44] Epoch 1 | Step 2480 | Loss: 1.1058 | LR: 1.00e-05
|
| 483 |
+
[2026-04-25 18:16:46] Epoch 1 | Step 2490 | Loss: 1.1053 | LR: 1.00e-05
|
| 484 |
+
[2026-04-25 18:16:49] Epoch 1 | Step 2500 | Loss: 1.1047 | LR: 1.00e-05
|
| 485 |
+
[2026-04-25 18:16:51] Epoch 1 | Step 2510 | Loss: 1.1047 | LR: 1.00e-05
|
| 486 |
+
[2026-04-25 18:16:54] Epoch 1 | Step 2520 | Loss: 1.1038 | LR: 1.00e-05
|
| 487 |
+
[2026-04-25 18:16:56] Epoch 1 | Step 2530 | Loss: 1.1033 | LR: 1.00e-05
|
| 488 |
+
[2026-04-25 18:16:59] Epoch 1 | Step 2540 | Loss: 1.1029 | LR: 1.00e-05
|
| 489 |
+
[2026-04-25 18:17:01] Epoch 1 | Step 2550 | Loss: 1.1021 | LR: 1.00e-05
|
| 490 |
+
[2026-04-25 18:17:04] Epoch 1 | Step 2560 | Loss: 1.1019 | LR: 1.00e-05
|
| 491 |
+
[2026-04-25 18:17:06] Epoch 1 | Step 2570 | Loss: 1.1021 | LR: 1.00e-05
|
| 492 |
+
[2026-04-25 18:17:09] Epoch 1 | Step 2580 | Loss: 1.1022 | LR: 1.00e-05
|
| 493 |
+
[2026-04-25 18:17:12] Epoch 1 | Step 2590 | Loss: 1.1021 | LR: 1.00e-05
|
| 494 |
+
[2026-04-25 18:17:14] Epoch 1 | Step 2600 | Loss: 1.1020 | LR: 1.00e-05
|
| 495 |
+
[2026-04-25 18:17:16] Epoch 1 | Step 2610 | Loss: 1.1017 | LR: 1.00e-05
|
| 496 |
+
[2026-04-25 18:17:19] Epoch 1 | Step 2620 | Loss: 1.1011 | LR: 1.00e-05
|
| 497 |
+
[2026-04-25 18:17:21] Epoch 1 | Step 2630 | Loss: 1.1006 | LR: 1.00e-05
|
| 498 |
+
[2026-04-25 18:17:24] Epoch 1 | Step 2640 | Loss: 1.1005 | LR: 1.00e-05
|
| 499 |
+
[2026-04-25 18:17:26] Epoch 1 | Step 2650 | Loss: 1.1000 | LR: 1.00e-05
|
| 500 |
+
[2026-04-25 18:17:29] Epoch 1 | Step 2660 | Loss: 1.1000 | LR: 1.00e-05
|
| 501 |
+
[2026-04-25 18:17:31] Epoch 1 | Step 2670 | Loss: 1.0996 | LR: 1.00e-05
|
| 502 |
+
[2026-04-25 18:17:34] Epoch 1 | Step 2680 | Loss: 1.0992 | LR: 1.00e-05
|
| 503 |
+
[2026-04-25 18:17:36] Epoch 1 | Step 2690 | Loss: 1.0990 | LR: 1.00e-05
|
| 504 |
+
[2026-04-25 18:17:39] Epoch 1 | Step 2700 | Loss: 1.0984 | LR: 1.00e-05
|
| 505 |
+
[2026-04-25 18:17:41] Epoch 1 | Step 2710 | Loss: 1.0977 | LR: 1.00e-05
|
| 506 |
+
[2026-04-25 18:17:44] Epoch 1 | Step 2720 | Loss: 1.0977 | LR: 1.00e-05
|
| 507 |
+
[2026-04-25 18:17:46] Epoch 1 | Step 2730 | Loss: 1.0973 | LR: 1.00e-05
|
| 508 |
+
[2026-04-25 18:17:49] Epoch 1 | Step 2740 | Loss: 1.0976 | LR: 1.00e-05
|
| 509 |
+
[2026-04-25 18:17:51] Epoch 1 | Step 2750 | Loss: 1.0975 | LR: 1.00e-05
|
| 510 |
+
[2026-04-25 18:17:54] Epoch 1 | Step 2760 | Loss: 1.0970 | LR: 1.00e-05
|
| 511 |
+
[2026-04-25 18:17:56] Epoch 1 | Step 2770 | Loss: 1.0967 | LR: 1.00e-05
|
| 512 |
+
[2026-04-25 18:17:59] Epoch 1 | Step 2780 | Loss: 1.0968 | LR: 1.00e-05
|
| 513 |
+
[2026-04-25 18:18:01] Epoch 1 | Step 2790 | Loss: 1.0965 | LR: 1.00e-05
|
| 514 |
+
[2026-04-25 18:18:03] Epoch 1 | Step 2800 | Loss: 1.0961 | LR: 1.00e-05
|
| 515 |
+
[2026-04-25 18:18:06] Epoch 1 | Step 2810 | Loss: 1.0960 | LR: 1.00e-05
|
| 516 |
+
[2026-04-25 18:18:09] Epoch 1 | Step 2820 | Loss: 1.0957 | LR: 1.00e-05
|
| 517 |
+
[2026-04-25 18:18:11] Epoch 1 | Step 2830 | Loss: 1.0953 | LR: 1.00e-05
|
| 518 |
+
[2026-04-25 18:18:13] Epoch 1 | Step 2840 | Loss: 1.0958 | LR: 1.00e-05
|
| 519 |
+
[2026-04-25 18:18:16] Epoch 1 | Step 2850 | Loss: 1.0956 | LR: 1.00e-05
|
| 520 |
+
[2026-04-25 18:18:18] Epoch 1 | Step 2860 | Loss: 1.0953 | LR: 1.00e-05
|
| 521 |
+
[2026-04-25 18:18:21] Epoch 1 | Step 2870 | Loss: 1.0951 | LR: 1.00e-05
|
| 522 |
+
[2026-04-25 18:18:23] Epoch 1 | Step 2880 | Loss: 1.0947 | LR: 1.00e-05
|
| 523 |
+
[2026-04-25 18:18:26] Epoch 1 | Step 2890 | Loss: 1.0944 | LR: 1.00e-05
|
| 524 |
+
[2026-04-25 18:18:29] Epoch 1 | Step 2900 | Loss: 1.0939 | LR: 1.00e-05
|
| 525 |
+
[2026-04-25 18:18:31] Epoch 1 | Step 2910 | Loss: 1.0937 | LR: 1.00e-05
|
| 526 |
+
[2026-04-25 18:18:34] Epoch 1 | Step 2920 | Loss: 1.0938 | LR: 1.00e-05
|
| 527 |
+
[2026-04-25 18:18:37] Epoch 1 | Step 2930 | Loss: 1.0934 | LR: 1.00e-05
|
| 528 |
+
[2026-04-25 18:18:39] Epoch 1 | Step 2940 | Loss: 1.0929 | LR: 1.00e-05
|
| 529 |
+
[2026-04-25 18:18:41] Epoch 1 | Step 2950 | Loss: 1.0930 | LR: 1.00e-05
|
| 530 |
+
[2026-04-25 18:18:44] Epoch 1 | Step 2960 | Loss: 1.0929 | LR: 1.00e-05
|
| 531 |
+
[2026-04-25 18:18:47] Epoch 1 | Step 2970 | Loss: 1.0929 | LR: 1.00e-05
|
| 532 |
+
[2026-04-25 18:18:49] Epoch 1 | Step 2980 | Loss: 1.0925 | LR: 1.00e-05
|
| 533 |
+
[2026-04-25 18:18:52] Epoch 1 | Step 2990 | Loss: 1.0926 | LR: 1.00e-05
|
| 534 |
+
[2026-04-25 18:18:54] Epoch 1 | Step 3000 | Loss: 1.0924 | LR: 1.00e-05
|
| 535 |
+
[2026-04-25 18:18:57] Epoch 1 | Step 3010 | Loss: 1.0924 | LR: 1.00e-05
|
| 536 |
+
[2026-04-25 18:19:00] Epoch 1 | Step 3020 | Loss: 1.0920 | LR: 1.00e-05
|
| 537 |
+
[2026-04-25 18:19:02] Epoch 1 | Step 3030 | Loss: 1.0917 | LR: 1.00e-05
|
| 538 |
+
[2026-04-25 18:19:05] Epoch 1 | Step 3040 | Loss: 1.0911 | LR: 1.00e-05
|
| 539 |
+
[2026-04-25 18:19:07] Epoch 1 | Step 3050 | Loss: 1.0905 | LR: 1.00e-05
|
| 540 |
+
[2026-04-25 18:19:09] Epoch 1 | Step 3060 | Loss: 1.0902 | LR: 1.00e-05
|
| 541 |
+
[2026-04-25 18:19:12] Epoch 1 | Step 3070 | Loss: 1.0898 | LR: 1.00e-05
|
| 542 |
+
[2026-04-25 18:19:15] Epoch 1 | Step 3080 | Loss: 1.0898 | LR: 1.00e-05
|
| 543 |
+
[2026-04-25 18:19:17] Epoch 1 | Step 3090 | Loss: 1.0893 | LR: 1.00e-05
|
| 544 |
+
[2026-04-25 18:19:19] Epoch 1 | Step 3100 | Loss: 1.0890 | LR: 1.00e-05
|
| 545 |
+
[2026-04-25 18:19:22] Epoch 1 | Step 3110 | Loss: 1.0886 | LR: 1.00e-05
|
| 546 |
+
[2026-04-25 18:19:24] Epoch 1 | Step 3120 | Loss: 1.0890 | LR: 1.00e-05
|
| 547 |
+
[2026-04-25 18:19:27] Epoch 1 | Step 3130 | Loss: 1.0885 | LR: 1.00e-05
|
| 548 |
+
[2026-04-25 18:19:29] Epoch 1 | Step 3140 | Loss: 1.0885 | LR: 1.00e-05
|
| 549 |
+
[2026-04-25 18:19:32] Epoch 1 | Step 3150 | Loss: 1.0886 | LR: 1.00e-05
|
| 550 |
+
[2026-04-25 18:19:35] Epoch 1 | Step 3160 | Loss: 1.0886 | LR: 1.00e-05
|
| 551 |
+
[2026-04-25 18:19:37] Epoch 1 | Step 3170 | Loss: 1.0883 | LR: 1.00e-05
|
| 552 |
+
[2026-04-25 18:19:40] Epoch 1 | Step 3180 | Loss: 1.0883 | LR: 1.00e-05
|
| 553 |
+
[2026-04-25 18:19:42] Epoch 1 | Step 3190 | Loss: 1.0877 | LR: 1.00e-05
|
| 554 |
+
[2026-04-25 18:19:44] Epoch 1 | Step 3200 | Loss: 1.0874 | LR: 1.00e-05
|
| 555 |
+
[2026-04-25 18:19:47] Epoch 1 | Step 3210 | Loss: 1.0870 | LR: 1.00e-05
|
| 556 |
+
[2026-04-25 18:19:49] Epoch 1 | Step 3220 | Loss: 1.0865 | LR: 1.00e-05
|
| 557 |
+
[2026-04-25 18:19:52] Epoch 1 | Step 3230 | Loss: 1.0868 | LR: 1.00e-05
|
| 558 |
+
[2026-04-25 18:19:54] Epoch 1 | Step 3240 | Loss: 1.0866 | LR: 1.00e-05
|
| 559 |
+
[2026-04-25 18:19:57] Epoch 1 | Step 3250 | Loss: 1.0866 | LR: 1.00e-05
|
| 560 |
+
[2026-04-25 18:19:59] Epoch 1 | Step 3260 | Loss: 1.0862 | LR: 1.00e-05
|
| 561 |
+
[2026-04-25 18:20:01] Epoch 1 | Step 3270 | Loss: 1.0860 | LR: 1.00e-05
|
| 562 |
+
[2026-04-25 18:20:04] Epoch 1 | Step 3280 | Loss: 1.0854 | LR: 1.00e-05
|
| 563 |
+
[2026-04-25 18:20:07] Epoch 1 | Step 3290 | Loss: 1.0851 | LR: 1.00e-05
|
| 564 |
+
[2026-04-25 18:20:09] Epoch 1 | Step 3300 | Loss: 1.0850 | LR: 1.00e-05
|
| 565 |
+
[2026-04-25 18:20:12] Epoch 1 | Step 3310 | Loss: 1.0847 | LR: 1.00e-05
|
| 566 |
+
[2026-04-25 18:20:14] Epoch 1 | Step 3320 | Loss: 1.0845 | LR: 1.00e-05
|
| 567 |
+
[2026-04-25 18:20:17] Epoch 1 | Step 3330 | Loss: 1.0843 | LR: 1.00e-05
|
| 568 |
+
[2026-04-25 18:20:20] Epoch 1 | Step 3340 | Loss: 1.0843 | LR: 1.00e-05
|
| 569 |
+
[2026-04-25 18:20:22] Epoch 1 | Step 3350 | Loss: 1.0838 | LR: 1.00e-05
|
| 570 |
+
[2026-04-25 18:20:24] Epoch 1 | Step 3360 | Loss: 1.0836 | LR: 1.00e-05
|
| 571 |
+
[2026-04-25 18:20:27] Epoch 1 | Step 3370 | Loss: 1.0836 | LR: 1.00e-05
|
| 572 |
+
[2026-04-25 18:20:29] Epoch 1 | Step 3380 | Loss: 1.0831 | LR: 1.00e-05
|
| 573 |
+
[2026-04-25 18:20:32] Epoch 1 | Step 3390 | Loss: 1.0832 | LR: 1.00e-05
|
| 574 |
+
[2026-04-25 18:20:35] Epoch 1 | Step 3400 | Loss: 1.0835 | LR: 1.00e-05
|
| 575 |
+
[2026-04-25 18:20:38] Epoch 1 | Step 3410 | Loss: 1.0832 | LR: 1.00e-05
|
| 576 |
+
[2026-04-25 18:20:40] Epoch 1 | Step 3420 | Loss: 1.0828 | LR: 1.00e-05
|
| 577 |
+
[2026-04-25 18:20:43] Epoch 1 | Step 3430 | Loss: 1.0827 | LR: 1.00e-05
|
| 578 |
+
[2026-04-25 18:20:45] Epoch 1 | Step 3440 | Loss: 1.0828 | LR: 1.00e-05
|
| 579 |
+
[2026-04-25 18:20:48] Epoch 1 | Step 3450 | Loss: 1.0825 | LR: 1.00e-05
|
| 580 |
+
[2026-04-25 18:20:50] Epoch 1 | Step 3460 | Loss: 1.0823 | LR: 1.00e-05
|
| 581 |
+
[2026-04-25 18:20:53] Epoch 1 | Step 3470 | Loss: 1.0822 | LR: 1.00e-05
|
| 582 |
+
[2026-04-25 18:20:55] Epoch 1 | Step 3480 | Loss: 1.0820 | LR: 1.00e-05
|
| 583 |
+
[2026-04-25 18:20:57] Epoch 1 | Step 3490 | Loss: 1.0818 | LR: 1.00e-05
|
| 584 |
+
[2026-04-25 18:21:00] Epoch 1 | Step 3500 | Loss: 1.0814 | LR: 1.00e-05
|
| 585 |
+
[2026-04-25 18:21:03] Epoch 1 | Step 3510 | Loss: 1.0815 | LR: 1.00e-05
|
| 586 |
+
[2026-04-25 18:21:05] Epoch 1 | Step 3520 | Loss: 1.0811 | LR: 1.00e-05
|
| 587 |
+
[2026-04-25 18:21:08] Epoch 1 | Step 3530 | Loss: 1.0812 | LR: 1.00e-05
|
| 588 |
+
[2026-04-25 18:21:10] Epoch 1 | Step 3540 | Loss: 1.0808 | LR: 1.00e-05
|
| 589 |
+
[2026-04-25 18:21:13] Epoch 1 | Step 3550 | Loss: 1.0806 | LR: 1.00e-05
|
| 590 |
+
[2026-04-25 18:21:16] Epoch 1 | Step 3560 | Loss: 1.0806 | LR: 1.00e-05
|
| 591 |
+
[2026-04-25 18:21:18] Epoch 1 | Step 3570 | Loss: 1.0804 | LR: 1.00e-05
|
| 592 |
+
[2026-04-25 18:21:21] Epoch 1 | Step 3580 | Loss: 1.0803 | LR: 1.00e-05
|
| 593 |
+
[2026-04-25 18:21:23] Epoch 1 | Step 3590 | Loss: 1.0801 | LR: 1.00e-05
|
| 594 |
+
[2026-04-25 18:21:25] Epoch 1 | Step 3600 | Loss: 1.0797 | LR: 1.00e-05
|
| 595 |
+
[2026-04-25 18:21:28] Epoch 1 | Step 3610 | Loss: 1.0794 | LR: 1.00e-05
|
| 596 |
+
[2026-04-25 18:21:30] Epoch 1 | Step 3620 | Loss: 1.0792 | LR: 1.00e-05
|
| 597 |
+
[2026-04-25 18:21:33] Epoch 1 | Step 3630 | Loss: 1.0794 | LR: 1.00e-05
|
| 598 |
+
[2026-04-25 18:21:35] Epoch 1 | Step 3640 | Loss: 1.0795 | LR: 1.00e-05
|
| 599 |
+
[2026-04-25 18:21:38] Epoch 1 | Step 3650 | Loss: 1.0795 | LR: 1.00e-05
|
| 600 |
+
[2026-04-25 18:21:40] Epoch 1 | Step 3660 | Loss: 1.0793 | LR: 1.00e-05
|
| 601 |
+
[2026-04-25 18:21:43] Epoch 1 | Step 3670 | Loss: 1.0789 | LR: 1.00e-05
|
| 602 |
+
[2026-04-25 18:21:45] Epoch 1 | Step 3680 | Loss: 1.0788 | LR: 1.00e-05
|
| 603 |
+
[2026-04-25 18:21:48] Epoch 1 | Step 3690 | Loss: 1.0786 | LR: 1.00e-05
|
| 604 |
+
[2026-04-25 18:21:50] Epoch 1 | Step 3700 | Loss: 1.0782 | LR: 1.00e-05
|
| 605 |
+
[2026-04-25 18:21:53] Epoch 1 | Step 3710 | Loss: 1.0779 | LR: 1.00e-05
|
| 606 |
+
[2026-04-25 18:21:55] Epoch 1 | Step 3720 | Loss: 1.0778 | LR: 1.00e-05
|
| 607 |
+
[2026-04-25 18:21:58] Epoch 1 | Step 3730 | Loss: 1.0778 | LR: 1.00e-05
|
| 608 |
+
[2026-04-25 18:22:00] Epoch 1 | Step 3740 | Loss: 1.0779 | LR: 1.00e-05
|
| 609 |
+
[2026-04-25 18:22:03] Epoch 1 | Step 3750 | Loss: 1.0776 | LR: 1.00e-05
|
| 610 |
+
[2026-04-25 18:22:06] Epoch 1 | Step 3760 | Loss: 1.0776 | LR: 1.00e-05
|
| 611 |
+
[2026-04-25 18:22:08] Epoch 1 | Step 3770 | Loss: 1.0776 | LR: 1.00e-05
|
| 612 |
+
[2026-04-25 18:22:10] Epoch 1 | Step 3780 | Loss: 1.0775 | LR: 1.00e-05
|
| 613 |
+
[2026-04-25 18:22:13] Epoch 1 | Step 3790 | Loss: 1.0775 | LR: 1.00e-05
|
| 614 |
+
[2026-04-25 18:22:15] Epoch 1 | Step 3800 | Loss: 1.0776 | LR: 1.00e-05
|
| 615 |
+
[2026-04-25 18:22:18] Epoch 1 | Step 3810 | Loss: 1.0770 | LR: 1.00e-05
|
| 616 |
+
[2026-04-25 18:22:20] Epoch 1 | Step 3820 | Loss: 1.0767 | LR: 1.00e-05
|
| 617 |
+
[2026-04-25 18:22:23] Epoch 1 | Step 3830 | Loss: 1.0765 | LR: 1.00e-05
|
| 618 |
+
[2026-04-25 18:22:25] Epoch 1 | Step 3840 | Loss: 1.0764 | LR: 1.00e-05
|
| 619 |
+
[2026-04-25 18:22:28] Epoch 1 | Step 3850 | Loss: 1.0760 | LR: 1.00e-05
|
| 620 |
+
[2026-04-25 18:22:30] Epoch 1 | Step 3860 | Loss: 1.0758 | LR: 1.00e-05
|
| 621 |
+
[2026-04-25 18:22:33] Epoch 1 | Step 3870 | Loss: 1.0757 | LR: 1.00e-05
|
| 622 |
+
[2026-04-25 18:22:35] Epoch 1 | Step 3880 | Loss: 1.0751 | LR: 1.00e-05
|
| 623 |
+
[2026-04-25 18:22:38] Epoch 1 | Step 3890 | Loss: 1.0748 | LR: 1.00e-05
|
| 624 |
+
[2026-04-25 18:22:40] Epoch 1 | Step 3900 | Loss: 1.0747 | LR: 1.00e-05
|
| 625 |
+
[2026-04-25 18:22:43] Epoch 1 | Step 3910 | Loss: 1.0750 | LR: 1.00e-05
|
| 626 |
+
[2026-04-25 18:22:45] Epoch 1 | Step 3920 | Loss: 1.0749 | LR: 1.00e-05
|
| 627 |
+
[2026-04-25 18:22:48] Epoch 1 | Step 3930 | Loss: 1.0747 | LR: 1.00e-05
|
| 628 |
+
[2026-04-25 18:22:51] Epoch 1 | Step 3940 | Loss: 1.0746 | LR: 1.00e-05
|
| 629 |
+
[2026-04-25 18:22:53] Epoch 1 | Step 3950 | Loss: 1.0743 | LR: 1.00e-05
|
| 630 |
+
[2026-04-25 18:22:56] Epoch 1 | Step 3960 | Loss: 1.0743 | LR: 1.00e-05
|
| 631 |
+
[2026-04-25 18:22:58] Epoch 1 | Step 3970 | Loss: 1.0740 | LR: 9.99e-06
|
| 632 |
+
[2026-04-25 18:23:01] Epoch 1 | Step 3980 | Loss: 1.0739 | LR: 9.99e-06
|
| 633 |
+
[2026-04-25 18:23:03] Epoch 1 | Step 3990 | Loss: 1.0735 | LR: 9.97e-06
|
| 634 |
+
[2026-04-25 18:23:06] Epoch 1 | Step 4000 | Loss: 1.0735 | LR: 9.95e-06
|
| 635 |
+
[2026-04-25 18:23:06] Validation | Batch 10/84 | Loss: 0.9900
|
| 636 |
+
[2026-04-25 18:23:07] Validation | Batch 20/84 | Loss: 0.9870
|
| 637 |
+
[2026-04-25 18:23:07] Validation | Batch 30/84 | Loss: 1.0641
|
| 638 |
+
[2026-04-25 18:23:08] Validation | Batch 40/84 | Loss: 1.0678
|
| 639 |
+
[2026-04-25 18:23:08] Validation | Batch 50/84 | Loss: 1.0680
|
| 640 |
+
[2026-04-25 18:23:08] Validation | Batch 60/84 | Loss: 1.0429
|
| 641 |
+
[2026-04-25 18:23:09] Validation | Batch 70/84 | Loss: 1.0235
|
| 642 |
+
[2026-04-25 18:23:09] Validation | Batch 80/84 | Loss: 1.0302
|
| 643 |
+
[2026-04-25 18:23:09] Validation | Batch 84/84 | Loss: 1.0235
|
| 644 |
+
[2026-04-25 18:23:10] Validation | Loss: 1.0235 | PPL: 2.85 | Time: 3.75s
|
| 645 |
+
[2026-04-25 18:23:12] New best model saved! Val loss: 1.0235
|
| 646 |
+
[2026-04-25 18:23:15] Epoch 1 | Step 4010 | Loss: 1.0733 | LR: 9.93e-06
|
| 647 |
+
[2026-04-25 18:23:17] Epoch 1 | Step 4020 | Loss: 1.0734 | LR: 9.90e-06
|
| 648 |
+
[2026-04-25 18:23:20] Epoch 1 | Step 4030 | Loss: 1.0730 | LR: 9.87e-06
|
| 649 |
+
[2026-04-25 18:23:22] Epoch 1 | Step 4040 | Loss: 1.0725 | LR: 9.84e-06
|
| 650 |
+
[2026-04-25 18:23:24] Epoch 1 | Step 4050 | Loss: 1.0722 | LR: 9.80e-06
|
| 651 |
+
[2026-04-25 18:23:27] Epoch 1 | Step 4060 | Loss: 1.0716 | LR: 9.75e-06
|
| 652 |
+
[2026-04-25 18:23:29] Epoch 1 | Step 4070 | Loss: 1.0715 | LR: 9.70e-06
|
| 653 |
+
[2026-04-25 18:23:32] Epoch 1 | Step 4080 | Loss: 1.0715 | LR: 9.65e-06
|
| 654 |
+
[2026-04-25 18:23:34] Epoch 1 | Step 4090 | Loss: 1.0715 | LR: 9.59e-06
|
| 655 |
+
[2026-04-25 18:23:37] Epoch 1 | Step 4100 | Loss: 1.0715 | LR: 9.53e-06
|
| 656 |
+
[2026-04-25 18:23:39] Epoch 1 | Step 4110 | Loss: 1.0714 | LR: 9.46e-06
|
| 657 |
+
[2026-04-25 18:23:42] Epoch 1 | Step 4120 | Loss: 1.0717 | LR: 9.40e-06
|
| 658 |
+
[2026-04-25 18:23:44] Epoch 1 | Step 4130 | Loss: 1.0714 | LR: 9.32e-06
|
| 659 |
+
[2026-04-25 18:23:47] Epoch 1 | Step 4140 | Loss: 1.0715 | LR: 9.24e-06
|
| 660 |
+
[2026-04-25 18:23:50] Epoch 1 | Step 4150 | Loss: 1.0719 | LR: 9.16e-06
|
| 661 |
+
[2026-04-25 18:23:52] Epoch 1 | Step 4160 | Loss: 1.0721 | LR: 9.08e-06
|
| 662 |
+
[2026-04-25 18:23:55] Epoch 1 | Step 4170 | Loss: 1.0718 | LR: 8.99e-06
|
| 663 |
+
[2026-04-25 18:23:57] Epoch 1 | Step 4180 | Loss: 1.0717 | LR: 8.90e-06
|
| 664 |
+
[2026-04-25 18:24:00] Epoch 1 | Step 4190 | Loss: 1.0715 | LR: 8.80e-06
|
| 665 |
+
[2026-04-25 18:24:03] Epoch 1 | Step 4200 | Loss: 1.0718 | LR: 8.70e-06
|
| 666 |
+
[2026-04-25 18:24:05] Epoch 1 | Step 4210 | Loss: 1.0716 | LR: 8.60e-06
|
| 667 |
+
[2026-04-25 18:24:08] Epoch 1 | Step 4220 | Loss: 1.0721 | LR: 8.50e-06
|
| 668 |
+
[2026-04-25 18:24:11] Epoch 1 | Step 4230 | Loss: 1.0721 | LR: 8.39e-06
|
| 669 |
+
[2026-04-25 18:24:13] Epoch 1 | Step 4240 | Loss: 1.0721 | LR: 8.28e-06
|
| 670 |
+
[2026-04-25 18:24:16] Epoch 1 | Step 4250 | Loss: 1.0721 | LR: 8.16e-06
|
| 671 |
+
[2026-04-25 18:24:18] Epoch 1 | Step 4260 | Loss: 1.0717 | LR: 8.05e-06
|
| 672 |
+
[2026-04-25 18:24:21] Epoch 1 | Step 4270 | Loss: 1.0719 | LR: 7.93e-06
|
| 673 |
+
[2026-04-25 18:24:23] Epoch 1 | Step 4280 | Loss: 1.0717 | LR: 7.81e-06
|
| 674 |
+
[2026-04-25 18:24:26] Epoch 1 | Step 4290 | Loss: 1.0714 | LR: 7.68e-06
|
| 675 |
+
[2026-04-25 18:24:28] Epoch 1 | Step 4300 | Loss: 1.0714 | LR: 7.56e-06
|
| 676 |
+
[2026-04-25 18:24:31] Epoch 1 | Step 4310 | Loss: 1.0715 | LR: 7.43e-06
|
| 677 |
+
[2026-04-25 18:24:33] Epoch 1 | Step 4320 | Loss: 1.0715 | LR: 7.30e-06
|
| 678 |
+
[2026-04-25 18:24:36] Epoch 1 | Step 4330 | Loss: 1.0713 | LR: 7.16e-06
|
| 679 |
+
[2026-04-25 18:24:38] Epoch 1 | Step 4340 | Loss: 1.0712 | LR: 7.03e-06
|
| 680 |
+
[2026-04-25 18:24:41] Epoch 1 | Step 4350 | Loss: 1.0709 | LR: 6.90e-06
|
| 681 |
+
[2026-04-25 18:24:43] Epoch 1 | Step 4360 | Loss: 1.0708 | LR: 6.76e-06
|
| 682 |
+
[2026-04-25 18:24:46] Epoch 1 | Step 4370 | Loss: 1.0708 | LR: 6.62e-06
|
| 683 |
+
[2026-04-25 18:24:48] Epoch 1 | Step 4380 | Loss: 1.0706 | LR: 6.48e-06
|
| 684 |
+
[2026-04-25 18:24:51] Epoch 1 | Step 4390 | Loss: 1.0706 | LR: 6.34e-06
|
| 685 |
+
[2026-04-25 18:24:53] Epoch 1 | Step 4400 | Loss: 1.0704 | LR: 6.20e-06
|
| 686 |
+
[2026-04-25 18:24:56] Epoch 1 | Step 4410 | Loss: 1.0699 | LR: 6.06e-06
|
| 687 |
+
[2026-04-25 18:24:58] Epoch 1 | Step 4420 | Loss: 1.0701 | LR: 5.92e-06
|
| 688 |
+
[2026-04-25 18:25:01] Epoch 1 | Step 4430 | Loss: 1.0700 | LR: 5.78e-06
|
| 689 |
+
[2026-04-25 18:25:03] Epoch 1 | Step 4440 | Loss: 1.0702 | LR: 5.63e-06
|
| 690 |
+
[2026-04-25 18:25:06] Epoch 1 | Step 4450 | Loss: 1.0700 | LR: 5.49e-06
|
| 691 |
+
[2026-04-25 18:25:08] Epoch 1 | Step 4460 | Loss: 1.0703 | LR: 5.35e-06
|
| 692 |
+
[2026-04-25 18:25:11] Epoch 1 | Step 4470 | Loss: 1.0700 | LR: 5.20e-06
|
| 693 |
+
[2026-04-25 18:25:13] Epoch 1 | Step 4480 | Loss: 1.0698 | LR: 5.06e-06
|
| 694 |
+
[2026-04-25 18:25:16] Epoch 1 | Step 4490 | Loss: 1.0696 | LR: 4.92e-06
|
| 695 |
+
[2026-04-25 18:25:18] Epoch 1 | Step 4500 | Loss: 1.0697 | LR: 4.78e-06
|
| 696 |
+
[2026-04-25 18:25:21] Epoch 1 | Step 4510 | Loss: 1.0692 | LR: 4.64e-06
|
| 697 |
+
[2026-04-25 18:25:23] Epoch 1 | Step 4520 | Loss: 1.0690 | LR: 4.50e-06
|
| 698 |
+
[2026-04-25 18:25:26] Epoch 1 | Step 4530 | Loss: 1.0687 | LR: 4.36e-06
|
| 699 |
+
[2026-04-25 18:25:29] Epoch 1 | Step 4540 | Loss: 1.0685 | LR: 4.22e-06
|
| 700 |
+
[2026-04-25 18:25:32] Epoch 1 | Step 4550 | Loss: 1.0681 | LR: 4.08e-06
|
| 701 |
+
[2026-04-25 18:25:34] Epoch 1 | Step 4560 | Loss: 1.0681 | LR: 3.95e-06
|
| 702 |
+
[2026-04-25 18:25:37] Epoch 1 | Step 4570 | Loss: 1.0681 | LR: 3.82e-06
|
| 703 |
+
[2026-04-25 18:25:39] Epoch 1 | Step 4580 | Loss: 1.0679 | LR: 3.68e-06
|
| 704 |
+
[2026-04-25 18:25:42] Epoch 1 | Step 4590 | Loss: 1.0677 | LR: 3.55e-06
|
| 705 |
+
[2026-04-25 18:25:44] Epoch 1 | Step 4600 | Loss: 1.0675 | LR: 3.43e-06
|
| 706 |
+
[2026-04-25 18:25:46] Epoch 1 | Step 4610 | Loss: 1.0673 | LR: 3.30e-06
|
| 707 |
+
[2026-04-25 18:25:49] Epoch 1 | Step 4620 | Loss: 1.0674 | LR: 3.18e-06
|
| 708 |
+
[2026-04-25 18:25:51] Epoch 1 | Step 4630 | Loss: 1.0672 | LR: 3.05e-06
|
| 709 |
+
[2026-04-25 18:25:54] Epoch 1 | Step 4640 | Loss: 1.0671 | LR: 2.94e-06
|
| 710 |
+
[2026-04-25 18:25:56] Epoch 1 | Step 4650 | Loss: 1.0671 | LR: 2.82e-06
|
| 711 |
+
[2026-04-25 18:25:59] Epoch 1 | Step 4660 | Loss: 1.0669 | LR: 2.71e-06
|
| 712 |
+
[2026-04-25 18:26:01] Epoch 1 | Step 4670 | Loss: 1.0667 | LR: 2.60e-06
|
| 713 |
+
[2026-04-25 18:26:04] Epoch 1 | Step 4680 | Loss: 1.0668 | LR: 2.49e-06
|
| 714 |
+
[2026-04-25 18:26:07] Epoch 1 | Step 4690 | Loss: 1.0666 | LR: 2.38e-06
|
| 715 |
+
[2026-04-25 18:26:09] Epoch 1 | Step 4700 | Loss: 1.0668 | LR: 2.28e-06
|
| 716 |
+
[2026-04-25 18:26:12] Epoch 1 | Step 4710 | Loss: 1.0666 | LR: 2.18e-06
|
| 717 |
+
[2026-04-25 18:26:14] Epoch 1 | Step 4720 | Loss: 1.0664 | LR: 2.09e-06
|
| 718 |
+
[2026-04-25 18:26:16] Epoch 1 | Step 4730 | Loss: 1.0664 | LR: 2.00e-06
|
| 719 |
+
[2026-04-25 18:26:19] Epoch 1 | Step 4740 | Loss: 1.0662 | LR: 1.91e-06
|
| 720 |
+
[2026-04-25 18:26:21] Epoch 1 | Step 4750 | Loss: 1.0661 | LR: 1.82e-06
|
| 721 |
+
[2026-04-25 18:26:23] Epoch 1 | Step 4760 | Loss: 1.0659 | LR: 1.74e-06
|
| 722 |
+
[2026-04-25 18:26:26] Epoch 1 | Step 4770 | Loss: 1.0655 | LR: 1.67e-06
|
| 723 |
+
[2026-04-25 18:26:28] Epoch 1 | Step 4780 | Loss: 1.0655 | LR: 1.59e-06
|
| 724 |
+
[2026-04-25 18:26:31] Epoch 1 | Step 4790 | Loss: 1.0654 | LR: 1.52e-06
|
| 725 |
+
[2026-04-25 18:26:34] Epoch 1 | Step 4800 | Loss: 1.0652 | LR: 1.46e-06
|
| 726 |
+
[2026-04-25 18:26:36] Epoch 1 | Step 4810 | Loss: 1.0648 | LR: 1.40e-06
|
| 727 |
+
[2026-04-25 18:26:39] Epoch 1 | Step 4820 | Loss: 1.0646 | LR: 1.34e-06
|
| 728 |
+
[2026-04-25 18:26:41] Epoch 1 | Step 4830 | Loss: 1.0642 | LR: 1.29e-06
|
| 729 |
+
[2026-04-25 18:26:44] Epoch 1 | Step 4840 | Loss: 1.0641 | LR: 1.24e-06
|
| 730 |
+
[2026-04-25 18:26:47] Epoch 1 | Step 4850 | Loss: 1.0642 | LR: 1.20e-06
|
| 731 |
+
[2026-04-25 18:26:49] Epoch 1 | Step 4860 | Loss: 1.0644 | LR: 1.16e-06
|
| 732 |
+
[2026-04-25 18:26:52] Epoch 1 | Step 4870 | Loss: 1.0645 | LR: 1.12e-06
|
| 733 |
+
[2026-04-25 18:26:54] Epoch 1 | Step 4880 | Loss: 1.0643 | LR: 1.09e-06
|
| 734 |
+
[2026-04-25 18:26:57] Epoch 1 | Step 4890 | Loss: 1.0640 | LR: 1.06e-06
|
| 735 |
+
[2026-04-25 18:27:00] Epoch 1 | Step 4900 | Loss: 1.0640 | LR: 1.04e-06
|
| 736 |
+
[2026-04-25 18:27:02] Epoch 1 | Step 4910 | Loss: 1.0639 | LR: 1.03e-06
|
| 737 |
+
[2026-04-25 18:27:04] Epoch 1 | Step 4920 | Loss: 1.0638 | LR: 1.01e-06
|
| 738 |
+
[2026-04-25 18:27:07] Epoch 1 | Step 4930 | Loss: 1.0637 | LR: 1.00e-06
|
| 739 |
+
[2026-04-25 18:27:09] Epoch 1 | Step 4940 | Loss: 1.0636 | LR: 1.00e-06
|
| 740 |
+
[2026-04-25 18:27:12] Epoch 1 | Step 4950 | Loss: 1.0636 | LR: 1.00e-06
|
| 741 |
+
[2026-04-25 18:27:14] Epoch 1 | Step 4960 | Loss: 1.0635 | LR: 1.00e-06
|
| 742 |
+
[2026-04-25 18:27:17] Epoch 1 | Step 4970 | Loss: 1.0633 | LR: 1.00e-06
|
| 743 |
+
[2026-04-25 18:27:19] Epoch 1 | Step 4980 | Loss: 1.0632 | LR: 1.00e-06
|
| 744 |
+
[2026-04-25 18:27:21] Epoch 1 | Step 4990 | Loss: 1.0629 | LR: 1.00e-06
|
| 745 |
+
[2026-04-25 18:27:24] Epoch 1 | Step 5000 | Loss: 1.0631 | LR: 1.00e-06
|
| 746 |
+
[2026-04-25 18:27:26] Epoch 1 | Step 5010 | Loss: 1.0628 | LR: 1.00e-06
|
| 747 |
+
[2026-04-25 18:27:29] Epoch 1 | Step 5020 | Loss: 1.0626 | LR: 1.00e-06
|
| 748 |
+
[2026-04-25 18:27:31] Epoch 1 | Step 5030 | Loss: 1.0626 | LR: 1.00e-06
|
| 749 |
+
[2026-04-25 18:27:33] Epoch 1 | Step 5040 | Loss: 1.0624 | LR: 1.00e-06
|
| 750 |
+
[2026-04-25 18:27:36] Epoch 1 | Step 5050 | Loss: 1.0622 | LR: 1.00e-06
|
| 751 |
+
[2026-04-25 18:27:39] Epoch 1 | Step 5060 | Loss: 1.0621 | LR: 1.00e-06
|
| 752 |
+
[2026-04-25 18:27:41] Epoch 1 | Step 5070 | Loss: 1.0621 | LR: 1.00e-06
|
| 753 |
+
[2026-04-25 18:27:44] Epoch 1 | Step 5080 | Loss: 1.0622 | LR: 1.00e-06
|
| 754 |
+
[2026-04-25 18:27:46] Epoch 1 | Step 5090 | Loss: 1.0622 | LR: 1.00e-06
|
| 755 |
+
[2026-04-25 18:27:49] Epoch 1 | Step 5100 | Loss: 1.0620 | LR: 1.00e-06
|
| 756 |
+
[2026-04-25 18:27:51] Epoch 1 | Step 5110 | Loss: 1.0619 | LR: 1.00e-06
|
| 757 |
+
[2026-04-25 18:27:54] Epoch 1 | Step 5120 | Loss: 1.0620 | LR: 1.00e-06
|
| 758 |
+
[2026-04-25 18:27:57] Epoch 1 | Step 5130 | Loss: 1.0619 | LR: 1.00e-06
|
| 759 |
+
[2026-04-25 18:27:59] Epoch 1 | Step 5140 | Loss: 1.0617 | LR: 1.00e-06
|
| 760 |
+
[2026-04-25 18:28:01] Epoch 1 | Step 5150 | Loss: 1.0615 | LR: 1.00e-06
|
| 761 |
+
[2026-04-25 18:28:04] Epoch 1 | Step 5160 | Loss: 1.0610 | LR: 1.00e-06
|
| 762 |
+
[2026-04-25 18:28:06] Epoch 1 | Step 5170 | Loss: 1.0610 | LR: 1.00e-06
|
| 763 |
+
[2026-04-25 18:28:09] Epoch 1 | Step 5180 | Loss: 1.0608 | LR: 1.00e-06
|
| 764 |
+
[2026-04-25 18:28:12] Epoch 1 | Step 5190 | Loss: 1.0608 | LR: 1.00e-06
|
| 765 |
+
[2026-04-25 18:28:14] Epoch 1 | Step 5200 | Loss: 1.0607 | LR: 1.00e-06
|
| 766 |
+
[2026-04-25 18:28:17] Epoch 1 | Step 5210 | Loss: 1.0605 | LR: 1.00e-06
|
| 767 |
+
[2026-04-25 18:28:19] Epoch 1 | Step 5220 | Loss: 1.0605 | LR: 1.00e-06
|
| 768 |
+
[2026-04-25 18:28:22] Epoch 1 | Step 5230 | Loss: 1.0604 | LR: 1.00e-06
|
| 769 |
+
[2026-04-25 18:28:25] Epoch 1 | Step 5240 | Loss: 1.0603 | LR: 1.00e-06
|
| 770 |
+
[2026-04-25 18:28:27] Epoch 1 | Step 5250 | Loss: 1.0603 | LR: 1.00e-06
|
| 771 |
+
[2026-04-25 18:28:30] Epoch 1 | Step 5260 | Loss: 1.0602 | LR: 1.00e-06
|
| 772 |
+
[2026-04-25 18:28:32] Epoch 1 | Step 5270 | Loss: 1.0601 | LR: 1.00e-06
|
| 773 |
+
[2026-04-25 18:28:35] Epoch 1 | Step 5280 | Loss: 1.0598 | LR: 1.00e-06
|
| 774 |
+
[2026-04-25 18:28:37] Epoch 1 | Step 5290 | Loss: 1.0595 | LR: 1.00e-06
|
| 775 |
+
[2026-04-25 18:28:40] Epoch 1 | Step 5300 | Loss: 1.0594 | LR: 1.00e-06
|
| 776 |
+
[2026-04-25 18:28:42] Epoch 1 | Step 5310 | Loss: 1.0595 | LR: 1.00e-06
|
| 777 |
+
[2026-04-25 18:28:45] Epoch 1 | Step 5320 | Loss: 1.0593 | LR: 1.00e-06
|
| 778 |
+
[2026-04-25 18:28:47] Epoch 1 | Step 5330 | Loss: 1.0593 | LR: 1.00e-06
|
| 779 |
+
[2026-04-25 18:28:49] Epoch 1 | Step 5340 | Loss: 1.0591 | LR: 1.00e-06
|
| 780 |
+
[2026-04-25 18:28:52] Epoch 1 | Step 5350 | Loss: 1.0589 | LR: 1.00e-06
|
| 781 |
+
[2026-04-25 18:28:54] Epoch 1 | Step 5360 | Loss: 1.0591 | LR: 1.00e-06
|
| 782 |
+
[2026-04-25 18:28:57] Epoch 1 | Step 5370 | Loss: 1.0590 | LR: 1.00e-06
|
| 783 |
+
[2026-04-25 18:28:59] Epoch 1 | Step 5380 | Loss: 1.0588 | LR: 1.00e-06
|
| 784 |
+
[2026-04-25 18:29:02] Epoch 1 | Step 5390 | Loss: 1.0585 | LR: 1.00e-06
|
| 785 |
+
[2026-04-25 18:29:04] Epoch 1 | Step 5400 | Loss: 1.0583 | LR: 1.00e-06
|
| 786 |
+
[2026-04-25 18:29:07] Epoch 1 | Step 5410 | Loss: 1.0583 | LR: 1.00e-06
|
| 787 |
+
[2026-04-25 18:29:09] Epoch 1 | Step 5420 | Loss: 1.0580 | LR: 1.00e-06
|
| 788 |
+
[2026-04-25 18:29:12] Epoch 1 | Step 5430 | Loss: 1.0580 | LR: 1.00e-06
|
| 789 |
+
[2026-04-25 18:29:14] Epoch 1 | Step 5440 | Loss: 1.0580 | LR: 1.00e-06
|
| 790 |
+
[2026-04-25 18:29:17] Epoch 1 | Step 5450 | Loss: 1.0582 | LR: 1.00e-06
|
| 791 |
+
[2026-04-25 18:29:19] Epoch 1 | Step 5460 | Loss: 1.0580 | LR: 1.00e-06
|
| 792 |
+
[2026-04-25 18:29:22] Epoch 1 | Step 5470 | Loss: 1.0578 | LR: 1.00e-06
|
| 793 |
+
[2026-04-25 18:29:24] Epoch 1 | Step 5480 | Loss: 1.0578 | LR: 1.00e-06
|
| 794 |
+
[2026-04-25 18:29:27] Epoch 1 | Step 5490 | Loss: 1.0578 | LR: 1.00e-06
|
| 795 |
+
[2026-04-25 18:29:29] Epoch 1 | Step 5500 | Loss: 1.0577 | LR: 1.00e-06
|
| 796 |
+
[2026-04-25 18:29:32] Epoch 1 | Step 5510 | Loss: 1.0579 | LR: 1.00e-06
|
| 797 |
+
[2026-04-25 18:29:34] Epoch 1 | Step 5520 | Loss: 1.0578 | LR: 1.00e-06
|
| 798 |
+
[2026-04-25 18:29:37] Epoch 1 | Step 5530 | Loss: 1.0577 | LR: 1.00e-06
|
| 799 |
+
[2026-04-25 18:29:39] Epoch 1 | Step 5540 | Loss: 1.0573 | LR: 1.00e-06
|
| 800 |
+
[2026-04-25 18:29:42] Epoch 1 | Step 5550 | Loss: 1.0573 | LR: 1.00e-06
|
| 801 |
+
[2026-04-25 18:29:44] Epoch 1 | Step 5560 | Loss: 1.0572 | LR: 1.00e-06
|
| 802 |
+
[2026-04-25 18:29:47] Epoch 1 | Step 5570 | Loss: 1.0574 | LR: 1.00e-06
|
| 803 |
+
[2026-04-25 18:29:49] Epoch 1 | Step 5580 | Loss: 1.0572 | LR: 1.00e-06
|
| 804 |
+
[2026-04-25 18:29:52] Epoch 1 | Step 5590 | Loss: 1.0570 | LR: 1.00e-06
|
| 805 |
+
[2026-04-25 18:29:54] Epoch 1 | Step 5600 | Loss: 1.0571 | LR: 1.00e-06
|
| 806 |
+
[2026-04-25 18:29:57] Epoch 1 | Step 5610 | Loss: 1.0572 | LR: 1.00e-06
|
| 807 |
+
[2026-04-25 18:29:59] Epoch 1 | Step 5620 | Loss: 1.0570 | LR: 1.00e-06
|
| 808 |
+
[2026-04-25 18:30:02] Epoch 1 | Step 5630 | Loss: 1.0571 | LR: 1.00e-06
|
| 809 |
+
[2026-04-25 18:30:04] Epoch 1 | Step 5640 | Loss: 1.0570 | LR: 1.00e-06
|
| 810 |
+
[2026-04-25 18:30:07] Epoch 1 | Step 5650 | Loss: 1.0569 | LR: 1.00e-06
|
| 811 |
+
[2026-04-25 18:30:09] Epoch 1 | Step 5660 | Loss: 1.0567 | LR: 1.00e-06
|
| 812 |
+
[2026-04-25 18:30:12] Epoch 1 | Step 5670 | Loss: 1.0566 | LR: 1.00e-06
|
| 813 |
+
[2026-04-25 18:30:14] Epoch 1 | Step 5680 | Loss: 1.0564 | LR: 1.00e-06
|
| 814 |
+
[2026-04-25 18:30:17] Epoch 1 | Step 5690 | Loss: 1.0564 | LR: 1.00e-06
|
| 815 |
+
[2026-04-25 18:30:19] Epoch 1 | Step 5700 | Loss: 1.0563 | LR: 1.00e-06
|
| 816 |
+
[2026-04-25 18:30:22] Epoch 1 | Step 5710 | Loss: 1.0563 | LR: 1.00e-06
|
| 817 |
+
[2026-04-25 18:30:25] Epoch 1 | Step 5720 | Loss: 1.0564 | LR: 1.00e-06
|
| 818 |
+
[2026-04-25 18:30:27] Epoch 1 | Step 5730 | Loss: 1.0563 | LR: 1.00e-06
|
| 819 |
+
[2026-04-25 18:30:30] Epoch 1 | Step 5740 | Loss: 1.0563 | LR: 1.00e-06
|
| 820 |
+
[2026-04-25 18:30:32] Epoch 1 | Step 5750 | Loss: 1.0562 | LR: 1.00e-06
|
| 821 |
+
[2026-04-25 18:30:35] Epoch 1 | Step 5760 | Loss: 1.0562 | LR: 1.00e-06
|
| 822 |
+
[2026-04-25 18:30:37] Epoch 1 | Step 5770 | Loss: 1.0562 | LR: 1.00e-06
|
| 823 |
+
[2026-04-25 18:30:40] Epoch 1 | Step 5780 | Loss: 1.0560 | LR: 1.00e-06
|
| 824 |
+
[2026-04-25 18:30:42] Epoch 1 | Step 5790 | Loss: 1.0561 | LR: 1.00e-06
|
| 825 |
+
[2026-04-25 18:30:45] Epoch 1 | Step 5800 | Loss: 1.0563 | LR: 1.00e-06
|
| 826 |
+
[2026-04-25 18:30:47] Epoch 1 | Step 5810 | Loss: 1.0562 | LR: 1.00e-06
|
| 827 |
+
[2026-04-25 18:30:50] Epoch 1 | Step 5820 | Loss: 1.0560 | LR: 1.00e-06
|
| 828 |
+
[2026-04-25 18:30:52] Epoch 1 | Step 5830 | Loss: 1.0559 | LR: 1.00e-06
|
| 829 |
+
[2026-04-25 18:30:55] Epoch 1 | Step 5840 | Loss: 1.0560 | LR: 1.00e-06
|
| 830 |
+
[2026-04-25 18:30:57] Epoch 1 | Step 5850 | Loss: 1.0561 | LR: 1.00e-06
|
| 831 |
+
[2026-04-25 18:31:00] Epoch 1 | Step 5860 | Loss: 1.0560 | LR: 1.00e-06
|
| 832 |
+
[2026-04-25 18:31:02] Epoch 1 | Step 5870 | Loss: 1.0559 | LR: 1.00e-06
|
| 833 |
+
[2026-04-25 18:31:05] Epoch 1 | Step 5880 | Loss: 1.0560 | LR: 1.00e-06
|
| 834 |
+
[2026-04-25 18:31:07] Epoch 1 | Step 5890 | Loss: 1.0560 | LR: 1.00e-06
|
| 835 |
+
[2026-04-25 18:31:10] Epoch 1 | Step 5900 | Loss: 1.0558 | LR: 1.00e-06
|
| 836 |
+
[2026-04-25 18:31:13] Epoch 1 | Step 5910 | Loss: 1.0558 | LR: 1.00e-06
|
| 837 |
+
[2026-04-25 18:31:15] Epoch 1 | Step 5920 | Loss: 1.0555 | LR: 1.00e-06
|
| 838 |
+
[2026-04-25 18:31:18] Epoch 1 | Step 5930 | Loss: 1.0556 | LR: 1.00e-06
|
| 839 |
+
[2026-04-25 18:31:21] Epoch 1 | Step 5940 | Loss: 1.0555 | LR: 1.00e-06
|
| 840 |
+
[2026-04-25 18:31:23] Epoch 1 | Step 5950 | Loss: 1.0555 | LR: 1.00e-06
|
| 841 |
+
[2026-04-25 18:31:26] Epoch 1 | Step 5960 | Loss: 1.0555 | LR: 1.00e-06
|
| 842 |
+
[2026-04-25 18:31:28] Epoch 1 | Step 5970 | Loss: 1.0556 | LR: 1.00e-06
|
| 843 |
+
[2026-04-25 18:31:31] Epoch 1 | Step 5980 | Loss: 1.0555 | LR: 1.00e-06
|
| 844 |
+
[2026-04-25 18:31:33] Epoch 1 | Step 5990 | Loss: 1.0557 | LR: 1.00e-06
|
| 845 |
+
[2026-04-25 18:31:36] Epoch 1 | Step 6000 | Loss: 1.0555 | LR: 1.00e-06
|
| 846 |
+
[2026-04-25 18:31:36] Validation | Batch 10/84 | Loss: 0.9832
|
| 847 |
+
[2026-04-25 18:31:37] Validation | Batch 20/84 | Loss: 0.9809
|
| 848 |
+
[2026-04-25 18:31:37] Validation | Batch 30/84 | Loss: 1.0582
|
| 849 |
+
[2026-04-25 18:31:38] Validation | Batch 40/84 | Loss: 1.0622
|
| 850 |
+
[2026-04-25 18:31:38] Validation | Batch 50/84 | Loss: 1.0617
|
| 851 |
+
[2026-04-25 18:31:39] Validation | Batch 60/84 | Loss: 1.0346
|
| 852 |
+
[2026-04-25 18:31:39] Validation | Batch 70/84 | Loss: 1.0156
|
| 853 |
+
[2026-04-25 18:31:39] Validation | Batch 80/84 | Loss: 1.0225
|
| 854 |
+
[2026-04-25 18:31:40] Validation | Batch 84/84 | Loss: 1.0160
|
| 855 |
+
[2026-04-25 18:31:40] Validation | Loss: 1.0160 | PPL: 2.84 | Time: 3.75s
|
| 856 |
+
[2026-04-25 18:31:43] New best model saved! Val loss: 1.0160
|
| 857 |
+
[2026-04-25 18:31:45] Epoch 1 | Step 6010 | Loss: 1.0555 | LR: 1.00e-06
|
| 858 |
+
[2026-04-25 18:31:48] Epoch 1 | Step 6020 | Loss: 1.0553 | LR: 1.00e-06
|
| 859 |
+
[2026-04-25 18:31:50] Epoch 1 | Step 6030 | Loss: 1.0554 | LR: 1.00e-06
|
| 860 |
+
[2026-04-25 18:31:53] Epoch 1 | Step 6040 | Loss: 1.0554 | LR: 1.00e-06
|
| 861 |
+
[2026-04-25 18:31:55] Epoch 1 | Step 6050 | Loss: 1.0555 | LR: 1.00e-06
|
| 862 |
+
[2026-04-25 18:31:58] Epoch 1 | Step 6060 | Loss: 1.0554 | LR: 1.00e-06
|
| 863 |
+
[2026-04-25 18:32:01] Epoch 1 | Step 6070 | Loss: 1.0552 | LR: 1.00e-06
|
| 864 |
+
[2026-04-25 18:32:03] Epoch 1 | Step 6080 | Loss: 1.0553 | LR: 1.00e-06
|
| 865 |
+
[2026-04-25 18:32:06] Epoch 1 | Step 6090 | Loss: 1.0553 | LR: 1.00e-06
|
| 866 |
+
[2026-04-25 18:32:08] Epoch 1 | Step 6100 | Loss: 1.0554 | LR: 1.00e-06
|
| 867 |
+
[2026-04-25 18:32:11] Epoch 1 | Step 6110 | Loss: 1.0554 | LR: 1.00e-06
|
| 868 |
+
[2026-04-25 18:32:13] Epoch 1 | Step 6120 | Loss: 1.0553 | LR: 1.00e-06
|
| 869 |
+
[2026-04-25 18:32:16] Epoch 1 | Step 6130 | Loss: 1.0552 | LR: 1.00e-06
|
| 870 |
+
[2026-04-25 18:32:18] Epoch 1 | Step 6140 | Loss: 1.0548 | LR: 1.00e-06
|
| 871 |
+
[2026-04-25 18:32:21] Epoch 1 | Step 6150 | Loss: 1.0547 | LR: 1.00e-06
|
| 872 |
+
[2026-04-25 18:32:23] Epoch 1 | Step 6160 | Loss: 1.0547 | LR: 1.00e-06
|
| 873 |
+
[2026-04-25 18:32:26] Epoch 1 | Step 6170 | Loss: 1.0548 | LR: 1.00e-06
|
| 874 |
+
[2026-04-25 18:32:28] Epoch 1 | Step 6180 | Loss: 1.0546 | LR: 1.00e-06
|
| 875 |
+
[2026-04-25 18:32:31] Epoch 1 | Step 6190 | Loss: 1.0543 | LR: 1.00e-06
|
| 876 |
+
[2026-04-25 18:32:33] Epoch 1 | Step 6200 | Loss: 1.0542 | LR: 1.00e-06
|
| 877 |
+
[2026-04-25 18:32:36] Epoch 1 | Step 6210 | Loss: 1.0542 | LR: 1.00e-06
|
| 878 |
+
[2026-04-25 18:32:39] Epoch 1 | Step 6220 | Loss: 1.0543 | LR: 1.00e-06
|
| 879 |
+
[2026-04-25 18:32:41] Epoch 1 | Step 6230 | Loss: 1.0541 | LR: 1.00e-06
|
| 880 |
+
[2026-04-25 18:32:44] Epoch 1 | Step 6240 | Loss: 1.0541 | LR: 1.00e-06
|
| 881 |
+
[2026-04-25 18:32:46] Epoch 1 | Step 6250 | Loss: 1.0538 | LR: 1.00e-06
|
| 882 |
+
[2026-04-25 18:32:49] Epoch 1 | Step 6260 | Loss: 1.0538 | LR: 1.00e-06
|
| 883 |
+
[2026-04-25 18:32:51] Epoch 1 | Step 6270 | Loss: 1.0537 | LR: 1.00e-06
|
| 884 |
+
[2026-04-25 18:32:54] Epoch 1 | Step 6280 | Loss: 1.0535 | LR: 1.00e-06
|
| 885 |
+
[2026-04-25 18:32:56] Epoch 1 | Step 6290 | Loss: 1.0534 | LR: 1.00e-06
|
| 886 |
+
[2026-04-25 18:32:59] Epoch 1 | Step 6300 | Loss: 1.0534 | LR: 1.00e-06
|
| 887 |
+
[2026-04-25 18:33:01] Epoch 1 | Step 6310 | Loss: 1.0535 | LR: 1.00e-06
|
| 888 |
+
[2026-04-25 18:33:04] Epoch 1 | Step 6320 | Loss: 1.0534 | LR: 1.00e-06
|
| 889 |
+
[2026-04-25 18:33:07] Epoch 1 | Step 6330 | Loss: 1.0536 | LR: 1.00e-06
|
| 890 |
+
[2026-04-25 18:33:09] Epoch 1 | Step 6340 | Loss: 1.0536 | LR: 1.00e-06
|
| 891 |
+
[2026-04-25 18:33:11] Epoch 1 | Step 6350 | Loss: 1.0536 | LR: 1.00e-06
|
| 892 |
+
[2026-04-25 18:33:14] Epoch 1 | Step 6360 | Loss: 1.0536 | LR: 1.00e-06
|
| 893 |
+
[2026-04-25 18:33:16] Epoch 1 | Step 6370 | Loss: 1.0536 | LR: 1.00e-06
|
| 894 |
+
[2026-04-25 18:33:19] Epoch 1 | Step 6380 | Loss: 1.0536 | LR: 1.00e-06
|
| 895 |
+
[2026-04-25 18:33:21] Epoch 1 | Step 6390 | Loss: 1.0534 | LR: 1.00e-06
|
| 896 |
+
[2026-04-25 18:33:24] Epoch 1 | Step 6400 | Loss: 1.0534 | LR: 1.00e-06
|
| 897 |
+
[2026-04-25 18:33:26] Epoch 1 | Step 6410 | Loss: 1.0533 | LR: 1.00e-06
|
| 898 |
+
[2026-04-25 18:33:28] Epoch 1 | Step 6420 | Loss: 1.0531 | LR: 1.00e-06
|
| 899 |
+
[2026-04-25 18:33:31] Epoch 1 | Step 6430 | Loss: 1.0530 | LR: 1.00e-06
|
| 900 |
+
[2026-04-25 18:33:33] Epoch 1 | Step 6440 | Loss: 1.0530 | LR: 1.00e-06
|
| 901 |
+
[2026-04-25 18:33:35] Epoch 1 | Step 6450 | Loss: 1.0529 | LR: 1.00e-06
|
| 902 |
+
[2026-04-25 18:33:38] Epoch 1 | Step 6460 | Loss: 1.0525 | LR: 1.00e-06
|
| 903 |
+
[2026-04-25 18:33:41] Epoch 1 | Step 6470 | Loss: 1.0525 | LR: 1.00e-06
|
| 904 |
+
[2026-04-25 18:33:43] Epoch 1 | Step 6480 | Loss: 1.0526 | LR: 1.00e-06
|
| 905 |
+
[2026-04-25 18:33:45] Epoch 1 | Step 6490 | Loss: 1.0527 | LR: 1.00e-06
|
| 906 |
+
[2026-04-25 18:33:48] Epoch 1 | Step 6500 | Loss: 1.0524 | LR: 1.00e-06
|
| 907 |
+
[2026-04-25 18:33:50] Epoch 1 | Step 6510 | Loss: 1.0523 | LR: 1.00e-06
|
| 908 |
+
[2026-04-25 18:33:53] Epoch 1 | Step 6520 | Loss: 1.0521 | LR: 1.00e-06
|
| 909 |
+
[2026-04-25 18:33:55] Epoch 1 | Step 6530 | Loss: 1.0518 | LR: 1.00e-06
|
| 910 |
+
[2026-04-25 18:33:58] Epoch 1 | Step 6540 | Loss: 1.0518 | LR: 1.00e-06
|
| 911 |
+
[2026-04-25 18:34:00] Epoch 1 | Step 6550 | Loss: 1.0516 | LR: 1.00e-06
|
| 912 |
+
[2026-04-25 18:34:02] Epoch 1 | Step 6560 | Loss: 1.0515 | LR: 1.00e-06
|
| 913 |
+
[2026-04-25 18:34:05] Epoch 1 | Step 6570 | Loss: 1.0515 | LR: 1.00e-06
|
| 914 |
+
[2026-04-25 18:34:07] Epoch 1 | Step 6580 | Loss: 1.0515 | LR: 1.00e-06
|
| 915 |
+
[2026-04-25 18:34:10] Epoch 1 | Step 6590 | Loss: 1.0514 | LR: 1.00e-06
|
| 916 |
+
[2026-04-25 18:34:13] Epoch 1 | Step 6600 | Loss: 1.0513 | LR: 1.00e-06
|
| 917 |
+
[2026-04-25 18:34:15] Epoch 1 | Step 6610 | Loss: 1.0512 | LR: 1.00e-06
|
| 918 |
+
[2026-04-25 18:34:18] Epoch 1 | Step 6620 | Loss: 1.0511 | LR: 1.00e-06
|
| 919 |
+
[2026-04-25 18:34:20] Epoch 1 | Step 6630 | Loss: 1.0510 | LR: 1.00e-06
|
| 920 |
+
[2026-04-25 18:34:23] Epoch 1 | Step 6640 | Loss: 1.0510 | LR: 1.00e-06
|
| 921 |
+
[2026-04-25 18:34:25] Epoch 1 | Step 6650 | Loss: 1.0511 | LR: 1.00e-06
|
| 922 |
+
[2026-04-25 18:34:27] Epoch 1 | Step 6660 | Loss: 1.0508 | LR: 1.00e-06
|
| 923 |
+
[2026-04-25 18:34:30] Epoch 1 | Step 6670 | Loss: 1.0508 | LR: 1.00e-06
|
| 924 |
+
[2026-04-25 18:34:33] Epoch 1 | Step 6680 | Loss: 1.0508 | LR: 1.00e-06
|
| 925 |
+
[2026-04-25 18:34:35] Epoch 1 | Step 6690 | Loss: 1.0507 | LR: 1.00e-06
|
| 926 |
+
[2026-04-25 18:34:38] Epoch 1 | Step 6700 | Loss: 1.0507 | LR: 1.00e-06
|
| 927 |
+
[2026-04-25 18:34:40] Epoch 1 | Step 6710 | Loss: 1.0507 | LR: 1.00e-06
|
| 928 |
+
[2026-04-25 18:34:42] Epoch 1 | Step 6720 | Loss: 1.0506 | LR: 1.00e-06
|
| 929 |
+
[2026-04-25 18:34:45] Epoch 1 | Step 6730 | Loss: 1.0508 | LR: 1.00e-06
|
| 930 |
+
[2026-04-25 18:34:48] Epoch 1 | Step 6740 | Loss: 1.0506 | LR: 1.00e-06
|
| 931 |
+
[2026-04-25 18:34:50] Epoch 1 | Step 6750 | Loss: 1.0504 | LR: 1.00e-06
|
| 932 |
+
[2026-04-25 18:34:52] Epoch 1 | Step 6760 | Loss: 1.0505 | LR: 1.00e-06
|
| 933 |
+
[2026-04-25 18:34:55] Epoch 1 | Step 6770 | Loss: 1.0504 | LR: 1.00e-06
|
| 934 |
+
[2026-04-25 18:34:57] Epoch 1 | Step 6780 | Loss: 1.0503 | LR: 1.00e-06
|
| 935 |
+
[2026-04-25 18:35:00] Epoch 1 | Step 6790 | Loss: 1.0504 | LR: 1.00e-06
|
| 936 |
+
[2026-04-25 18:35:02] Epoch 1 | Step 6800 | Loss: 1.0506 | LR: 1.00e-06
|
| 937 |
+
[2026-04-25 18:35:05] Epoch 1 | Step 6810 | Loss: 1.0506 | LR: 1.00e-06
|
| 938 |
+
[2026-04-25 18:35:07] Epoch 1 | Step 6820 | Loss: 1.0507 | LR: 1.00e-06
|
| 939 |
+
[2026-04-25 18:35:10] Epoch 1 | Step 6830 | Loss: 1.0508 | LR: 1.00e-06
|
| 940 |
+
[2026-04-25 18:35:12] Epoch 1 | Step 6840 | Loss: 1.0509 | LR: 1.00e-06
|
| 941 |
+
[2026-04-25 18:35:15] Epoch 1 | Step 6850 | Loss: 1.0509 | LR: 1.00e-06
|
| 942 |
+
[2026-04-25 18:35:17] Epoch 1 | Step 6860 | Loss: 1.0508 | LR: 1.00e-06
|
| 943 |
+
[2026-04-25 18:35:20] Epoch 1 | Step 6870 | Loss: 1.0507 | LR: 1.00e-06
|
| 944 |
+
[2026-04-25 18:35:22] Epoch 1 | Step 6880 | Loss: 1.0507 | LR: 1.00e-06
|
| 945 |
+
[2026-04-25 18:35:25] Epoch 1 | Step 6890 | Loss: 1.0508 | LR: 1.00e-06
|
| 946 |
+
[2026-04-25 18:35:27] Epoch 1 | Step 6900 | Loss: 1.0508 | LR: 1.00e-06
|
| 947 |
+
[2026-04-25 18:35:30] Epoch 1 | Step 6910 | Loss: 1.0504 | LR: 1.00e-06
|
| 948 |
+
[2026-04-25 18:35:32] Epoch 1 | Step 6920 | Loss: 1.0504 | LR: 1.00e-06
|
| 949 |
+
[2026-04-25 18:35:35] Epoch 1 | Step 6930 | Loss: 1.0504 | LR: 1.00e-06
|
| 950 |
+
[2026-04-25 18:35:38] Epoch 1 | Step 6940 | Loss: 1.0503 | LR: 1.00e-06
|
| 951 |
+
[2026-04-25 18:35:40] Epoch 1 | Step 6950 | Loss: 1.0502 | LR: 1.00e-06
|
| 952 |
+
[2026-04-25 18:35:43] Epoch 1 | Step 6960 | Loss: 1.0502 | LR: 1.00e-06
|
| 953 |
+
[2026-04-25 18:35:45] Epoch 1 | Step 6970 | Loss: 1.0501 | LR: 1.00e-06
|
| 954 |
+
[2026-04-25 18:35:47] Epoch 1 | Step 6980 | Loss: 1.0501 | LR: 1.00e-06
|
| 955 |
+
[2026-04-25 18:35:50] Epoch 1 | Step 6990 | Loss: 1.0498 | LR: 1.00e-06
|
| 956 |
+
[2026-04-25 18:35:52] Epoch 1 | Step 7000 | Loss: 1.0497 | LR: 1.00e-06
|
| 957 |
+
[2026-04-25 18:35:55] Epoch 1 | Step 7010 | Loss: 1.0496 | LR: 1.00e-06
|
| 958 |
+
[2026-04-25 18:35:57] Epoch 1 | Step 7020 | Loss: 1.0497 | LR: 1.00e-06
|
| 959 |
+
[2026-04-25 18:35:59] Epoch 1 | Step 7030 | Loss: 1.0496 | LR: 1.00e-06
|
| 960 |
+
[2026-04-25 18:36:02] Epoch 1 | Step 7040 | Loss: 1.0496 | LR: 1.00e-06
|
| 961 |
+
[2026-04-25 18:36:04] Epoch 1 | Step 7050 | Loss: 1.0495 | LR: 1.00e-06
|
| 962 |
+
[2026-04-25 18:36:07] Epoch 1 | Step 7060 | Loss: 1.0494 | LR: 1.00e-06
|
| 963 |
+
[2026-04-25 18:36:09] Epoch 1 | Step 7070 | Loss: 1.0495 | LR: 1.00e-06
|
| 964 |
+
[2026-04-25 18:36:12] Epoch 1 | Step 7080 | Loss: 1.0494 | LR: 1.00e-06
|
| 965 |
+
[2026-04-25 18:36:14] Epoch 1 | Step 7090 | Loss: 1.0494 | LR: 1.00e-06
|
| 966 |
+
[2026-04-25 18:36:17] Epoch 1 | Step 7100 | Loss: 1.0492 | LR: 1.00e-06
|
| 967 |
+
[2026-04-25 18:36:20] Epoch 1 | Step 7110 | Loss: 1.0491 | LR: 1.00e-06
|
| 968 |
+
[2026-04-25 18:36:22] Epoch 1 | Step 7120 | Loss: 1.0492 | LR: 1.00e-06
|
| 969 |
+
[2026-04-25 18:36:25] Epoch 1 | Step 7130 | Loss: 1.0490 | LR: 1.00e-06
|
| 970 |
+
[2026-04-25 18:36:27] Epoch 1 | Step 7140 | Loss: 1.0489 | LR: 1.00e-06
|
| 971 |
+
[2026-04-25 18:36:29] Epoch 1 | Step 7150 | Loss: 1.0490 | LR: 1.00e-06
|
| 972 |
+
[2026-04-25 18:36:32] Epoch 1 | Step 7160 | Loss: 1.0488 | LR: 1.00e-06
|
| 973 |
+
[2026-04-25 18:36:35] Epoch 1 | Step 7170 | Loss: 1.0488 | LR: 1.00e-06
|
| 974 |
+
[2026-04-25 18:36:37] Epoch 1 | Step 7180 | Loss: 1.0488 | LR: 1.00e-06
|
| 975 |
+
[2026-04-25 18:36:39] Epoch 1 | Step 7190 | Loss: 1.0489 | LR: 1.00e-06
|
| 976 |
+
[2026-04-25 18:36:42] Epoch 1 | Step 7200 | Loss: 1.0487 | LR: 1.00e-06
|
| 977 |
+
[2026-04-25 18:36:45] Epoch 1 | Step 7210 | Loss: 1.0486 | LR: 1.00e-06
|
| 978 |
+
[2026-04-25 18:36:47] Epoch 1 | Step 7220 | Loss: 1.0487 | LR: 1.00e-06
|
| 979 |
+
[2026-04-25 18:36:50] Epoch 1 | Step 7230 | Loss: 1.0487 | LR: 1.00e-06
|
| 980 |
+
[2026-04-25 18:36:52] Epoch 1 | Step 7240 | Loss: 1.0486 | LR: 1.00e-06
|
| 981 |
+
[2026-04-25 18:36:55] Epoch 1 | Step 7250 | Loss: 1.0485 | LR: 1.00e-06
|
| 982 |
+
[2026-04-25 18:36:58] Epoch 1 | Step 7260 | Loss: 1.0485 | LR: 1.00e-06
|
| 983 |
+
[2026-04-25 18:37:00] Epoch 1 | Step 7270 | Loss: 1.0486 | LR: 1.00e-06
|
| 984 |
+
[2026-04-25 18:37:03] Epoch 1 | Step 7280 | Loss: 1.0486 | LR: 1.00e-06
|
| 985 |
+
[2026-04-25 18:37:05] Epoch 1 | Step 7290 | Loss: 1.0484 | LR: 1.00e-06
|
| 986 |
+
[2026-04-25 18:37:08] Epoch 1 | Step 7300 | Loss: 1.0483 | LR: 1.00e-06
|
| 987 |
+
[2026-04-25 18:37:10] Epoch 1 | Step 7310 | Loss: 1.0481 | LR: 1.00e-06
|
| 988 |
+
[2026-04-25 18:37:13] Epoch 1 | Step 7320 | Loss: 1.0479 | LR: 1.00e-06
|
| 989 |
+
[2026-04-25 18:37:15] Epoch 1 | Step 7330 | Loss: 1.0479 | LR: 1.00e-06
|
| 990 |
+
[2026-04-25 18:37:18] Epoch 1 | Step 7340 | Loss: 1.0480 | LR: 1.00e-06
|
| 991 |
+
[2026-04-25 18:37:21] Epoch 1 | Step 7350 | Loss: 1.0481 | LR: 1.00e-06
|
| 992 |
+
[2026-04-25 18:37:23] Epoch 1 | Step 7360 | Loss: 1.0480 | LR: 1.00e-06
|
| 993 |
+
[2026-04-25 18:37:26] Epoch 1 | Step 7370 | Loss: 1.0478 | LR: 1.00e-06
|
| 994 |
+
[2026-04-25 18:37:28] Epoch 1 | Step 7380 | Loss: 1.0477 | LR: 1.00e-06
|
| 995 |
+
[2026-04-25 18:37:31] Epoch 1 | Step 7390 | Loss: 1.0475 | LR: 1.00e-06
|
| 996 |
+
[2026-04-25 18:37:33] Epoch 1 | Step 7400 | Loss: 1.0474 | LR: 1.00e-06
|
| 997 |
+
[2026-04-25 18:37:36] Epoch 1 | Step 7410 | Loss: 1.0475 | LR: 1.00e-06
|
| 998 |
+
[2026-04-25 18:37:38] Epoch 1 | Step 7420 | Loss: 1.0475 | LR: 1.00e-06
|
| 999 |
+
[2026-04-25 18:37:41] Epoch 1 | Step 7430 | Loss: 1.0474 | LR: 1.00e-06
|
| 1000 |
+
[2026-04-25 18:37:43] Epoch 1 | Step 7440 | Loss: 1.0474 | LR: 1.00e-06
|
| 1001 |
+
[2026-04-25 18:37:46] Epoch 1 | Step 7450 | Loss: 1.0473 | LR: 1.00e-06
|
| 1002 |
+
[2026-04-25 18:37:48] Epoch 1 | Step 7460 | Loss: 1.0472 | LR: 1.00e-06
|
| 1003 |
+
[2026-04-25 18:37:51] Epoch 1 | Step 7470 | Loss: 1.0472 | LR: 1.00e-06
|
| 1004 |
+
[2026-04-25 18:37:53] Epoch 1 | Step 7480 | Loss: 1.0472 | LR: 1.00e-06
|
| 1005 |
+
[2026-04-25 18:37:56] Epoch 1 | Step 7490 | Loss: 1.0473 | LR: 1.00e-06
|
| 1006 |
+
[2026-04-25 18:37:58] Epoch 1 | Step 7500 | Loss: 1.0473 | LR: 1.00e-06
|
| 1007 |
+
[2026-04-25 18:38:01] Epoch 1 | Step 7510 | Loss: 1.0474 | LR: 1.00e-06
|
| 1008 |
+
[2026-04-25 18:38:03] Epoch 1 | Step 7520 | Loss: 1.0473 | LR: 1.00e-06
|
| 1009 |
+
[2026-04-25 18:38:06] Epoch 1 | Step 7530 | Loss: 1.0472 | LR: 1.00e-06
|
| 1010 |
+
[2026-04-25 18:38:08] Epoch 1 | Step 7540 | Loss: 1.0471 | LR: 1.00e-06
|
| 1011 |
+
[2026-04-25 18:38:11] Epoch 1 | Step 7550 | Loss: 1.0472 | LR: 1.00e-06
|
| 1012 |
+
[2026-04-25 18:38:13] Epoch 1 | Step 7560 | Loss: 1.0472 | LR: 1.00e-06
|
| 1013 |
+
[2026-04-25 18:38:16] Epoch 1 | Step 7570 | Loss: 1.0471 | LR: 1.00e-06
|
| 1014 |
+
[2026-04-25 18:38:18] Epoch 1 | Step 7580 | Loss: 1.0471 | LR: 1.00e-06
|
| 1015 |
+
[2026-04-25 18:38:20] Epoch 1 | Step 7590 | Loss: 1.0470 | LR: 1.00e-06
|
| 1016 |
+
[2026-04-25 18:38:23] Epoch 1 | Step 7600 | Loss: 1.0468 | LR: 1.00e-06
|
| 1017 |
+
[2026-04-25 18:38:26] Epoch 1 | Step 7610 | Loss: 1.0468 | LR: 1.00e-06
|
| 1018 |
+
[2026-04-25 18:38:28] Epoch 1 | Step 7620 | Loss: 1.0466 | LR: 1.00e-06
|
| 1019 |
+
[2026-04-25 18:38:31] Epoch 1 | Step 7630 | Loss: 1.0465 | LR: 1.00e-06
|
| 1020 |
+
[2026-04-25 18:38:33] Epoch 1 | Step 7640 | Loss: 1.0464 | LR: 1.00e-06
|
| 1021 |
+
[2026-04-25 18:38:36] Epoch 1 | Step 7650 | Loss: 1.0463 | LR: 1.00e-06
|
| 1022 |
+
[2026-04-25 18:38:39] Epoch 1 | Step 7660 | Loss: 1.0462 | LR: 1.00e-06
|
| 1023 |
+
[2026-04-25 18:38:41] Epoch 1 | Step 7670 | Loss: 1.0460 | LR: 1.00e-06
|
| 1024 |
+
[2026-04-25 18:38:44] Epoch 1 | Step 7680 | Loss: 1.0460 | LR: 1.00e-06
|
| 1025 |
+
[2026-04-25 18:38:46] Epoch 1 | Step 7690 | Loss: 1.0461 | LR: 1.00e-06
|
| 1026 |
+
[2026-04-25 18:38:49] Epoch 1 | Step 7700 | Loss: 1.0460 | LR: 1.00e-06
|
| 1027 |
+
[2026-04-25 18:38:52] Epoch 1 | Step 7710 | Loss: 1.0458 | LR: 1.00e-06
|
| 1028 |
+
[2026-04-25 18:38:54] Epoch 1 | Step 7720 | Loss: 1.0459 | LR: 1.00e-06
|
| 1029 |
+
[2026-04-25 18:38:57] Epoch 1 | Step 7730 | Loss: 1.0460 | LR: 1.00e-06
|
| 1030 |
+
[2026-04-25 18:38:59] Epoch 1 | Step 7740 | Loss: 1.0461 | LR: 1.00e-06
|
| 1031 |
+
[2026-04-25 18:39:02] Epoch 1 | Step 7750 | Loss: 1.0461 | LR: 1.00e-06
|
| 1032 |
+
[2026-04-25 18:39:05] Epoch 1 | Step 7760 | Loss: 1.0459 | LR: 1.00e-06
|
| 1033 |
+
[2026-04-25 18:39:07] Epoch 1 | Step 7770 | Loss: 1.0458 | LR: 1.00e-06
|
| 1034 |
+
[2026-04-25 18:39:09] Epoch 1 | Step 7780 | Loss: 1.0457 | LR: 1.00e-06
|
| 1035 |
+
[2026-04-25 18:39:12] Epoch 1 | Step 7790 | Loss: 1.0456 | LR: 1.00e-06
|
| 1036 |
+
[2026-04-25 18:39:14] Epoch 1 | Step 7800 | Loss: 1.0455 | LR: 1.00e-06
|
| 1037 |
+
[2026-04-25 18:39:17] Epoch 1 | Step 7810 | Loss: 1.0456 | LR: 1.00e-06
|
| 1038 |
+
[2026-04-25 18:39:20] Epoch 1 | Step 7820 | Loss: 1.0457 | LR: 1.00e-06
|
| 1039 |
+
[2026-04-25 18:39:22] Epoch 1 | Step 7830 | Loss: 1.0456 | LR: 1.00e-06
|
| 1040 |
+
[2026-04-25 18:39:25] Epoch 1 | Step 7840 | Loss: 1.0455 | LR: 1.00e-06
|
| 1041 |
+
[2026-04-25 18:39:27] Epoch 1 | Step 7850 | Loss: 1.0452 | LR: 1.00e-06
|
| 1042 |
+
[2026-04-25 18:39:30] Epoch 1 | Step 7860 | Loss: 1.0453 | LR: 1.00e-06
|
| 1043 |
+
[2026-04-25 18:39:32] Epoch 1 | Step 7870 | Loss: 1.0451 | LR: 1.00e-06
|
| 1044 |
+
[2026-04-25 18:39:35] Epoch 1 | Step 7880 | Loss: 1.0452 | LR: 1.00e-06
|
| 1045 |
+
[2026-04-25 18:39:38] Epoch 1 | Step 7890 | Loss: 1.0452 | LR: 1.00e-06
|
| 1046 |
+
[2026-04-25 18:39:41] Epoch 1 | Step 7900 | Loss: 1.0452 | LR: 1.00e-06
|
| 1047 |
+
[2026-04-25 18:39:44] Epoch 1 | Step 7910 | Loss: 1.0452 | LR: 1.00e-06
|
| 1048 |
+
[2026-04-25 18:39:46] Epoch 1 | Step 7920 | Loss: 1.0452 | LR: 1.00e-06
|
| 1049 |
+
[2026-04-25 18:39:49] Epoch 1 | Step 7930 | Loss: 1.0453 | LR: 1.00e-06
|
| 1050 |
+
[2026-04-25 18:39:51] Epoch 1 | Step 7940 | Loss: 1.0453 | LR: 1.00e-06
|
| 1051 |
+
[2026-04-25 18:39:54] Epoch 1 | Step 7950 | Loss: 1.0455 | LR: 1.00e-06
|
| 1052 |
+
[2026-04-25 18:39:56] Epoch 1 | Step 7960 | Loss: 1.0455 | LR: 1.00e-06
|
| 1053 |
+
[2026-04-25 18:39:58] Epoch 1 | Step 7970 | Loss: 1.0455 | LR: 1.00e-06
|
| 1054 |
+
[2026-04-25 18:40:01] Epoch 1 | Step 7980 | Loss: 1.0454 | LR: 1.00e-06
|
| 1055 |
+
[2026-04-25 18:40:03] Epoch 1 | Step 7990 | Loss: 1.0454 | LR: 1.00e-06
|
| 1056 |
+
[2026-04-25 18:40:06] Epoch 1 | Step 8000 | Loss: 1.0453 | LR: 1.00e-06
|
| 1057 |
+
[2026-04-25 18:40:06] Validation | Batch 10/84 | Loss: 0.9831
|
| 1058 |
+
[2026-04-25 18:40:07] Validation | Batch 20/84 | Loss: 0.9812
|
| 1059 |
+
[2026-04-25 18:40:07] Validation | Batch 30/84 | Loss: 1.0587
|
| 1060 |
+
[2026-04-25 18:40:08] Validation | Batch 40/84 | Loss: 1.0625
|
| 1061 |
+
[2026-04-25 18:40:08] Validation | Batch 50/84 | Loss: 1.0617
|
| 1062 |
+
[2026-04-25 18:40:09] Validation | Batch 60/84 | Loss: 1.0346
|
| 1063 |
+
[2026-04-25 18:40:09] Validation | Batch 70/84 | Loss: 1.0155
|
| 1064 |
+
[2026-04-25 18:40:10] Validation | Batch 80/84 | Loss: 1.0222
|
| 1065 |
+
[2026-04-25 18:40:10] Validation | Batch 84/84 | Loss: 1.0157
|
| 1066 |
+
[2026-04-25 18:40:10] Validation | Loss: 1.0157 | PPL: 2.84 | Time: 3.75s
|
| 1067 |
+
[2026-04-25 18:40:13] New best model saved! Val loss: 1.0157
|
| 1068 |
+
[2026-04-25 18:40:15] Epoch 1 | Step 8010 | Loss: 1.0452 | LR: 1.00e-06
|
| 1069 |
+
[2026-04-25 18:40:18] Epoch 1 | Step 8020 | Loss: 1.0450 | LR: 1.00e-06
|
| 1070 |
+
[2026-04-25 18:40:21] Epoch 1 | Step 8030 | Loss: 1.0450 | LR: 1.00e-06
|
| 1071 |
+
[2026-04-25 18:40:24] Epoch 1 | Step 8040 | Loss: 1.0451 | LR: 1.00e-06
|
| 1072 |
+
[2026-04-25 18:40:26] Epoch 1 | Step 8050 | Loss: 1.0449 | LR: 1.00e-06
|
| 1073 |
+
[2026-04-25 18:40:29] Epoch 1 | Step 8060 | Loss: 1.0449 | LR: 1.00e-06
|
| 1074 |
+
[2026-04-25 18:40:31] Epoch 1 | Step 8070 | Loss: 1.0448 | LR: 1.00e-06
|
| 1075 |
+
[2026-04-25 18:40:34] Epoch 1 | Step 8080 | Loss: 1.0448 | LR: 1.00e-06
|
| 1076 |
+
[2026-04-25 18:40:37] Epoch 1 | Step 8090 | Loss: 1.0446 | LR: 1.00e-06
|
| 1077 |
+
[2026-04-25 18:40:39] Epoch 1 | Step 8100 | Loss: 1.0445 | LR: 1.00e-06
|
| 1078 |
+
[2026-04-25 18:40:42] Epoch 1 | Step 8110 | Loss: 1.0446 | LR: 1.00e-06
|
| 1079 |
+
[2026-04-25 18:40:44] Epoch 1 | Step 8120 | Loss: 1.0446 | LR: 1.00e-06
|
| 1080 |
+
[2026-04-25 18:40:47] Epoch 1 | Step 8130 | Loss: 1.0445 | LR: 1.00e-06
|
| 1081 |
+
[2026-04-25 18:40:49] Epoch 1 | Step 8140 | Loss: 1.0446 | LR: 1.00e-06
|
| 1082 |
+
[2026-04-25 18:40:52] Epoch 1 | Step 8150 | Loss: 1.0446 | LR: 1.00e-06
|
| 1083 |
+
[2026-04-25 18:40:54] Epoch 1 | Step 8160 | Loss: 1.0444 | LR: 1.00e-06
|
| 1084 |
+
[2026-04-25 18:40:56] Epoch 1 | Step 8170 | Loss: 1.0444 | LR: 1.00e-06
|
| 1085 |
+
[2026-04-25 18:40:59] Epoch 1 | Step 8180 | Loss: 1.0444 | LR: 1.00e-06
|
| 1086 |
+
[2026-04-25 18:41:01] Epoch 1 | Step 8190 | Loss: 1.0443 | LR: 1.00e-06
|
| 1087 |
+
[2026-04-25 18:41:04] Epoch 1 | Step 8200 | Loss: 1.0443 | LR: 1.00e-06
|
| 1088 |
+
[2026-04-25 18:41:06] Epoch 1 | Step 8210 | Loss: 1.0443 | LR: 1.00e-06
|
| 1089 |
+
[2026-04-25 18:41:09] Epoch 1 | Step 8220 | Loss: 1.0443 | LR: 1.00e-06
|
| 1090 |
+
[2026-04-25 18:41:12] Epoch 1 | Step 8230 | Loss: 1.0443 | LR: 1.00e-06
|
| 1091 |
+
[2026-04-25 18:41:14] Epoch 1 | Step 8240 | Loss: 1.0444 | LR: 1.00e-06
|
| 1092 |
+
[2026-04-25 18:41:16] Epoch 1 | Step 8250 | Loss: 1.0443 | LR: 1.00e-06
|
| 1093 |
+
[2026-04-25 18:41:19] Epoch 1 | Step 8260 | Loss: 1.0444 | LR: 1.00e-06
|
| 1094 |
+
[2026-04-25 18:41:21] Epoch 1 | Step 8270 | Loss: 1.0445 | LR: 1.00e-06
|
| 1095 |
+
[2026-04-25 18:41:24] Epoch 1 | Step 8280 | Loss: 1.0446 | LR: 1.00e-06
|
| 1096 |
+
[2026-04-25 18:41:27] Epoch 1 | Step 8290 | Loss: 1.0446 | LR: 1.00e-06
|
| 1097 |
+
[2026-04-25 18:41:29] Epoch 1 | Step 8300 | Loss: 1.0445 | LR: 1.00e-06
|
| 1098 |
+
[2026-04-25 18:41:31] Epoch 1 | Step 8310 | Loss: 1.0445 | LR: 1.00e-06
|
| 1099 |
+
[2026-04-25 18:41:34] Epoch 1 | Step 8320 | Loss: 1.0445 | LR: 1.00e-06
|
| 1100 |
+
[2026-04-25 18:41:36] Epoch 1 | Step 8330 | Loss: 1.0445 | LR: 1.00e-06
|
| 1101 |
+
[2026-04-25 18:41:39] Epoch 1 | Step 8340 | Loss: 1.0445 | LR: 1.00e-06
|
| 1102 |
+
[2026-04-25 18:41:41] Epoch 1 | Step 8350 | Loss: 1.0444 | LR: 1.00e-06
|
| 1103 |
+
[2026-04-25 18:41:43] Epoch 1 | Step 8360 | Loss: 1.0443 | LR: 1.00e-06
|
| 1104 |
+
[2026-04-25 18:41:46] Epoch 1 | Step 8370 | Loss: 1.0443 | LR: 1.00e-06
|
| 1105 |
+
[2026-04-25 18:41:48] Epoch 1 | Step 8380 | Loss: 1.0443 | LR: 1.00e-06
|
| 1106 |
+
[2026-04-25 18:41:51] Epoch 1 | Step 8390 | Loss: 1.0443 | LR: 1.00e-06
|
| 1107 |
+
[2026-04-25 18:41:53] Epoch 1 | Step 8400 | Loss: 1.0443 | LR: 1.00e-06
|
| 1108 |
+
[2026-04-25 18:41:56] Epoch 1 | Step 8410 | Loss: 1.0443 | LR: 1.00e-06
|
| 1109 |
+
[2026-04-25 18:41:58] Epoch 1 | Step 8420 | Loss: 1.0444 | LR: 1.00e-06
|
| 1110 |
+
[2026-04-25 18:42:01] Epoch 1 | Step 8430 | Loss: 1.0443 | LR: 1.00e-06
|
| 1111 |
+
[2026-04-25 18:42:03] Epoch 1 | Step 8440 | Loss: 1.0443 | LR: 1.00e-06
|
| 1112 |
+
[2026-04-25 18:42:06] Epoch 1 | Step 8450 | Loss: 1.0442 | LR: 1.00e-06
|
| 1113 |
+
[2026-04-25 18:42:08] Epoch 1 | Step 8460 | Loss: 1.0442 | LR: 1.00e-06
|
| 1114 |
+
[2026-04-25 18:42:11] Epoch 1 | Step 8470 | Loss: 1.0442 | LR: 1.00e-06
|
| 1115 |
+
[2026-04-25 18:42:13] Epoch 1 | Step 8480 | Loss: 1.0442 | LR: 1.00e-06
|
| 1116 |
+
[2026-04-25 18:42:15] Epoch 1 | Step 8490 | Loss: 1.0440 | LR: 1.00e-06
|
| 1117 |
+
[2026-04-25 18:42:18] Epoch 1 | Step 8500 | Loss: 1.0441 | LR: 1.00e-06
|
| 1118 |
+
[2026-04-25 18:42:20] Epoch 1 | Step 8510 | Loss: 1.0440 | LR: 1.00e-06
|
| 1119 |
+
[2026-04-25 18:42:23] Epoch 1 | Step 8520 | Loss: 1.0440 | LR: 1.00e-06
|
| 1120 |
+
[2026-04-25 18:42:26] Epoch 1 | Step 8530 | Loss: 1.0440 | LR: 1.00e-06
|
| 1121 |
+
[2026-04-25 18:42:28] Epoch 1 | Step 8540 | Loss: 1.0441 | LR: 1.00e-06
|
| 1122 |
+
[2026-04-25 18:42:31] Epoch 1 | Step 8550 | Loss: 1.0442 | LR: 1.00e-06
|
| 1123 |
+
[2026-04-25 18:42:33] Epoch 1 | Step 8560 | Loss: 1.0441 | LR: 1.00e-06
|
| 1124 |
+
[2026-04-25 18:42:35] Epoch 1 | Step 8570 | Loss: 1.0442 | LR: 1.00e-06
|
| 1125 |
+
[2026-04-25 18:42:38] Epoch 1 | Step 8580 | Loss: 1.0440 | LR: 1.00e-06
|
| 1126 |
+
[2026-04-25 18:42:41] Epoch 1 | Step 8590 | Loss: 1.0439 | LR: 1.00e-06
|
| 1127 |
+
[2026-04-25 18:42:43] Epoch 1 | Step 8600 | Loss: 1.0438 | LR: 1.00e-06
|
| 1128 |
+
[2026-04-25 18:42:46] Epoch 1 | Step 8610 | Loss: 1.0439 | LR: 1.00e-06
|
| 1129 |
+
[2026-04-25 18:42:48] Epoch 1 | Step 8620 | Loss: 1.0438 | LR: 1.00e-06
|
| 1130 |
+
[2026-04-25 18:42:51] Epoch 1 | Step 8630 | Loss: 1.0436 | LR: 1.00e-06
|
| 1131 |
+
[2026-04-25 18:42:53] Epoch 1 | Step 8640 | Loss: 1.0438 | LR: 1.00e-06
|
| 1132 |
+
[2026-04-25 18:42:55] Epoch 1 | Step 8650 | Loss: 1.0438 | LR: 1.00e-06
|
| 1133 |
+
[2026-04-25 18:42:58] Epoch 1 | Step 8660 | Loss: 1.0437 | LR: 1.00e-06
|
| 1134 |
+
[2026-04-25 18:43:01] Epoch 1 | Step 8670 | Loss: 1.0437 | LR: 1.00e-06
|
| 1135 |
+
[2026-04-25 18:43:03] Epoch 1 | Step 8680 | Loss: 1.0438 | LR: 1.00e-06
|
| 1136 |
+
[2026-04-25 18:43:05] Epoch 1 | Step 8690 | Loss: 1.0436 | LR: 1.00e-06
|
| 1137 |
+
[2026-04-25 18:43:08] Epoch 1 | Step 8700 | Loss: 1.0436 | LR: 1.00e-06
|
| 1138 |
+
[2026-04-25 18:43:11] Epoch 1 | Step 8710 | Loss: 1.0433 | LR: 1.00e-06
|
| 1139 |
+
[2026-04-25 18:43:13] Epoch 1 | Step 8720 | Loss: 1.0432 | LR: 1.00e-06
|
| 1140 |
+
[2026-04-25 18:43:16] Epoch 1 | Step 8730 | Loss: 1.0433 | LR: 1.00e-06
|
| 1141 |
+
[2026-04-25 18:43:18] Epoch 1 | Step 8740 | Loss: 1.0433 | LR: 1.00e-06
|
| 1142 |
+
[2026-04-25 18:43:21] Epoch 1 | Step 8750 | Loss: 1.0433 | LR: 1.00e-06
|
| 1143 |
+
[2026-04-25 18:43:23] Epoch 1 | Step 8760 | Loss: 1.0432 | LR: 1.00e-06
|
| 1144 |
+
[2026-04-25 18:43:26] Epoch 1 | Step 8770 | Loss: 1.0431 | LR: 1.00e-06
|
| 1145 |
+
[2026-04-25 18:43:29] Epoch 1 | Step 8780 | Loss: 1.0431 | LR: 1.00e-06
|
| 1146 |
+
[2026-04-25 18:43:31] Epoch 1 | Step 8790 | Loss: 1.0431 | LR: 1.00e-06
|
| 1147 |
+
[2026-04-25 18:43:34] Epoch 1 | Step 8800 | Loss: 1.0429 | LR: 1.00e-06
|
| 1148 |
+
[2026-04-25 18:43:36] Epoch 1 | Step 8810 | Loss: 1.0429 | LR: 1.00e-06
|
| 1149 |
+
[2026-04-25 18:43:39] Epoch 1 | Step 8820 | Loss: 1.0428 | LR: 1.00e-06
|
| 1150 |
+
[2026-04-25 18:43:41] Epoch 1 | Step 8830 | Loss: 1.0428 | LR: 1.00e-06
|
| 1151 |
+
[2026-04-25 18:43:44] Epoch 1 | Step 8840 | Loss: 1.0428 | LR: 1.00e-06
|
| 1152 |
+
[2026-04-25 18:43:46] Epoch 1 | Step 8850 | Loss: 1.0428 | LR: 1.00e-06
|
| 1153 |
+
[2026-04-25 18:43:49] Epoch 1 | Step 8860 | Loss: 1.0427 | LR: 1.00e-06
|
| 1154 |
+
[2026-04-25 18:43:51] Epoch 1 | Step 8870 | Loss: 1.0428 | LR: 1.00e-06
|
| 1155 |
+
[2026-04-25 18:43:54] Epoch 1 | Step 8880 | Loss: 1.0427 | LR: 1.00e-06
|
| 1156 |
+
[2026-04-25 18:43:56] Epoch 1 | Step 8890 | Loss: 1.0426 | LR: 1.00e-06
|
| 1157 |
+
[2026-04-25 18:43:59] Epoch 1 | Step 8900 | Loss: 1.0424 | LR: 1.00e-06
|
| 1158 |
+
[2026-04-25 18:44:01] Epoch 1 | Step 8910 | Loss: 1.0425 | LR: 1.00e-06
|
| 1159 |
+
[2026-04-25 18:44:04] Epoch 1 | Step 8920 | Loss: 1.0423 | LR: 1.00e-06
|
| 1160 |
+
[2026-04-25 18:44:06] Epoch 1 | Step 8930 | Loss: 1.0422 | LR: 1.00e-06
|
| 1161 |
+
[2026-04-25 18:44:09] Epoch 1 | Step 8940 | Loss: 1.0422 | LR: 1.00e-06
|
| 1162 |
+
[2026-04-25 18:44:11] Epoch 1 | Step 8950 | Loss: 1.0423 | LR: 1.00e-06
|
| 1163 |
+
[2026-04-25 18:44:14] Epoch 1 | Step 8960 | Loss: 1.0422 | LR: 1.00e-06
|
| 1164 |
+
[2026-04-25 18:44:16] Epoch 1 | Step 8970 | Loss: 1.0422 | LR: 1.00e-06
|
| 1165 |
+
[2026-04-25 18:44:19] Epoch 1 | Step 8980 | Loss: 1.0421 | LR: 1.00e-06
|
| 1166 |
+
[2026-04-25 18:44:21] Epoch 1 | Step 8990 | Loss: 1.0420 | LR: 1.00e-06
|
| 1167 |
+
[2026-04-25 18:44:24] Epoch 1 | Step 9000 | Loss: 1.0419 | LR: 1.00e-06
|
| 1168 |
+
[2026-04-25 18:44:26] Epoch 1 | Step 9010 | Loss: 1.0420 | LR: 1.00e-06
|
| 1169 |
+
[2026-04-25 18:44:29] Epoch 1 | Step 9020 | Loss: 1.0421 | LR: 1.00e-06
|
| 1170 |
+
[2026-04-25 18:44:31] Epoch 1 | Step 9030 | Loss: 1.0421 | LR: 1.00e-06
|
| 1171 |
+
[2026-04-25 18:44:34] Epoch 1 | Step 9040 | Loss: 1.0420 | LR: 1.00e-06
|
| 1172 |
+
[2026-04-25 18:44:36] Epoch 1 | Step 9050 | Loss: 1.0419 | LR: 1.00e-06
|
| 1173 |
+
[2026-04-25 18:44:39] Epoch 1 | Step 9060 | Loss: 1.0420 | LR: 1.00e-06
|
| 1174 |
+
[2026-04-25 18:44:41] Epoch 1 | Step 9070 | Loss: 1.0419 | LR: 1.00e-06
|
| 1175 |
+
[2026-04-25 18:44:44] Epoch 1 | Step 9080 | Loss: 1.0420 | LR: 1.00e-06
|
| 1176 |
+
[2026-04-25 18:44:46] Epoch 1 | Step 9090 | Loss: 1.0419 | LR: 1.00e-06
|
| 1177 |
+
[2026-04-25 18:44:48] Epoch 1 | Step 9100 | Loss: 1.0419 | LR: 1.00e-06
|
| 1178 |
+
[2026-04-25 18:44:51] Epoch 1 | Step 9110 | Loss: 1.0419 | LR: 1.00e-06
|
| 1179 |
+
[2026-04-25 18:44:53] Epoch 1 | Step 9120 | Loss: 1.0420 | LR: 1.00e-06
|
| 1180 |
+
[2026-04-25 18:44:56] Epoch 1 | Step 9130 | Loss: 1.0419 | LR: 1.00e-06
|
| 1181 |
+
[2026-04-25 18:44:58] Epoch 1 | Step 9140 | Loss: 1.0419 | LR: 1.00e-06
|
| 1182 |
+
[2026-04-25 18:45:00] Epoch 1 | Step 9150 | Loss: 1.0420 | LR: 1.00e-06
|
| 1183 |
+
[2026-04-25 18:45:03] Epoch 1 | Step 9160 | Loss: 1.0419 | LR: 1.00e-06
|
| 1184 |
+
[2026-04-25 18:45:06] Epoch 1 | Step 9170 | Loss: 1.0417 | LR: 1.00e-06
|
| 1185 |
+
[2026-04-25 18:45:08] Epoch 1 | Step 9180 | Loss: 1.0416 | LR: 1.00e-06
|
| 1186 |
+
[2026-04-25 18:45:11] Epoch 1 | Step 9190 | Loss: 1.0414 | LR: 1.00e-06
|
| 1187 |
+
[2026-04-25 18:45:13] Epoch 1 | Step 9200 | Loss: 1.0414 | LR: 1.00e-06
|
| 1188 |
+
[2026-04-25 18:45:16] Epoch 1 | Step 9210 | Loss: 1.0415 | LR: 1.00e-06
|
| 1189 |
+
[2026-04-25 18:45:18] Epoch 1 | Step 9220 | Loss: 1.0414 | LR: 1.00e-06
|
| 1190 |
+
[2026-04-25 18:45:21] Epoch 1 | Step 9230 | Loss: 1.0413 | LR: 1.00e-06
|
| 1191 |
+
[2026-04-25 18:45:23] Epoch 1 | Step 9240 | Loss: 1.0411 | LR: 1.00e-06
|
| 1192 |
+
[2026-04-25 18:45:26] Epoch 1 | Step 9250 | Loss: 1.0410 | LR: 1.00e-06
|
| 1193 |
+
[2026-04-25 18:45:28] Epoch 1 | Step 9260 | Loss: 1.0409 | LR: 1.00e-06
|
| 1194 |
+
[2026-04-25 18:45:31] Epoch 1 | Step 9270 | Loss: 1.0408 | LR: 1.00e-06
|
| 1195 |
+
[2026-04-25 18:45:33] Epoch 1 | Step 9280 | Loss: 1.0408 | LR: 1.00e-06
|
| 1196 |
+
[2026-04-25 18:45:36] Epoch 1 | Step 9290 | Loss: 1.0408 | LR: 1.00e-06
|
| 1197 |
+
[2026-04-25 18:45:38] Epoch 1 | Step 9300 | Loss: 1.0408 | LR: 1.00e-06
|
| 1198 |
+
[2026-04-25 18:45:41] Epoch 1 | Step 9310 | Loss: 1.0407 | LR: 1.00e-06
|
| 1199 |
+
[2026-04-25 18:45:43] Epoch 1 | Step 9320 | Loss: 1.0407 | LR: 1.00e-06
|
| 1200 |
+
[2026-04-25 18:45:46] Epoch 1 | Step 9330 | Loss: 1.0407 | LR: 1.00e-06
|
| 1201 |
+
[2026-04-25 18:45:48] Epoch 1 | Step 9340 | Loss: 1.0406 | LR: 1.00e-06
|
| 1202 |
+
[2026-04-25 18:45:51] Epoch 1 | Step 9350 | Loss: 1.0405 | LR: 1.00e-06
|
| 1203 |
+
[2026-04-25 18:45:53] Epoch 1 | Step 9360 | Loss: 1.0405 | LR: 1.00e-06
|
| 1204 |
+
[2026-04-25 18:45:56] Epoch 1 | Step 9370 | Loss: 1.0404 | LR: 1.00e-06
|
| 1205 |
+
[2026-04-25 18:45:58] Epoch 1 | Step 9380 | Loss: 1.0404 | LR: 1.00e-06
|
| 1206 |
+
[2026-04-25 18:46:01] Epoch 1 | Step 9390 | Loss: 1.0402 | LR: 1.00e-06
|
| 1207 |
+
[2026-04-25 18:46:03] Epoch 1 | Step 9400 | Loss: 1.0403 | LR: 1.00e-06
|
| 1208 |
+
[2026-04-25 18:46:06] Epoch 1 | Step 9410 | Loss: 1.0403 | LR: 1.00e-06
|
| 1209 |
+
[2026-04-25 18:46:09] Epoch 1 | Step 9420 | Loss: 1.0404 | LR: 1.00e-06
|
| 1210 |
+
[2026-04-25 18:46:11] Epoch 1 | Step 9430 | Loss: 1.0404 | LR: 1.00e-06
|
| 1211 |
+
[2026-04-25 18:46:13] Epoch 1 | Step 9440 | Loss: 1.0404 | LR: 1.00e-06
|
| 1212 |
+
[2026-04-25 18:46:16] Epoch 1 | Step 9450 | Loss: 1.0405 | LR: 1.00e-06
|
| 1213 |
+
[2026-04-25 18:46:19] Epoch 1 | Step 9460 | Loss: 1.0404 | LR: 1.00e-06
|
| 1214 |
+
[2026-04-25 18:46:21] Epoch 1 | Step 9470 | Loss: 1.0403 | LR: 1.00e-06
|
| 1215 |
+
[2026-04-25 18:46:24] Epoch 1 | Step 9480 | Loss: 1.0401 | LR: 1.00e-06
|
| 1216 |
+
[2026-04-25 18:46:26] Epoch 1 | Step 9490 | Loss: 1.0401 | LR: 1.00e-06
|
| 1217 |
+
[2026-04-25 18:46:28] Epoch 1 | Step 9500 | Loss: 1.0401 | LR: 1.00e-06
|
| 1218 |
+
[2026-04-25 18:46:31] Epoch 1 | Step 9510 | Loss: 1.0401 | LR: 1.00e-06
|
| 1219 |
+
[2026-04-25 18:46:33] Epoch 1 | Step 9520 | Loss: 1.0401 | LR: 1.00e-06
|
| 1220 |
+
[2026-04-25 18:46:36] Epoch 1 | Step 9530 | Loss: 1.0401 | LR: 1.00e-06
|
| 1221 |
+
[2026-04-25 18:46:39] Epoch 1 | Step 9540 | Loss: 1.0399 | LR: 1.00e-06
|
| 1222 |
+
[2026-04-25 18:46:41] Epoch 1 | Step 9550 | Loss: 1.0400 | LR: 1.00e-06
|
| 1223 |
+
[2026-04-25 18:46:44] Epoch 1 | Step 9560 | Loss: 1.0400 | LR: 1.00e-06
|
| 1224 |
+
[2026-04-25 18:46:46] Epoch 1 | Step 9570 | Loss: 1.0401 | LR: 1.00e-06
|
| 1225 |
+
[2026-04-25 18:46:48] Epoch 1 | Step 9580 | Loss: 1.0402 | LR: 1.00e-06
|
| 1226 |
+
[2026-04-25 18:46:51] Epoch 1 | Step 9590 | Loss: 1.0401 | LR: 1.00e-06
|
| 1227 |
+
[2026-04-25 18:46:54] Epoch 1 | Step 9600 | Loss: 1.0400 | LR: 1.00e-06
|
| 1228 |
+
[2026-04-25 18:46:57] Epoch 1 | Step 9610 | Loss: 1.0400 | LR: 1.00e-06
|
| 1229 |
+
[2026-04-25 18:46:59] Epoch 1 | Step 9620 | Loss: 1.0400 | LR: 1.00e-06
|
| 1230 |
+
[2026-04-25 18:47:02] Epoch 1 | Step 9630 | Loss: 1.0402 | LR: 1.00e-06
|
| 1231 |
+
[2026-04-25 18:47:05] Epoch 1 | Step 9640 | Loss: 1.0401 | LR: 1.00e-06
|
| 1232 |
+
[2026-04-25 18:47:08] Epoch 1 | Step 9650 | Loss: 1.0401 | LR: 1.00e-06
|
| 1233 |
+
[2026-04-25 18:47:10] Epoch 1 | Step 9660 | Loss: 1.0401 | LR: 1.00e-06
|
| 1234 |
+
[2026-04-25 18:47:13] Epoch 1 | Step 9670 | Loss: 1.0401 | LR: 1.00e-06
|
| 1235 |
+
[2026-04-25 18:47:15] Epoch 1 | Step 9680 | Loss: 1.0401 | LR: 1.00e-06
|
| 1236 |
+
[2026-04-25 18:47:17] Epoch 1 | Step 9690 | Loss: 1.0400 | LR: 1.00e-06
|
| 1237 |
+
[2026-04-25 18:47:20] Epoch 1 | Step 9700 | Loss: 1.0401 | LR: 1.00e-06
|
| 1238 |
+
[2026-04-25 18:47:23] Epoch 1 | Step 9710 | Loss: 1.0400 | LR: 1.00e-06
|
| 1239 |
+
[2026-04-25 18:47:25] Epoch 1 | Step 9720 | Loss: 1.0400 | LR: 1.00e-06
|
| 1240 |
+
[2026-04-25 18:47:28] Epoch 1 | Step 9730 | Loss: 1.0400 | LR: 1.00e-06
|
| 1241 |
+
[2026-04-25 18:47:30] Epoch 1 | Step 9740 | Loss: 1.0399 | LR: 1.00e-06
|
| 1242 |
+
[2026-04-25 18:47:32] Epoch 1 | Step 9750 | Loss: 1.0399 | LR: 1.00e-06
|
| 1243 |
+
[2026-04-25 18:47:35] Epoch 1 | Step 9760 | Loss: 1.0399 | LR: 1.00e-06
|
| 1244 |
+
[2026-04-25 18:47:37] Epoch 1 | Step 9770 | Loss: 1.0399 | LR: 1.00e-06
|
| 1245 |
+
[2026-04-25 18:47:40] Epoch 1 | Step 9780 | Loss: 1.0398 | LR: 1.00e-06
|
| 1246 |
+
[2026-04-25 18:47:42] Epoch 1 | Step 9790 | Loss: 1.0398 | LR: 1.00e-06
|
| 1247 |
+
[2026-04-25 18:47:45] Epoch 1 | Step 9800 | Loss: 1.0398 | LR: 1.00e-06
|
| 1248 |
+
[2026-04-25 18:47:48] Epoch 1 | Step 9810 | Loss: 1.0397 | LR: 1.00e-06
|
| 1249 |
+
[2026-04-25 18:47:50] Epoch 1 | Step 9820 | Loss: 1.0396 | LR: 1.00e-06
|
| 1250 |
+
[2026-04-25 18:47:53] Epoch 1 | Step 9830 | Loss: 1.0396 | LR: 1.00e-06
|
| 1251 |
+
[2026-04-25 18:47:56] Epoch 1 | Step 9840 | Loss: 1.0398 | LR: 1.00e-06
|
| 1252 |
+
[2026-04-25 18:47:58] Epoch 1 | Step 9850 | Loss: 1.0397 | LR: 1.00e-06
|
| 1253 |
+
[2026-04-25 18:48:01] Epoch 1 | Step 9860 | Loss: 1.0396 | LR: 1.00e-06
|
| 1254 |
+
[2026-04-25 18:48:03] Epoch 1 | Step 9870 | Loss: 1.0397 | LR: 1.00e-06
|
| 1255 |
+
[2026-04-25 18:48:06] Epoch 1 | Step 9880 | Loss: 1.0397 | LR: 1.00e-06
|
| 1256 |
+
[2026-04-25 18:48:08] Epoch 1 completed in 2512.48s | Loss: 1.0397
|
| 1257 |
+
[2026-04-25 18:48:08]
|
| 1258 |
+
Training completed!
|
| 1259 |
+
[2026-04-25 18:48:10] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/model_final.pt
|
lr_sweep/pythia_1b_lr_1e-5/wandb/debug-internal.log
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:09.99885055Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T18:06:10.474895883Z","level":"INFO","msg":"stream: created new stream","id":"3z5g26qd"}
|
| 3 |
+
{"time":"2026-04-25T18:06:10.474937729Z","level":"INFO","msg":"handler: started","stream_id":"3z5g26qd"}
|
| 4 |
+
{"time":"2026-04-25T18:06:10.475009823Z","level":"INFO","msg":"stream: started","id":"3z5g26qd"}
|
| 5 |
+
{"time":"2026-04-25T18:06:10.475020366Z","level":"INFO","msg":"writer: started","stream_id":"3z5g26qd"}
|
| 6 |
+
{"time":"2026-04-25T18:06:10.475023072Z","level":"INFO","msg":"sender: started","stream_id":"3z5g26qd"}
|
| 7 |
+
{"time":"2026-04-25T18:06:10.687143958Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
| 8 |
+
{"time":"2026-04-25T18:48:11.476724146Z","level":"INFO","msg":"fileTransfer: Close: file transfer manager closed"}
|
| 9 |
+
{"time":"2026-04-25T18:48:11.625156726Z","level":"INFO","msg":"handler: operation stats","stats":{}}
|
| 10 |
+
{"time":"2026-04-25T18:48:11.627697754Z","level":"INFO","msg":"stream: closing","id":"3z5g26qd"}
|
| 11 |
+
{"time":"2026-04-25T18:48:11.627706958Z","level":"INFO","msg":"handler: closed","stream_id":"3z5g26qd"}
|
| 12 |
+
{"time":"2026-04-25T18:48:11.62778746Z","level":"INFO","msg":"sender: closed","stream_id":"3z5g26qd"}
|
| 13 |
+
{"time":"2026-04-25T18:48:11.627798934Z","level":"INFO","msg":"stream: closed","id":"3z5g26qd"}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/debug.log
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Configure stats pid to 65409
|
| 3 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug.log
|
| 5 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-internal.log
|
| 6 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'name': 'EleutherAI/pythia-1b', 'checkpoint_path': None, 'from_scratch': False}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 12, 'gradient_accumulation_steps': 4, 'lr': 1e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 4, 'pin_memory': True, 'max_train_samples': None, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 0, 'eval_interval': 2000, 'save_every_epoch': False}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'pythia_1b_lr_1e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_pythia/train.py'}}
|
| 9 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 18:06:09,975 INFO MainThread:65409 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 18:06:09,997 INFO MainThread:65409 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 18:06:10,000 INFO MainThread:65409 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 18:06:10,016 INFO MainThread:65409 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 18:06:10,686 INFO MainThread:65409 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 18:06:10,849 INFO MainThread:65409 [wandb_init.py:init():1084] run started, returning control to user process
|
| 20 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_finish():2295] finishing run nikita/code-completion_lr-sweep/3z5g26qd
|
| 21 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_atexit_cleanup():2494] got exitcode: 0
|
| 22 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_restore():2476] restore
|
| 23 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_restore():2482] restore done
|
| 24 |
+
2026-04-25 18:48:11,627 INFO MainThread:65409 [wandb_run.py:_footer_sync_info():3870] logging synced files
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/code/code_completion_exp/train_pythia/train.py
ADDED
|
@@ -0,0 +1,606 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Training Pipeline для Pythia (decoder-only transformer) на задаче Code Completion.
|
| 3 |
+
|
| 4 |
+
Конфигурация через Hydra + OmegaConf, логирование в Trackio.
|
| 5 |
+
Поддержка DDP через Accelerate для multi-GPU тренировки.
|
| 6 |
+
|
| 7 |
+
Использование:
|
| 8 |
+
# Базовый запуск (single GPU)
|
| 9 |
+
python train.py
|
| 10 |
+
|
| 11 |
+
# Multi-GPU с Accelerate
|
| 12 |
+
accelerate launch train.py
|
| 13 |
+
|
| 14 |
+
# Multi-GPU с указанием количества GPU
|
| 15 |
+
accelerate launch --num_processes=4 train.py
|
| 16 |
+
|
| 17 |
+
# Переопределение параметров через CLI
|
| 18 |
+
python train.py training.lr=1e-4 training.epochs=5
|
| 19 |
+
|
| 20 |
+
# Выбор другого конфига модели
|
| 21 |
+
python train.py model=pythia_160m
|
| 22 |
+
|
| 23 |
+
# Multirun (sweep)
|
| 24 |
+
python train.py --multirun training.lr=1e-4,3e-4,1e-3
|
| 25 |
+
|
| 26 |
+
# Без логирования
|
| 27 |
+
python train.py tracking.enabled=false
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
import os
|
| 31 |
+
import math
|
| 32 |
+
import time
|
| 33 |
+
from pathlib import Path
|
| 34 |
+
|
| 35 |
+
import torch
|
| 36 |
+
import torch.nn as nn
|
| 37 |
+
import torch.nn.functional as F
|
| 38 |
+
from torch.utils.data import DataLoader
|
| 39 |
+
from datasets import load_from_disk
|
| 40 |
+
|
| 41 |
+
import hydra
|
| 42 |
+
from hydra.core.hydra_config import HydraConfig
|
| 43 |
+
from omegaconf import DictConfig, OmegaConf
|
| 44 |
+
from transformers import (
|
| 45 |
+
AutoTokenizer,
|
| 46 |
+
AutoModelForCausalLM,
|
| 47 |
+
AutoConfig,
|
| 48 |
+
PreTrainedTokenizerBase,
|
| 49 |
+
)
|
| 50 |
+
from accelerate import Accelerator
|
| 51 |
+
from accelerate.utils import set_seed as accelerate_set_seed
|
| 52 |
+
|
| 53 |
+
# Ensure repo root is on sys.path (needed when running from subdirectory)
|
| 54 |
+
import sys
|
| 55 |
+
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
|
| 56 |
+
|
| 57 |
+
# Shared training library
|
| 58 |
+
from training_lib.utils import AverageMeter, log_message
|
| 59 |
+
from training_lib.checkpointing import save_checkpoint, load_checkpoint
|
| 60 |
+
from training_lib.schedulers import get_lr_scheduler
|
| 61 |
+
from training_lib.tracking import init_tracking, log_metrics, finish_tracking
|
| 62 |
+
from training_lib.validation import run_validation
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# ============================================================================
|
| 66 |
+
# ДАННЫЕ
|
| 67 |
+
# ============================================================================
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class CodeCompletionCollator:
|
| 71 |
+
"""Collate function для батчирования примеров code completion."""
|
| 72 |
+
|
| 73 |
+
def __init__(
|
| 74 |
+
self,
|
| 75 |
+
tokenizer: PreTrainedTokenizerBase,
|
| 76 |
+
max_context_len: int = 1024,
|
| 77 |
+
max_target_len: int = 256,
|
| 78 |
+
):
|
| 79 |
+
self.tokenizer = tokenizer
|
| 80 |
+
self.max_context_len = max_context_len
|
| 81 |
+
self.max_target_len = max_target_len
|
| 82 |
+
self.pad_token_id = tokenizer.pad_token_id
|
| 83 |
+
|
| 84 |
+
def __call__(self, batch: list[dict]) -> dict:
|
| 85 |
+
contexts = [item["context"] for item in batch]
|
| 86 |
+
targets = [item["target"] for item in batch]
|
| 87 |
+
|
| 88 |
+
encoded_contexts = self.tokenizer(
|
| 89 |
+
contexts,
|
| 90 |
+
add_special_tokens=True,
|
| 91 |
+
truncation=True,
|
| 92 |
+
max_length=self.max_context_len,
|
| 93 |
+
return_tensors=None,
|
| 94 |
+
)
|
| 95 |
+
encoded_targets = self.tokenizer(
|
| 96 |
+
targets,
|
| 97 |
+
add_special_tokens=False,
|
| 98 |
+
truncation=True,
|
| 99 |
+
max_length=self.max_target_len,
|
| 100 |
+
return_tensors=None,
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
input_ids_list = []
|
| 104 |
+
context_lengths = []
|
| 105 |
+
|
| 106 |
+
for ctx_ids, tgt_ids in zip(
|
| 107 |
+
encoded_contexts["input_ids"], encoded_targets["input_ids"]
|
| 108 |
+
):
|
| 109 |
+
tgt_ids = tgt_ids + [self.tokenizer.eos_token_id]
|
| 110 |
+
context_lengths.append(len(ctx_ids))
|
| 111 |
+
input_ids_list.append(ctx_ids + tgt_ids)
|
| 112 |
+
|
| 113 |
+
max_len = max(len(ids) for ids in input_ids_list)
|
| 114 |
+
|
| 115 |
+
padded_input_ids = []
|
| 116 |
+
attention_mask = []
|
| 117 |
+
|
| 118 |
+
for ids in input_ids_list:
|
| 119 |
+
padding_len = max_len - len(ids)
|
| 120 |
+
padded_input_ids.append(ids + [self.pad_token_id] * padding_len)
|
| 121 |
+
attention_mask.append([1] * len(ids) + [0] * padding_len)
|
| 122 |
+
|
| 123 |
+
return {
|
| 124 |
+
"input_ids": torch.tensor(padded_input_ids, dtype=torch.long),
|
| 125 |
+
"attention_mask": torch.tensor(attention_mask, dtype=torch.long),
|
| 126 |
+
"context_lengths": torch.tensor(context_lengths, dtype=torch.long),
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def create_dataloaders(
|
| 131 |
+
cfg: DictConfig, tokenizer: PreTrainedTokenizerBase
|
| 132 |
+
) -> dict[str, DataLoader]:
|
| 133 |
+
"""Создание DataLoader'ов для train и validation."""
|
| 134 |
+
dataset_dict = load_from_disk(cfg.data.path)
|
| 135 |
+
|
| 136 |
+
collator = CodeCompletionCollator(
|
| 137 |
+
tokenizer=tokenizer,
|
| 138 |
+
max_context_len=cfg.data.max_context_len,
|
| 139 |
+
max_target_len=cfg.data.max_target_len,
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
dataloaders = {}
|
| 143 |
+
|
| 144 |
+
if "train" in dataset_dict:
|
| 145 |
+
train_dataset = dataset_dict["train"]
|
| 146 |
+
max_train = cfg.data.get("max_train_samples", None)
|
| 147 |
+
if max_train is not None:
|
| 148 |
+
train_dataset = train_dataset.select(range(min(max_train, len(train_dataset))))
|
| 149 |
+
dataloaders["train"] = DataLoader(
|
| 150 |
+
train_dataset,
|
| 151 |
+
batch_size=cfg.training.batch_size,
|
| 152 |
+
shuffle=True,
|
| 153 |
+
collate_fn=collator,
|
| 154 |
+
num_workers=cfg.data.num_workers,
|
| 155 |
+
pin_memory=cfg.data.pin_memory,
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
if "validation" in dataset_dict:
|
| 159 |
+
val_dataset = dataset_dict["validation"]
|
| 160 |
+
max_val = cfg.data.get("max_val_samples", None)
|
| 161 |
+
if max_val is not None:
|
| 162 |
+
val_dataset = val_dataset.select(range(min(max_val, len(val_dataset))))
|
| 163 |
+
eval_batch_size = cfg.training.get("eval_batch_size", cfg.training.batch_size)
|
| 164 |
+
dataloaders["validation"] = DataLoader(
|
| 165 |
+
val_dataset,
|
| 166 |
+
batch_size=eval_batch_size,
|
| 167 |
+
shuffle=False,
|
| 168 |
+
collate_fn=collator,
|
| 169 |
+
num_workers=cfg.data.num_workers,
|
| 170 |
+
pin_memory=cfg.data.pin_memory,
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
return dataloaders
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
# ============================================================================
|
| 179 |
+
# LOSS ФУНКЦИИ
|
| 180 |
+
# ============================================================================
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def compute_loss(
|
| 184 |
+
logits: torch.Tensor,
|
| 185 |
+
input_ids: torch.Tensor,
|
| 186 |
+
context_lengths: torch.Tensor,
|
| 187 |
+
attention_mask: torch.Tensor,
|
| 188 |
+
) -> dict:
|
| 189 |
+
"""Вычисление loss для авторегрессионной модели."""
|
| 190 |
+
batch_size, seq_len, vocab_size = logits.shape
|
| 191 |
+
|
| 192 |
+
shift_logits = logits[:, :-1, :].contiguous()
|
| 193 |
+
shift_labels = input_ids[:, 1:].contiguous()
|
| 194 |
+
shift_mask = attention_mask[:, 1:].contiguous()
|
| 195 |
+
|
| 196 |
+
target_mask = torch.zeros_like(shift_labels, dtype=torch.bool)
|
| 197 |
+
for i in range(batch_size):
|
| 198 |
+
ctx_len = context_lengths[i].item()
|
| 199 |
+
target_mask[i, ctx_len - 1 :] = True
|
| 200 |
+
|
| 201 |
+
final_mask = target_mask & shift_mask.bool()
|
| 202 |
+
|
| 203 |
+
if final_mask.sum() > 0:
|
| 204 |
+
loss = F.cross_entropy(
|
| 205 |
+
shift_logits[final_mask], shift_labels[final_mask], reduction="mean"
|
| 206 |
+
)
|
| 207 |
+
else:
|
| 208 |
+
loss = torch.tensor(0.0, device=logits.device)
|
| 209 |
+
|
| 210 |
+
return {"loss": loss}
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def _pythia_forward_loss(
|
| 214 |
+
model: nn.Module,
|
| 215 |
+
batch: dict,
|
| 216 |
+
cfg: DictConfig,
|
| 217 |
+
accelerator: Accelerator,
|
| 218 |
+
) -> dict:
|
| 219 |
+
"""Forward + loss for a plain HF causal LM (attention_mask= kwarg, .logits)."""
|
| 220 |
+
input_ids = batch["input_ids"]
|
| 221 |
+
attention_mask = batch["attention_mask"]
|
| 222 |
+
context_lengths = batch["context_lengths"]
|
| 223 |
+
output = model(input_ids, attention_mask=attention_mask)
|
| 224 |
+
return compute_loss(output.logits, input_ids, context_lengths, attention_mask)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
# ============================================================================
|
| 228 |
+
# PARAMETER GROUPING
|
| 229 |
+
# ============================================================================
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def group_params(model: nn.Module, weight_decay: float) -> list[dict]:
|
| 233 |
+
"""Группировка параметров для optimizer."""
|
| 234 |
+
decay_params = []
|
| 235 |
+
no_decay_params = []
|
| 236 |
+
|
| 237 |
+
for name, param in model.named_parameters():
|
| 238 |
+
if not param.requires_grad:
|
| 239 |
+
continue
|
| 240 |
+
|
| 241 |
+
if "bias" in name or "LayerNorm" in name or "layernorm" in name:
|
| 242 |
+
no_decay_params.append(param)
|
| 243 |
+
else:
|
| 244 |
+
decay_params.append(param)
|
| 245 |
+
|
| 246 |
+
return [
|
| 247 |
+
{"params": decay_params, "weight_decay": weight_decay},
|
| 248 |
+
{"params": no_decay_params, "weight_decay": 0.0},
|
| 249 |
+
]
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
# ============================================================================
|
| 255 |
+
# TRAINING LOOP
|
| 256 |
+
# ============================================================================
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def train_epoch(
|
| 260 |
+
model: nn.Module,
|
| 261 |
+
dataloader: DataLoader,
|
| 262 |
+
optimizer: torch.optim.Optimizer,
|
| 263 |
+
scheduler,
|
| 264 |
+
cfg: DictConfig,
|
| 265 |
+
epoch: int,
|
| 266 |
+
global_step: int,
|
| 267 |
+
accelerator: Accelerator,
|
| 268 |
+
val_dataloader: DataLoader | None = None,
|
| 269 |
+
best_val_loss: float = float("inf"),
|
| 270 |
+
) -> tuple[int, float]:
|
| 271 |
+
"""Один epoch тренировки. Возвращает (global_step, best_val_loss)."""
|
| 272 |
+
model.train()
|
| 273 |
+
|
| 274 |
+
loss_meter = AverageMeter()
|
| 275 |
+
|
| 276 |
+
optimizer.zero_grad()
|
| 277 |
+
accumulated_loss = 0.0
|
| 278 |
+
accumulated_steps = 0
|
| 279 |
+
|
| 280 |
+
epoch_start_time = time.time()
|
| 281 |
+
step_start_time = time.time()
|
| 282 |
+
|
| 283 |
+
for batch_idx, batch in enumerate(dataloader):
|
| 284 |
+
input_ids = batch["input_ids"]
|
| 285 |
+
attention_mask = batch["attention_mask"]
|
| 286 |
+
context_lengths = batch["context_lengths"]
|
| 287 |
+
|
| 288 |
+
with accelerator.autocast():
|
| 289 |
+
output = model(input_ids, attention_mask=attention_mask)
|
| 290 |
+
logits = output.logits
|
| 291 |
+
loss_dict = compute_loss(
|
| 292 |
+
logits, input_ids, context_lengths, attention_mask
|
| 293 |
+
)
|
| 294 |
+
|
| 295 |
+
loss = loss_dict["loss"] / cfg.training.gradient_accumulation_steps
|
| 296 |
+
accelerator.backward(loss)
|
| 297 |
+
|
| 298 |
+
accumulated_loss += loss_dict["loss"].item()
|
| 299 |
+
accumulated_steps += 1
|
| 300 |
+
|
| 301 |
+
if accumulated_steps == cfg.training.gradient_accumulation_steps:
|
| 302 |
+
if cfg.training.max_grad_norm > 0:
|
| 303 |
+
accelerator.clip_grad_norm_(
|
| 304 |
+
model.parameters(), cfg.training.max_grad_norm
|
| 305 |
+
)
|
| 306 |
+
|
| 307 |
+
optimizer.step()
|
| 308 |
+
scheduler.step()
|
| 309 |
+
optimizer.zero_grad()
|
| 310 |
+
|
| 311 |
+
avg_loss = accumulated_loss / cfg.training.gradient_accumulation_steps
|
| 312 |
+
loss_meter.update(avg_loss)
|
| 313 |
+
|
| 314 |
+
global_step += 1
|
| 315 |
+
|
| 316 |
+
if global_step % cfg.logging.log_interval == 0:
|
| 317 |
+
step_time = time.time() - step_start_time
|
| 318 |
+
current_lr = scheduler.get_last_lr()[0]
|
| 319 |
+
|
| 320 |
+
metrics = {
|
| 321 |
+
"train/loss": loss_meter.val,
|
| 322 |
+
"train/loss_avg": loss_meter.avg,
|
| 323 |
+
"train/lr": current_lr,
|
| 324 |
+
"train/epoch": epoch,
|
| 325 |
+
"train/step_time": step_time / cfg.logging.log_interval,
|
| 326 |
+
}
|
| 327 |
+
|
| 328 |
+
log_metrics(metrics, step=global_step)
|
| 329 |
+
|
| 330 |
+
log_message(
|
| 331 |
+
f"Epoch {epoch} | Step {global_step} | "
|
| 332 |
+
f"Loss: {loss_meter.avg:.4f} | "
|
| 333 |
+
f"LR: {current_lr:.2e}",
|
| 334 |
+
cfg,
|
| 335 |
+
accelerator,
|
| 336 |
+
)
|
| 337 |
+
|
| 338 |
+
step_start_time = time.time()
|
| 339 |
+
|
| 340 |
+
if (
|
| 341 |
+
cfg.logging.save_interval > 0
|
| 342 |
+
and global_step % cfg.logging.save_interval == 0
|
| 343 |
+
):
|
| 344 |
+
save_checkpoint(
|
| 345 |
+
model, optimizer, scheduler, global_step, epoch, cfg, accelerator
|
| 346 |
+
)
|
| 347 |
+
|
| 348 |
+
eval_interval = cfg.logging.get("eval_interval", 0)
|
| 349 |
+
if (
|
| 350 |
+
eval_interval > 0
|
| 351 |
+
and val_dataloader is not None
|
| 352 |
+
and global_step % eval_interval == 0
|
| 353 |
+
):
|
| 354 |
+
val_metrics = run_validation(
|
| 355 |
+
model=model,
|
| 356 |
+
dataloader=val_dataloader,
|
| 357 |
+
cfg=cfg,
|
| 358 |
+
global_step=global_step,
|
| 359 |
+
accelerator=accelerator,
|
| 360 |
+
forward_loss_fn=_pythia_forward_loss,
|
| 361 |
+
)
|
| 362 |
+
|
| 363 |
+
if val_metrics["val/loss"] < best_val_loss:
|
| 364 |
+
best_val_loss = val_metrics["val/loss"]
|
| 365 |
+
if accelerator.is_main_process:
|
| 366 |
+
best_model_path = Path(cfg.paths.output_dir) / "model_best.pt"
|
| 367 |
+
unwrapped_model = accelerator.unwrap_model(model)
|
| 368 |
+
torch.save(unwrapped_model.state_dict(), best_model_path)
|
| 369 |
+
log_message(
|
| 370 |
+
f"New best model saved! Val loss: {best_val_loss:.4f}",
|
| 371 |
+
cfg,
|
| 372 |
+
accelerator
|
| 373 |
+
)
|
| 374 |
+
|
| 375 |
+
log_metrics(
|
| 376 |
+
{
|
| 377 |
+
"best/val_loss": best_val_loss,
|
| 378 |
+
"best/val_perplexity": val_metrics["val/perplexity"],
|
| 379 |
+
"best/step": global_step,
|
| 380 |
+
},
|
| 381 |
+
step=global_step,
|
| 382 |
+
)
|
| 383 |
+
|
| 384 |
+
model.train()
|
| 385 |
+
|
| 386 |
+
accumulated_loss = 0.0
|
| 387 |
+
accumulated_steps = 0
|
| 388 |
+
|
| 389 |
+
epoch_time = time.time() - epoch_start_time
|
| 390 |
+
|
| 391 |
+
log_message(
|
| 392 |
+
f"Epoch {epoch} completed in {epoch_time:.2f}s | "
|
| 393 |
+
f"Loss: {loss_meter.avg:.4f}",
|
| 394 |
+
cfg,
|
| 395 |
+
accelerator,
|
| 396 |
+
)
|
| 397 |
+
|
| 398 |
+
log_metrics({
|
| 399 |
+
"epoch/loss": loss_meter.avg,
|
| 400 |
+
"epoch/time": epoch_time,
|
| 401 |
+
})
|
| 402 |
+
|
| 403 |
+
return global_step, best_val_loss
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
# ============================================================================
|
| 407 |
+
# MAIN
|
| 408 |
+
# ============================================================================
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
@hydra.main(version_base=None, config_path="configs", config_name="config")
|
| 412 |
+
def main(cfg: DictConfig):
|
| 413 |
+
"""Главная функция тренировки с поддержкой DDP через Accelerate."""
|
| 414 |
+
|
| 415 |
+
# === Performance: Enable TF32 for faster matmuls on Ampere+ GPUs ===
|
| 416 |
+
torch.set_float32_matmul_precision('high')
|
| 417 |
+
|
| 418 |
+
# === Accelerator Setup ===
|
| 419 |
+
mixed_precision = "bf16" if cfg.training.use_amp else "no"
|
| 420 |
+
|
| 421 |
+
accelerator = Accelerator(
|
| 422 |
+
mixed_precision=mixed_precision,
|
| 423 |
+
gradient_accumulation_steps=cfg.training.gradient_accumulation_steps,
|
| 424 |
+
)
|
| 425 |
+
|
| 426 |
+
# === Setup ===
|
| 427 |
+
accelerate_set_seed(cfg.seed)
|
| 428 |
+
|
| 429 |
+
if cfg.paths.output_dir is None:
|
| 430 |
+
cfg.paths.output_dir = HydraConfig.get().runtime.output_dir
|
| 431 |
+
|
| 432 |
+
OmegaConf.resolve(cfg)
|
| 433 |
+
|
| 434 |
+
log_message(f"CUDA_VISIBLE_DEVICES: {os.environ.get('CUDA_VISIBLE_DEVICES', 'not set')}", cfg, accelerator)
|
| 435 |
+
log_message(f"Number of processes: {accelerator.num_processes}", cfg, accelerator)
|
| 436 |
+
log_message(f"Process index: {accelerator.process_index}", cfg, accelerator)
|
| 437 |
+
log_message(f"Mixed precision: {mixed_precision}", cfg, accelerator)
|
| 438 |
+
|
| 439 |
+
log_message("=" * 60, cfg, accelerator)
|
| 440 |
+
log_message("Pythia Training Pipeline (Hydra + Trackio + Accelerate)", cfg, accelerator)
|
| 441 |
+
log_message("=" * 60, cfg, accelerator)
|
| 442 |
+
log_message(f"Config:\n{OmegaConf.to_yaml(cfg)}", cfg, accelerator)
|
| 443 |
+
|
| 444 |
+
# === Trackio Init ===
|
| 445 |
+
init_tracking(cfg, accelerator)
|
| 446 |
+
|
| 447 |
+
# === Tokenizer ===
|
| 448 |
+
log_message("Initializing tokenizer...", cfg, accelerator)
|
| 449 |
+
tokenizer = AutoTokenizer.from_pretrained(cfg.model.name)
|
| 450 |
+
|
| 451 |
+
if tokenizer.pad_token is None:
|
| 452 |
+
tokenizer.pad_token = tokenizer.eos_token
|
| 453 |
+
tokenizer.pad_token_id = tokenizer.eos_token_id
|
| 454 |
+
|
| 455 |
+
# === Model ===
|
| 456 |
+
log_message("Loading model...", cfg, accelerator)
|
| 457 |
+
|
| 458 |
+
# Flash Attention 2
|
| 459 |
+
torch_dtype = torch.bfloat16 if cfg.training.use_amp else torch.float32
|
| 460 |
+
|
| 461 |
+
if cfg.model.checkpoint_path:
|
| 462 |
+
model = AutoModelForCausalLM.from_pretrained(
|
| 463 |
+
cfg.model.name,
|
| 464 |
+
attn_implementation="flash_attention_2",
|
| 465 |
+
torch_dtype=torch_dtype,
|
| 466 |
+
)
|
| 467 |
+
checkpoint = torch.load(cfg.model.checkpoint_path, map_location="cpu")
|
| 468 |
+
model.load_state_dict(checkpoint["model_state_dict"] if "model_state_dict" in checkpoint else checkpoint)
|
| 469 |
+
log_message(f"Loaded checkpoint: {cfg.model.checkpoint_path}", cfg, accelerator)
|
| 470 |
+
elif cfg.model.from_scratch:
|
| 471 |
+
config = AutoConfig.from_pretrained(cfg.model.name)
|
| 472 |
+
config._attn_implementation = "flash_attention_2"
|
| 473 |
+
model = AutoModelForCausalLM.from_config(config, torch_dtype=torch_dtype)
|
| 474 |
+
log_message(f"Initialized from scratch: {cfg.model.name}", cfg, accelerator)
|
| 475 |
+
else:
|
| 476 |
+
model = AutoModelForCausalLM.from_pretrained(
|
| 477 |
+
cfg.model.name,
|
| 478 |
+
attn_implementation="flash_attention_2",
|
| 479 |
+
torch_dtype=torch_dtype,
|
| 480 |
+
)
|
| 481 |
+
log_message(f"Loaded pretrained: {cfg.model.name}", cfg, accelerator)
|
| 482 |
+
|
| 483 |
+
model.train()
|
| 484 |
+
|
| 485 |
+
# Log model info
|
| 486 |
+
total_params = sum(p.numel() for p in model.parameters())
|
| 487 |
+
trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
|
| 488 |
+
log_message(f"Total params: {total_params:,}", cfg, accelerator)
|
| 489 |
+
log_message(f"Trainable params: {trainable_params:,}", cfg, accelerator)
|
| 490 |
+
|
| 491 |
+
# === Data ===
|
| 492 |
+
log_message("Creating dataloaders...", cfg, accelerator)
|
| 493 |
+
dataloaders = create_dataloaders(cfg, tokenizer)
|
| 494 |
+
|
| 495 |
+
train_dataloader = dataloaders["train"]
|
| 496 |
+
val_dataloader = dataloaders.get("validation", None)
|
| 497 |
+
|
| 498 |
+
log_message(f"Train dataset size: {len(train_dataloader.dataset)}", cfg, accelerator)
|
| 499 |
+
log_message(f"Train batches per epoch (before DDP split): {len(train_dataloader)}", cfg, accelerator)
|
| 500 |
+
|
| 501 |
+
if val_dataloader:
|
| 502 |
+
log_message(f"Validation dataset size: {len(val_dataloader.dataset)}", cfg, accelerator)
|
| 503 |
+
log_message(f"Validation batches: {len(val_dataloader)}", cfg, accelerator)
|
| 504 |
+
else:
|
| 505 |
+
log_message("No validation dataset found", cfg, accelerator)
|
| 506 |
+
|
| 507 |
+
# === Optimizer ===
|
| 508 |
+
log_message("Creating optimizer...", cfg, accelerator)
|
| 509 |
+
param_groups = group_params(model, cfg.training.weight_decay)
|
| 510 |
+
|
| 511 |
+
optimizer = torch.optim.AdamW(
|
| 512 |
+
param_groups,
|
| 513 |
+
lr=cfg.training.lr,
|
| 514 |
+
betas=tuple(cfg.training.betas),
|
| 515 |
+
eps=cfg.training.eps,
|
| 516 |
+
)
|
| 517 |
+
|
| 518 |
+
# === Scheduler ===
|
| 519 |
+
steps_per_epoch = math.ceil(
|
| 520 |
+
len(train_dataloader) / accelerator.num_processes
|
| 521 |
+
)
|
| 522 |
+
total_steps = (
|
| 523 |
+
cfg.training.epochs
|
| 524 |
+
* steps_per_epoch
|
| 525 |
+
// cfg.training.gradient_accumulation_steps
|
| 526 |
+
)
|
| 527 |
+
scheduler = get_lr_scheduler(optimizer, cfg, total_steps)
|
| 528 |
+
|
| 529 |
+
log_message(
|
| 530 |
+
f"Total steps: {total_steps}, Steps per epoch: {steps_per_epoch}",
|
| 531 |
+
cfg,
|
| 532 |
+
accelerator
|
| 533 |
+
)
|
| 534 |
+
|
| 535 |
+
# === Accelerate Prepare ===
|
| 536 |
+
log_message("Preparing model, optimizer, and dataloaders with Accelerate...", cfg, accelerator)
|
| 537 |
+
|
| 538 |
+
if val_dataloader is not None:
|
| 539 |
+
model, optimizer, train_dataloader, val_dataloader, scheduler = accelerator.prepare(
|
| 540 |
+
model, optimizer, train_dataloader, val_dataloader, scheduler
|
| 541 |
+
)
|
| 542 |
+
else:
|
| 543 |
+
model, optimizer, train_dataloader, scheduler = accelerator.prepare(
|
| 544 |
+
model, optimizer, train_dataloader, scheduler
|
| 545 |
+
)
|
| 546 |
+
|
| 547 |
+
log_message(f"Train batches per epoch (after DDP split): {len(train_dataloader)}", cfg, accelerator)
|
| 548 |
+
|
| 549 |
+
# === Resume ===
|
| 550 |
+
global_step = 0
|
| 551 |
+
start_epoch = 1
|
| 552 |
+
|
| 553 |
+
if cfg.training.resume and cfg.training.resume_checkpoint:
|
| 554 |
+
global_step, start_epoch = load_checkpoint(
|
| 555 |
+
model, optimizer, scheduler, cfg.training.resume_checkpoint, cfg, accelerator
|
| 556 |
+
)
|
| 557 |
+
start_epoch += 1
|
| 558 |
+
|
| 559 |
+
# === Training Loop ===
|
| 560 |
+
log_message("Starting training...", cfg, accelerator)
|
| 561 |
+
|
| 562 |
+
best_val_loss = float("inf")
|
| 563 |
+
|
| 564 |
+
try:
|
| 565 |
+
for epoch in range(start_epoch, cfg.training.epochs + 1):
|
| 566 |
+
log_message(f"\n{'=' * 60}", cfg, accelerator)
|
| 567 |
+
log_message(f"EPOCH {epoch}/{cfg.training.epochs}", cfg, accelerator)
|
| 568 |
+
log_message(f"{'=' * 60}", cfg, accelerator)
|
| 569 |
+
|
| 570 |
+
global_step, best_val_loss = train_epoch(
|
| 571 |
+
model=model,
|
| 572 |
+
dataloader=train_dataloader,
|
| 573 |
+
optimizer=optimizer,
|
| 574 |
+
scheduler=scheduler,
|
| 575 |
+
cfg=cfg,
|
| 576 |
+
epoch=epoch,
|
| 577 |
+
global_step=global_step,
|
| 578 |
+
accelerator=accelerator,
|
| 579 |
+
val_dataloader=val_dataloader,
|
| 580 |
+
best_val_loss=best_val_loss,
|
| 581 |
+
)
|
| 582 |
+
|
| 583 |
+
if cfg.logging.save_every_epoch:
|
| 584 |
+
save_checkpoint(
|
| 585 |
+
model, optimizer, scheduler, global_step, epoch, cfg, accelerator
|
| 586 |
+
)
|
| 587 |
+
|
| 588 |
+
except KeyboardInterrupt:
|
| 589 |
+
log_message("Training interrupted by user", cfg, accelerator)
|
| 590 |
+
save_checkpoint(model, optimizer, scheduler, global_step, epoch, cfg, accelerator)
|
| 591 |
+
|
| 592 |
+
# === Final Save ===
|
| 593 |
+
log_message("\nTraining completed!", cfg, accelerator)
|
| 594 |
+
|
| 595 |
+
if accelerator.is_main_process:
|
| 596 |
+
final_model_path = Path(cfg.paths.output_dir) / "model_final.pt"
|
| 597 |
+
unwrapped_model = accelerator.unwrap_model(model)
|
| 598 |
+
torch.save(unwrapped_model.state_dict(), final_model_path)
|
| 599 |
+
log_message(f"Final model: {final_model_path}", cfg, accelerator)
|
| 600 |
+
|
| 601 |
+
accelerator.wait_for_everyone()
|
| 602 |
+
finish_tracking()
|
| 603 |
+
|
| 604 |
+
|
| 605 |
+
if __name__ == "__main__":
|
| 606 |
+
main()
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/config.yaml
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_wandb:
|
| 2 |
+
value:
|
| 3 |
+
cli_version: 0.24.0
|
| 4 |
+
code_path: code/code_completion_exp/train_pythia/train.py
|
| 5 |
+
e:
|
| 6 |
+
9lruqtkrywy3tnenzuf4volhpmtbyljs:
|
| 7 |
+
args:
|
| 8 |
+
- tracking=wandb
|
| 9 |
+
- tracking.project=code-completion_lr-sweep
|
| 10 |
+
- tracking.run_name=pythia_1b_lr_1e-5
|
| 11 |
+
- training.lr=1e-5
|
| 12 |
+
- paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 13 |
+
- model=pythia_1b
|
| 14 |
+
- data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 15 |
+
codePath: code_completion_exp/train_pythia/train.py
|
| 16 |
+
codePathLocal: train.py
|
| 17 |
+
cpu_count: 64
|
| 18 |
+
cpu_count_logical: 128
|
| 19 |
+
cudaVersion: "12.2"
|
| 20 |
+
disk:
|
| 21 |
+
/:
|
| 22 |
+
total: "265214230528"
|
| 23 |
+
used: "37788938240"
|
| 24 |
+
email: nikita@local.ru
|
| 25 |
+
executable: /venv/bytellm/bin/python
|
| 26 |
+
git:
|
| 27 |
+
commit: f111e13281aa0dc58e24302edab5b0d5c2024586
|
| 28 |
+
remote: https://github.com/naryst/byte-llms-code.git
|
| 29 |
+
gpu: NVIDIA H100 80GB HBM3
|
| 30 |
+
gpu_count: 4
|
| 31 |
+
gpu_nvidia:
|
| 32 |
+
- architecture: Hopper
|
| 33 |
+
cudaCores: 16896
|
| 34 |
+
memoryTotal: "85520809984"
|
| 35 |
+
name: NVIDIA H100 80GB HBM3
|
| 36 |
+
uuid: GPU-b60cdcab-2033-2009-41de-be646c953a20
|
| 37 |
+
- architecture: Hopper
|
| 38 |
+
cudaCores: 16896
|
| 39 |
+
memoryTotal: "85520809984"
|
| 40 |
+
name: NVIDIA H100 80GB HBM3
|
| 41 |
+
uuid: GPU-9982b420-4520-4238-c378-ec5a46015474
|
| 42 |
+
- architecture: Hopper
|
| 43 |
+
cudaCores: 16896
|
| 44 |
+
memoryTotal: "85520809984"
|
| 45 |
+
name: NVIDIA H100 80GB HBM3
|
| 46 |
+
uuid: GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f
|
| 47 |
+
- architecture: Hopper
|
| 48 |
+
cudaCores: 16896
|
| 49 |
+
memoryTotal: "85520809984"
|
| 50 |
+
name: NVIDIA H100 80GB HBM3
|
| 51 |
+
uuid: GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134
|
| 52 |
+
host: 7504e518d24a
|
| 53 |
+
memory:
|
| 54 |
+
total: "1081679683584"
|
| 55 |
+
os: Linux-5.4.0-176-generic-x86_64-with-glibc2.35
|
| 56 |
+
program: /workspace/byte-llms-code/code_completion_exp/train_pythia/train.py
|
| 57 |
+
python: CPython 3.12.0
|
| 58 |
+
root: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 59 |
+
startedAt: "2026-04-25T17:51:24.726114Z"
|
| 60 |
+
writerId: 9lruqtkrywy3tnenzuf4volhpmtbyljs
|
| 61 |
+
m: []
|
| 62 |
+
python_version: 3.12.0
|
| 63 |
+
t:
|
| 64 |
+
"1":
|
| 65 |
+
- 1
|
| 66 |
+
- 11
|
| 67 |
+
- 49
|
| 68 |
+
- 50
|
| 69 |
+
- 51
|
| 70 |
+
- 71
|
| 71 |
+
- 105
|
| 72 |
+
"2":
|
| 73 |
+
- 1
|
| 74 |
+
- 11
|
| 75 |
+
- 49
|
| 76 |
+
- 50
|
| 77 |
+
- 51
|
| 78 |
+
- 71
|
| 79 |
+
- 105
|
| 80 |
+
"3":
|
| 81 |
+
- 2
|
| 82 |
+
- 13
|
| 83 |
+
- 16
|
| 84 |
+
- 61
|
| 85 |
+
"4": 3.12.0
|
| 86 |
+
"5": 0.24.0
|
| 87 |
+
"6": 4.57.6
|
| 88 |
+
"12": 0.24.0
|
| 89 |
+
"13": linux-x86_64
|
| 90 |
+
data:
|
| 91 |
+
value:
|
| 92 |
+
max_context_len: 4096
|
| 93 |
+
max_target_len: 256
|
| 94 |
+
max_train_samples: 20000
|
| 95 |
+
max_val_samples: 2000
|
| 96 |
+
num_workers: 4
|
| 97 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 98 |
+
pin_memory: true
|
| 99 |
+
device:
|
| 100 |
+
value: cuda
|
| 101 |
+
logging:
|
| 102 |
+
value:
|
| 103 |
+
eval_interval: 1000
|
| 104 |
+
log_interval: 10
|
| 105 |
+
save_every_epoch: true
|
| 106 |
+
save_interval: 3000
|
| 107 |
+
model:
|
| 108 |
+
value:
|
| 109 |
+
checkpoint_path: null
|
| 110 |
+
from_scratch: false
|
| 111 |
+
name: EleutherAI/pythia-1b
|
| 112 |
+
paths:
|
| 113 |
+
value:
|
| 114 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 115 |
+
seed:
|
| 116 |
+
value: 42
|
| 117 |
+
tracking:
|
| 118 |
+
value:
|
| 119 |
+
backend: wandb
|
| 120 |
+
base_url: https://wandb.platun0v.ru
|
| 121 |
+
enabled: true
|
| 122 |
+
entity: null
|
| 123 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5
|
| 124 |
+
project: code-completion_lr-sweep
|
| 125 |
+
run_name: pythia_1b_lr_1e-5
|
| 126 |
+
training:
|
| 127 |
+
value:
|
| 128 |
+
batch_size: 4
|
| 129 |
+
betas:
|
| 130 |
+
- 0.9
|
| 131 |
+
- 0.95
|
| 132 |
+
decay_ratio: 0.2
|
| 133 |
+
epochs: 1
|
| 134 |
+
eps: 1e-08
|
| 135 |
+
eval_batch_size: 12
|
| 136 |
+
gradient_accumulation_steps: 4
|
| 137 |
+
lr: 1e-05
|
| 138 |
+
lr_scheduler: wsd
|
| 139 |
+
max_grad_norm: 1
|
| 140 |
+
min_lr_ratio: 0.1
|
| 141 |
+
resume: false
|
| 142 |
+
resume_checkpoint: null
|
| 143 |
+
use_amp: true
|
| 144 |
+
warmup_ratio: 0.1
|
| 145 |
+
warmup_steps: 100
|
| 146 |
+
weight_decay: 0.1
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/output.log
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 17:51:25] Initializing tokenizer...
|
| 2 |
+
[2026-04-25 17:51:26] Loading model...
|
| 3 |
+
`torch_dtype` is deprecated! Use `dtype` instead!
|
| 4 |
+
[2026-04-25 17:51:30] Loaded pretrained: EleutherAI/pythia-1b
|
| 5 |
+
[2026-04-25 17:51:30] Total params: 1,011,781,632
|
| 6 |
+
[2026-04-25 17:51:30] Trainable params: 1,011,781,632
|
| 7 |
+
[2026-04-25 17:51:30] Creating dataloaders...
|
| 8 |
+
[2026-04-25 17:51:30] Train dataset size: 20000
|
| 9 |
+
[2026-04-25 17:51:30] Train batches per epoch (before DDP split): 5000
|
| 10 |
+
[2026-04-25 17:51:30] Validation dataset size: 2000
|
| 11 |
+
[2026-04-25 17:51:30] Validation batches: 167
|
| 12 |
+
[2026-04-25 17:51:30] Creating optimizer...
|
| 13 |
+
[2026-04-25 17:51:30] Total steps: 625, Steps per epoch: 2500
|
| 14 |
+
[2026-04-25 17:51:30] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 15 |
+
[2026-04-25 17:51:32] Train batches per epoch (after DDP split): 2500
|
| 16 |
+
[2026-04-25 17:51:32] Starting training...
|
| 17 |
+
[2026-04-25 17:51:32]
|
| 18 |
+
============================================================
|
| 19 |
+
[2026-04-25 17:51:32] EPOCH 1/1
|
| 20 |
+
[2026-04-25 17:51:32] ============================================================
|
| 21 |
+
[2026-04-25 17:51:35] Epoch 1 | Step 10 | Loss: 2.4974 | LR: 3.90e-06
|
| 22 |
+
[2026-04-25 17:51:38] Epoch 1 | Step 20 | Loss: 2.2461 | LR: 6.81e-06
|
| 23 |
+
[2026-04-25 17:51:41] Epoch 1 | Step 30 | Loss: 1.9776 | LR: 9.71e-06
|
| 24 |
+
[2026-04-25 17:51:43] Epoch 1 | Step 40 | Loss: 1.8005 | LR: 1.00e-05
|
| 25 |
+
[2026-04-25 17:51:46] Epoch 1 | Step 50 | Loss: 1.6747 | LR: 1.00e-05
|
| 26 |
+
[2026-04-25 17:51:48] Epoch 1 | Step 60 | Loss: 1.5843 | LR: 1.00e-05
|
| 27 |
+
[2026-04-25 17:51:51] Epoch 1 | Step 70 | Loss: 1.5115 | LR: 1.00e-05
|
| 28 |
+
[2026-04-25 17:51:53] Epoch 1 | Step 80 | Loss: 1.4551 | LR: 1.00e-05
|
| 29 |
+
[2026-04-25 17:51:56] Epoch 1 | Step 90 | Loss: 1.3999 | LR: 1.00e-05
|
| 30 |
+
[2026-04-25 17:51:58] Epoch 1 | Step 100 | Loss: 1.3846 | LR: 1.00e-05
|
| 31 |
+
[2026-04-25 17:52:01] Epoch 1 | Step 110 | Loss: 1.3649 | LR: 1.00e-05
|
| 32 |
+
[2026-04-25 17:52:03] Epoch 1 | Step 120 | Loss: 1.3474 | LR: 1.00e-05
|
| 33 |
+
[2026-04-25 17:52:06] Epoch 1 | Step 130 | Loss: 1.3130 | LR: 1.00e-05
|
| 34 |
+
[2026-04-25 17:52:08] Epoch 1 | Step 140 | Loss: 1.2949 | LR: 1.00e-05
|
| 35 |
+
[2026-04-25 17:52:11] Epoch 1 | Step 150 | Loss: 1.2813 | LR: 1.00e-05
|
| 36 |
+
[2026-04-25 17:52:13] Epoch 1 | Step 160 | Loss: 1.2615 | LR: 1.00e-05
|
| 37 |
+
[2026-04-25 17:52:16] Epoch 1 | Step 170 | Loss: 1.2478 | LR: 1.00e-05
|
| 38 |
+
[2026-04-25 17:52:18] Epoch 1 | Step 180 | Loss: 1.2421 | LR: 1.00e-05
|
| 39 |
+
[2026-04-25 17:52:21] Epoch 1 | Step 190 | Loss: 1.2342 | LR: 1.00e-05
|
| 40 |
+
[2026-04-25 17:52:24] Epoch 1 | Step 200 | Loss: 1.2227 | LR: 1.00e-05
|
| 41 |
+
[2026-04-25 17:52:26] Epoch 1 | Step 210 | Loss: 1.2143 | LR: 1.00e-05
|
| 42 |
+
[2026-04-25 17:52:29] Epoch 1 | Step 220 | Loss: 1.2103 | LR: 1.00e-05
|
| 43 |
+
[2026-04-25 17:52:31] Epoch 1 | Step 230 | Loss: 1.2054 | LR: 1.00e-05
|
| 44 |
+
[2026-04-25 17:52:34] Epoch 1 | Step 240 | Loss: 1.1962 | LR: 1.00e-05
|
| 45 |
+
[2026-04-25 17:52:36] Epoch 1 | Step 250 | Loss: 1.1868 | LR: 1.00e-05
|
| 46 |
+
[2026-04-25 17:52:39] Epoch 1 | Step 260 | Loss: 1.1883 | LR: 9.44e-06
|
| 47 |
+
[2026-04-25 17:52:42] Epoch 1 | Step 270 | Loss: 1.1847 | LR: 7.91e-06
|
| 48 |
+
[2026-04-25 17:52:44] Epoch 1 | Step 280 | Loss: 1.1804 | LR: 5.78e-06
|
| 49 |
+
[2026-04-25 17:52:47] Epoch 1 | Step 290 | Loss: 1.1755 | LR: 3.58e-06
|
| 50 |
+
[2026-04-25 17:52:49] Epoch 1 | Step 300 | Loss: 1.1688 | LR: 1.86e-06
|
| 51 |
+
[2026-04-25 17:52:52] Epoch 1 | Step 310 | Loss: 1.1661 | LR: 1.04e-06
|
| 52 |
+
[2026-04-25 17:52:55] Epoch 1 | Step 320 | Loss: 1.1683 | LR: 1.00e-06
|
| 53 |
+
[2026-04-25 17:52:57] Epoch 1 | Step 330 | Loss: 1.1641 | LR: 1.00e-06
|
| 54 |
+
[2026-04-25 17:52:59] Epoch 1 | Step 340 | Loss: 1.1625 | LR: 1.00e-06
|
| 55 |
+
[2026-04-25 17:53:02] Epoch 1 | Step 350 | Loss: 1.1564 | LR: 1.00e-06
|
| 56 |
+
[2026-04-25 17:53:04] Epoch 1 | Step 360 | Loss: 1.1521 | LR: 1.00e-06
|
| 57 |
+
[2026-04-25 17:53:07] Epoch 1 | Step 370 | Loss: 1.1474 | LR: 1.00e-06
|
| 58 |
+
[2026-04-25 17:53:10] Epoch 1 | Step 380 | Loss: 1.1489 | LR: 1.00e-06
|
| 59 |
+
[2026-04-25 17:53:12] Epoch 1 | Step 390 | Loss: 1.1455 | LR: 1.00e-06
|
| 60 |
+
[2026-04-25 17:53:14] Epoch 1 | Step 400 | Loss: 1.1420 | LR: 1.00e-06
|
| 61 |
+
[2026-04-25 17:53:17] Epoch 1 | Step 410 | Loss: 1.1431 | LR: 1.00e-06
|
| 62 |
+
[2026-04-25 17:53:20] Epoch 1 | Step 420 | Loss: 1.1391 | LR: 1.00e-06
|
| 63 |
+
[2026-04-25 17:53:22] Epoch 1 | Step 430 | Loss: 1.1357 | LR: 1.00e-06
|
| 64 |
+
[2026-04-25 17:53:25] Epoch 1 | Step 440 | Loss: 1.1338 | LR: 1.00e-06
|
| 65 |
+
[2026-04-25 17:53:27] Epoch 1 | Step 450 | Loss: 1.1328 | LR: 1.00e-06
|
| 66 |
+
[2026-04-25 17:53:30] Epoch 1 | Step 460 | Loss: 1.1333 | LR: 1.00e-06
|
| 67 |
+
[2026-04-25 17:53:32] Epoch 1 | Step 470 | Loss: 1.1346 | LR: 1.00e-06
|
| 68 |
+
[2026-04-25 17:53:35] Epoch 1 | Step 480 | Loss: 1.1344 | LR: 1.00e-06
|
| 69 |
+
[2026-04-25 17:53:37] Epoch 1 | Step 490 | Loss: 1.1360 | LR: 1.00e-06
|
| 70 |
+
[2026-04-25 17:53:40] Epoch 1 | Step 500 | Loss: 1.1334 | LR: 1.00e-06
|
| 71 |
+
[2026-04-25 17:53:42] Epoch 1 | Step 510 | Loss: 1.1307 | LR: 1.00e-06
|
| 72 |
+
[2026-04-25 17:53:45] Epoch 1 | Step 520 | Loss: 1.1298 | LR: 1.00e-06
|
| 73 |
+
[2026-04-25 17:53:48] Epoch 1 | Step 530 | Loss: 1.1278 | LR: 1.00e-06
|
| 74 |
+
[2026-04-25 17:53:50] Epoch 1 | Step 540 | Loss: 1.1289 | LR: 1.00e-06
|
| 75 |
+
[2026-04-25 17:53:53] Epoch 1 | Step 550 | Loss: 1.1277 | LR: 1.00e-06
|
| 76 |
+
[2026-04-25 17:53:55] Epoch 1 | Step 560 | Loss: 1.1266 | LR: 1.00e-06
|
| 77 |
+
[2026-04-25 17:53:58] Epoch 1 | Step 570 | Loss: 1.1254 | LR: 1.00e-06
|
| 78 |
+
[2026-04-25 17:54:01] Epoch 1 | Step 580 | Loss: 1.1248 | LR: 1.00e-06
|
| 79 |
+
[2026-04-25 17:54:03] Epoch 1 | Step 590 | Loss: 1.1261 | LR: 1.00e-06
|
| 80 |
+
[2026-04-25 17:54:05] Epoch 1 | Step 600 | Loss: 1.1263 | LR: 1.00e-06
|
| 81 |
+
[2026-04-25 17:54:08] Epoch 1 | Step 610 | Loss: 1.1270 | LR: 1.00e-06
|
| 82 |
+
[2026-04-25 17:54:11] Epoch 1 | Step 620 | Loss: 1.1250 | LR: 1.00e-06
|
| 83 |
+
[2026-04-25 17:54:12] Epoch 1 completed in 159.64s | Loss: 1.1239
|
| 84 |
+
[2026-04-25 17:54:18] Checkpoint saved: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/checkpoints/checkpoint_step_625.pt
|
| 85 |
+
[2026-04-25 17:54:25]
|
| 86 |
+
Training completed!
|
| 87 |
+
[2026-04-25 17:54:27] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/model_final.pt
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/requirements.txt
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
setuptools==78.1.1
|
| 2 |
+
wheel==0.45.1
|
| 3 |
+
pip==25.2
|
| 4 |
+
webencodings==0.5.1
|
| 5 |
+
triton==3.2.0
|
| 6 |
+
pytz==2025.2
|
| 7 |
+
pydub==0.25.1
|
| 8 |
+
pure_eval==0.2.3
|
| 9 |
+
ptyprocess==0.7.0
|
| 10 |
+
nvidia-ml-py==13.590.48
|
| 11 |
+
nvidia-cusparselt-cu12==0.6.2
|
| 12 |
+
mpmath==1.3.0
|
| 13 |
+
ipython-genutils==0.2.0
|
| 14 |
+
fastjsonschema==2.21.2
|
| 15 |
+
brotli==1.2.0
|
| 16 |
+
antlr4-python3-runtime==4.9.3
|
| 17 |
+
xxhash==3.6.0
|
| 18 |
+
widgetsnbextension==4.0.14
|
| 19 |
+
websocket-client==1.9.0
|
| 20 |
+
webcolors==24.11.1
|
| 21 |
+
wcwidth==0.2.14
|
| 22 |
+
urllib3==2.5.0
|
| 23 |
+
uri-template==1.3.0
|
| 24 |
+
tzdata==2025.2
|
| 25 |
+
typing_extensions==4.15.0
|
| 26 |
+
types-python-dateutil==2.9.0.20251008
|
| 27 |
+
traitlets==5.14.3
|
| 28 |
+
tqdm==4.67.1
|
| 29 |
+
tornado==6.5.2
|
| 30 |
+
tomlkit==0.13.3
|
| 31 |
+
tinycss2==1.4.0
|
| 32 |
+
tabulate==0.9.0
|
| 33 |
+
sympy==1.13.1
|
| 34 |
+
soupsieve==2.8
|
| 35 |
+
sniffio==1.3.1
|
| 36 |
+
smmap==5.0.2
|
| 37 |
+
six==1.17.0
|
| 38 |
+
shellingham==1.5.4
|
| 39 |
+
Send2Trash==1.8.3
|
| 40 |
+
semantic-version==2.10.0
|
| 41 |
+
safetensors==0.6.2
|
| 42 |
+
rpds-py==0.27.1
|
| 43 |
+
rfc3986-validator==0.1.1
|
| 44 |
+
regex==2025.9.18
|
| 45 |
+
pyzmq==27.1.0
|
| 46 |
+
PyYAML==6.0.3
|
| 47 |
+
python-multipart==0.0.22
|
| 48 |
+
python-json-logger==4.0.0
|
| 49 |
+
python-dotenv==1.2.1
|
| 50 |
+
pyparsing==3.2.5
|
| 51 |
+
PyJWT==2.8.0
|
| 52 |
+
Pygments==2.19.2
|
| 53 |
+
pycparser==2.23
|
| 54 |
+
pyarrow==22.0.0
|
| 55 |
+
psutil==7.1.0
|
| 56 |
+
protobuf==6.33.4
|
| 57 |
+
propcache==0.4.1
|
| 58 |
+
prometheus_client==0.23.1
|
| 59 |
+
portalocker==3.2.0
|
| 60 |
+
platformdirs==4.5.0
|
| 61 |
+
pillow==11.3.0
|
| 62 |
+
pexpect==4.9.0
|
| 63 |
+
pathspec==1.0.4
|
| 64 |
+
parso==0.8.5
|
| 65 |
+
pandocfilters==1.5.1
|
| 66 |
+
packaging==25.0
|
| 67 |
+
orjson==3.11.6
|
| 68 |
+
opt_einsum==3.4.0
|
| 69 |
+
nvidia-nvtx-cu12==12.4.127
|
| 70 |
+
nvidia-nvjitlink-cu12==12.4.127
|
| 71 |
+
nvidia-nccl-cu12==2.21.5
|
| 72 |
+
nvidia-curand-cu12==10.3.5.147
|
| 73 |
+
nvidia-cufile-cu12==1.13.1.3
|
| 74 |
+
nvidia-cufft-cu12==11.2.1.3
|
| 75 |
+
nvidia-cuda-runtime-cu12==12.4.127
|
| 76 |
+
nvidia-cuda-nvrtc-cu12==12.4.127
|
| 77 |
+
nvidia-cuda-cupti-cu12==12.4.127
|
| 78 |
+
nvidia-cublas-cu12==12.4.5.8
|
| 79 |
+
numpy==2.3.3
|
| 80 |
+
ninja==1.13.0
|
| 81 |
+
networkx==3.5
|
| 82 |
+
nest-asyncio==1.6.0
|
| 83 |
+
narwhals==2.15.0
|
| 84 |
+
mypy_extensions==1.1.0
|
| 85 |
+
multidict==6.7.0
|
| 86 |
+
mistune==3.1.4
|
| 87 |
+
mdurl==0.1.2
|
| 88 |
+
MarkupSafe==3.0.3
|
| 89 |
+
lxml==6.0.2
|
| 90 |
+
librt==0.8.0
|
| 91 |
+
lark==1.3.0
|
| 92 |
+
kiwisolver==1.4.9
|
| 93 |
+
jupyterlab_widgets==3.0.15
|
| 94 |
+
jupyterlab_pygments==0.3.0
|
| 95 |
+
jsonpointer==3.0.0
|
| 96 |
+
json5==0.12.1
|
| 97 |
+
itsdangerous==2.2.0
|
| 98 |
+
idna==3.10
|
| 99 |
+
hf-xet==1.1.10
|
| 100 |
+
h11==0.16.0
|
| 101 |
+
groovy==0.1.2
|
| 102 |
+
fsspec==2025.9.0
|
| 103 |
+
frozenlist==1.8.0
|
| 104 |
+
fqdn==1.5.1
|
| 105 |
+
fonttools==4.60.1
|
| 106 |
+
filelock==3.19.1
|
| 107 |
+
ffmpy==1.0.0
|
| 108 |
+
executing==2.2.1
|
| 109 |
+
einops==0.8.1
|
| 110 |
+
dill==0.4.0
|
| 111 |
+
defusedxml==0.7.1
|
| 112 |
+
decorator==5.2.1
|
| 113 |
+
debugpy==1.8.17
|
| 114 |
+
dacite==1.9.2
|
| 115 |
+
cycler==0.12.1
|
| 116 |
+
comm==0.2.3
|
| 117 |
+
colorama==0.4.6
|
| 118 |
+
click==8.3.1
|
| 119 |
+
charset-normalizer==3.4.3
|
| 120 |
+
certifi==2025.10.5
|
| 121 |
+
bleach==6.2.0
|
| 122 |
+
babel==2.17.0
|
| 123 |
+
attrs==25.4.0
|
| 124 |
+
async-lru==2.0.5
|
| 125 |
+
asttokens==3.0.0
|
| 126 |
+
annotated-types==0.7.0
|
| 127 |
+
annotated-doc==0.0.4
|
| 128 |
+
aiohappyeyeballs==2.6.1
|
| 129 |
+
aiofiles==24.1.0
|
| 130 |
+
yarl==1.22.0
|
| 131 |
+
uvicorn==0.40.0
|
| 132 |
+
typing-inspection==0.4.2
|
| 133 |
+
terminado==0.18.1
|
| 134 |
+
stack-data==0.6.3
|
| 135 |
+
sentry-sdk==2.50.0
|
| 136 |
+
scipy==1.17.0
|
| 137 |
+
sacrebleu==2.6.0
|
| 138 |
+
rfc3987-syntax==1.1.0
|
| 139 |
+
rfc3339-validator==0.1.4
|
| 140 |
+
requests==2.32.5
|
| 141 |
+
reportlab==4.4.9
|
| 142 |
+
referencing==0.36.2
|
| 143 |
+
python-dateutil==2.9.0.post0
|
| 144 |
+
pydantic_core==2.41.5
|
| 145 |
+
prompt_toolkit==3.0.52
|
| 146 |
+
plotly==6.5.2
|
| 147 |
+
pathlib2==2.3.7.post1
|
| 148 |
+
orderedmultidict==1.0.2
|
| 149 |
+
optree==0.17.0
|
| 150 |
+
omegaconf==2.3.0
|
| 151 |
+
nvidia-cusparse-cu12==12.3.1.170
|
| 152 |
+
nvidia-cudnn-cu12==9.1.0.70
|
| 153 |
+
mypy==1.19.1
|
| 154 |
+
multiprocess==0.70.16
|
| 155 |
+
matplotlib-inline==0.1.7
|
| 156 |
+
markdown-it-py==4.0.0
|
| 157 |
+
jupyter_core==5.8.1
|
| 158 |
+
Jinja2==3.1.6
|
| 159 |
+
jedi==0.19.2
|
| 160 |
+
ipython_pygments_lexers==1.1.1
|
| 161 |
+
httpcore==1.0.9
|
| 162 |
+
gitdb==4.0.12
|
| 163 |
+
ftfy==6.3.1
|
| 164 |
+
contourpy==1.3.3
|
| 165 |
+
cffi==2.0.0
|
| 166 |
+
beautifulsoup4==4.14.2
|
| 167 |
+
anyio==4.11.0
|
| 168 |
+
aiosignal==1.4.0
|
| 169 |
+
starlette==0.50.0
|
| 170 |
+
rich==14.2.0
|
| 171 |
+
pydantic==2.12.5
|
| 172 |
+
pandas==2.3.3
|
| 173 |
+
nvidia-cusolver-cu12==11.6.1.9
|
| 174 |
+
matplotlib==3.10.7
|
| 175 |
+
jupyter_server_terminals==0.5.3
|
| 176 |
+
jupyter_client==8.6.3
|
| 177 |
+
jsonschema-specifications==2025.9.1
|
| 178 |
+
ipython==9.6.0
|
| 179 |
+
hydra-core==1.3.2
|
| 180 |
+
huggingface-hub==0.35.3
|
| 181 |
+
httpx==0.28.1
|
| 182 |
+
GitPython==3.1.46
|
| 183 |
+
furl==2.1.4
|
| 184 |
+
cryptography==46.0.4
|
| 185 |
+
arrow==1.3.0
|
| 186 |
+
argon2-cffi-bindings==25.1.0
|
| 187 |
+
aiohttp==3.13.1
|
| 188 |
+
wandb==0.24.0
|
| 189 |
+
typer==0.21.1
|
| 190 |
+
torch==2.6.0
|
| 191 |
+
tokenizers==0.22.1
|
| 192 |
+
seaborn==0.13.2
|
| 193 |
+
safehttpx==0.1.7
|
| 194 |
+
jsonschema==4.25.1
|
| 195 |
+
joypy==0.2.6
|
| 196 |
+
isoduration==20.11.0
|
| 197 |
+
ipywidgets==8.1.7
|
| 198 |
+
ipykernel==6.30.1
|
| 199 |
+
gradio_client==2.0.3
|
| 200 |
+
fastapi==0.128.0
|
| 201 |
+
Authlib==1.6.6
|
| 202 |
+
argon2-cffi==25.1.0
|
| 203 |
+
transformers==4.57.6
|
| 204 |
+
nbformat==5.10.4
|
| 205 |
+
mlstm_kernels==2.0.2
|
| 206 |
+
jupyter-console==6.6.3
|
| 207 |
+
gradio==6.5.1
|
| 208 |
+
datasets==4.3.0
|
| 209 |
+
clearml==1.16.4
|
| 210 |
+
accelerate==1.10.1
|
| 211 |
+
xlstm==2.0.4
|
| 212 |
+
nbclient==0.10.2
|
| 213 |
+
jupyter-events==0.12.0
|
| 214 |
+
trackio==0.15.0
|
| 215 |
+
nbconvert==7.16.6
|
| 216 |
+
jupyter_server==2.17.0
|
| 217 |
+
notebook_shim==0.2.4
|
| 218 |
+
jupyterlab_server==2.27.3
|
| 219 |
+
jupyter-lsp==2.3.0
|
| 220 |
+
nbclassic==1.3.3
|
| 221 |
+
jupyterlab==4.4.9
|
| 222 |
+
notebook==7.4.7
|
| 223 |
+
jupyter_contrib_core==0.4.2
|
| 224 |
+
jupyter==1.1.1
|
| 225 |
+
jupyter_nbextensions_configurator==0.6.4
|
| 226 |
+
causal-conv1d==1.5.0.post8
|
| 227 |
+
flash_attn==2.7.4.post1
|
| 228 |
+
mamba-ssm==2.2.4
|
| 229 |
+
hnet==0.0.1
|
| 230 |
+
autocommand==2.2.2
|
| 231 |
+
backports.tarfile==1.2.0
|
| 232 |
+
importlib_metadata==8.0.0
|
| 233 |
+
inflect==7.3.1
|
| 234 |
+
jaraco.collections==5.1.0
|
| 235 |
+
jaraco.context==5.3.0
|
| 236 |
+
jaraco.functools==4.0.1
|
| 237 |
+
jaraco.text==3.12.1
|
| 238 |
+
more-itertools==10.3.0
|
| 239 |
+
packaging==24.2
|
| 240 |
+
platformdirs==4.2.2
|
| 241 |
+
tomli==2.0.1
|
| 242 |
+
typeguard==4.3.0
|
| 243 |
+
typing_extensions==4.12.2
|
| 244 |
+
wheel==0.45.1
|
| 245 |
+
zipp==3.19.2
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/wandb-metadata.json
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"os": "Linux-5.4.0-176-generic-x86_64-with-glibc2.35",
|
| 3 |
+
"python": "CPython 3.12.0",
|
| 4 |
+
"startedAt": "2026-04-25T17:51:24.726114Z",
|
| 5 |
+
"args": [
|
| 6 |
+
"tracking=wandb",
|
| 7 |
+
"tracking.project=code-completion_lr-sweep",
|
| 8 |
+
"tracking.run_name=pythia_1b_lr_1e-5",
|
| 9 |
+
"training.lr=1e-5",
|
| 10 |
+
"paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5",
|
| 11 |
+
"model=pythia_1b",
|
| 12 |
+
"data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full"
|
| 13 |
+
],
|
| 14 |
+
"program": "/workspace/byte-llms-code/code_completion_exp/train_pythia/train.py",
|
| 15 |
+
"codePath": "code_completion_exp/train_pythia/train.py",
|
| 16 |
+
"codePathLocal": "train.py",
|
| 17 |
+
"git": {
|
| 18 |
+
"remote": "https://github.com/naryst/byte-llms-code.git",
|
| 19 |
+
"commit": "f111e13281aa0dc58e24302edab5b0d5c2024586"
|
| 20 |
+
},
|
| 21 |
+
"email": "nikita@local.ru",
|
| 22 |
+
"root": "/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5",
|
| 23 |
+
"host": "7504e518d24a",
|
| 24 |
+
"executable": "/venv/bytellm/bin/python",
|
| 25 |
+
"cpu_count": 64,
|
| 26 |
+
"cpu_count_logical": 128,
|
| 27 |
+
"gpu": "NVIDIA H100 80GB HBM3",
|
| 28 |
+
"gpu_count": 4,
|
| 29 |
+
"disk": {
|
| 30 |
+
"/": {
|
| 31 |
+
"total": "265214230528",
|
| 32 |
+
"used": "37788938240"
|
| 33 |
+
}
|
| 34 |
+
},
|
| 35 |
+
"memory": {
|
| 36 |
+
"total": "1081679683584"
|
| 37 |
+
},
|
| 38 |
+
"gpu_nvidia": [
|
| 39 |
+
{
|
| 40 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 41 |
+
"memoryTotal": "85520809984",
|
| 42 |
+
"cudaCores": 16896,
|
| 43 |
+
"architecture": "Hopper",
|
| 44 |
+
"uuid": "GPU-b60cdcab-2033-2009-41de-be646c953a20"
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 48 |
+
"memoryTotal": "85520809984",
|
| 49 |
+
"cudaCores": 16896,
|
| 50 |
+
"architecture": "Hopper",
|
| 51 |
+
"uuid": "GPU-9982b420-4520-4238-c378-ec5a46015474"
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 55 |
+
"memoryTotal": "85520809984",
|
| 56 |
+
"cudaCores": 16896,
|
| 57 |
+
"architecture": "Hopper",
|
| 58 |
+
"uuid": "GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f"
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 62 |
+
"memoryTotal": "85520809984",
|
| 63 |
+
"cudaCores": 16896,
|
| 64 |
+
"architecture": "Hopper",
|
| 65 |
+
"uuid": "GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134"
|
| 66 |
+
}
|
| 67 |
+
],
|
| 68 |
+
"cudaVersion": "12.2",
|
| 69 |
+
"writerId": "9lruqtkrywy3tnenzuf4volhpmtbyljs"
|
| 70 |
+
}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/files/wandb-summary.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"epoch/time":159.64458012580872,"train/loss":1.1768391132354736,"_step":620,"_wandb":{"runtime":181},"train/loss_avg":1.124989991397747,"_timestamp":1.7771396523165252e+09,"train/lr":1.0000000000000002e-06,"train/epoch":1,"_runtime":181,"train/step_time":0.27210848331451415,"epoch/loss":1.1239365045934915}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug-core.log
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T17:51:24.807120908Z","level":"INFO","msg":"main: starting server","port-filename":"/tmp/tmpzej7_4qt/port-54725.txt","pid":54725,"log-level":0,"disable-analytics":false,"shutdown-on-parent-exit":false,"enable-dcgm-profiling":false}
|
| 2 |
+
{"time":"2026-04-25T17:51:24.807514794Z","level":"INFO","msg":"server: will exit if parent process dies","ppid":54725}
|
| 3 |
+
{"time":"2026-04-25T17:51:24.80751257Z","level":"INFO","msg":"server: accepting connections","addr":{"Name":"/tmp/wandb-54725-54824-326808975/socket","Net":"unix"}}
|
| 4 |
+
{"time":"2026-04-25T17:51:24.995875436Z","level":"INFO","msg":"connection: ManageConnectionData: new connection created","id":"1(@)"}
|
| 5 |
+
{"time":"2026-04-25T17:51:25.015873709Z","level":"INFO","msg":"handleInformInit: received","streamId":"r2u423d8","id":"1(@)"}
|
| 6 |
+
{"time":"2026-04-25T17:51:25.469886148Z","level":"INFO","msg":"handleInformInit: stream started","streamId":"r2u423d8","id":"1(@)"}
|
| 7 |
+
{"time":"2026-04-25T17:54:27.891926069Z","level":"INFO","msg":"handleInformFinish: finish message received","streamId":"r2u423d8","id":"1(@)"}
|
| 8 |
+
{"time":"2026-04-25T17:54:27.957190396Z","level":"INFO","msg":"handleInformFinish: stream closed","streamId":"r2u423d8","id":"1(@)"}
|
| 9 |
+
{"time":"2026-04-25T17:54:27.957208999Z","level":"INFO","msg":"handleInformTeardown: server teardown initiated","id":"1(@)"}
|
| 10 |
+
{"time":"2026-04-25T17:54:27.957215783Z","level":"INFO","msg":"handleInformTeardown: server shutdown complete","id":"1(@)"}
|
| 11 |
+
{"time":"2026-04-25T17:54:27.95722158Z","level":"INFO","msg":"server is shutting down"}
|
| 12 |
+
{"time":"2026-04-25T17:54:27.957223721Z","level":"INFO","msg":"connection: closing","id":"1(@)"}
|
| 13 |
+
{"time":"2026-04-25T17:54:27.957266168Z","level":"INFO","msg":"server: listener closed","addr":{"Name":"/tmp/wandb-54725-54824-326808975/socket","Net":"unix"}}
|
| 14 |
+
{"time":"2026-04-25T17:54:27.957293271Z","level":"INFO","msg":"connection: closed successfully","id":"1(@)"}
|
| 15 |
+
{"time":"2026-04-25T17:54:27.957298535Z","level":"INFO","msg":"connection: ManageConnectionData: connection closed","id":"1(@)"}
|
| 16 |
+
{"time":"2026-04-25T17:54:27.957303164Z","level":"INFO","msg":"server is closed"}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug-internal.log
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T17:51:25.015982883Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T17:51:25.469744806Z","level":"INFO","msg":"stream: created new stream","id":"r2u423d8"}
|
| 3 |
+
{"time":"2026-04-25T17:51:25.469846535Z","level":"INFO","msg":"handler: started","stream_id":"r2u423d8"}
|
| 4 |
+
{"time":"2026-04-25T17:51:25.469879817Z","level":"INFO","msg":"stream: started","id":"r2u423d8"}
|
| 5 |
+
{"time":"2026-04-25T17:51:25.46988944Z","level":"INFO","msg":"writer: started","stream_id":"r2u423d8"}
|
| 6 |
+
{"time":"2026-04-25T17:51:25.469891033Z","level":"INFO","msg":"sender: started","stream_id":"r2u423d8"}
|
| 7 |
+
{"time":"2026-04-25T17:51:25.587517039Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
| 8 |
+
{"time":"2026-04-25T17:54:27.78379464Z","level":"INFO","msg":"fileTransfer: Close: file transfer manager closed"}
|
| 9 |
+
{"time":"2026-04-25T17:54:27.889374754Z","level":"INFO","msg":"handler: operation stats","stats":{}}
|
| 10 |
+
{"time":"2026-04-25T17:54:27.891951293Z","level":"INFO","msg":"stream: closing","id":"r2u423d8"}
|
| 11 |
+
{"time":"2026-04-25T17:54:27.89196Z","level":"INFO","msg":"handler: closed","stream_id":"r2u423d8"}
|
| 12 |
+
{"time":"2026-04-25T17:54:27.892010921Z","level":"INFO","msg":"sender: closed","stream_id":"r2u423d8"}
|
| 13 |
+
{"time":"2026-04-25T17:54:27.892023099Z","level":"INFO","msg":"stream: closed","id":"r2u423d8"}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug.log
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_setup.py:_flush():81] Configure stats pid to 54725
|
| 3 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug.log
|
| 5 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/logs/debug-internal.log
|
| 6 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'name': 'EleutherAI/pythia-1b', 'checkpoint_path': None, 'from_scratch': False}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 12, 'gradient_accumulation_steps': 4, 'lr': 1e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 4, 'pin_memory': True, 'max_train_samples': 20000, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 3000, 'eval_interval': 1000, 'save_every_epoch': True}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'pythia_1b_lr_1e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_pythia/train.py'}}
|
| 9 |
+
2026-04-25 17:51:24,727 INFO MainThread:54725 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 17:51:24,996 INFO MainThread:54725 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 17:51:25,014 INFO MainThread:54725 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 17:51:25,017 INFO MainThread:54725 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 17:51:25,031 INFO MainThread:54725 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 17:51:25,586 INFO MainThread:54725 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 17:51:25,744 INFO MainThread:54725 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 17:51:25,744 INFO MainThread:54725 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 17:51:25,744 INFO MainThread:54725 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 17:51:25,744 INFO MainThread:54725 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 17:51:25,747 INFO MainThread:54725 [wandb_init.py:init():1084] run started, returning control to user process
|
| 20 |
+
2026-04-25 17:54:27,331 INFO MainThread:54725 [wandb_run.py:_finish():2295] finishing run nikita/code-completion_lr-sweep/r2u423d8
|
| 21 |
+
2026-04-25 17:54:27,331 INFO MainThread:54725 [wandb_run.py:_atexit_cleanup():2494] got exitcode: 0
|
| 22 |
+
2026-04-25 17:54:27,332 INFO MainThread:54725 [wandb_run.py:_restore():2476] restore
|
| 23 |
+
2026-04-25 17:54:27,332 INFO MainThread:54725 [wandb_run.py:_restore():2482] restore done
|
| 24 |
+
2026-04-25 17:54:27,891 INFO MainThread:54725 [wandb_run.py:_footer_sync_info():3870] logging synced files
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_175124-r2u423d8/run-r2u423d8.wandb
ADDED
|
Binary file (88.9 kB). View file
|
|
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/output.log
ADDED
|
@@ -0,0 +1,1056 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 18:06:10] Initializing tokenizer...
|
| 2 |
+
[2026-04-25 18:06:11] Loading model...
|
| 3 |
+
`torch_dtype` is deprecated! Use `dtype` instead!
|
| 4 |
+
[2026-04-25 18:06:14] Loaded pretrained: EleutherAI/pythia-1b
|
| 5 |
+
[2026-04-25 18:06:14] Total params: 1,011,781,632
|
| 6 |
+
[2026-04-25 18:06:14] Trainable params: 1,011,781,632
|
| 7 |
+
[2026-04-25 18:06:14] Creating dataloaders...
|
| 8 |
+
[2026-04-25 18:06:14] Train dataset size: 316397
|
| 9 |
+
[2026-04-25 18:06:14] Train batches per epoch (before DDP split): 79100
|
| 10 |
+
[2026-04-25 18:06:14] Validation dataset size: 2000
|
| 11 |
+
[2026-04-25 18:06:14] Validation batches: 167
|
| 12 |
+
[2026-04-25 18:06:14] Creating optimizer...
|
| 13 |
+
[2026-04-25 18:06:14] Total steps: 9887, Steps per epoch: 39550
|
| 14 |
+
[2026-04-25 18:06:14] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 15 |
+
[2026-04-25 18:06:15] Train batches per epoch (after DDP split): 39550
|
| 16 |
+
[2026-04-25 18:06:15] Starting training...
|
| 17 |
+
[2026-04-25 18:06:15]
|
| 18 |
+
============================================================
|
| 19 |
+
[2026-04-25 18:06:15] EPOCH 1/1
|
| 20 |
+
[2026-04-25 18:06:15] ============================================================
|
| 21 |
+
[2026-04-25 18:06:18] Epoch 1 | Step 10 | Loss: 2.6460 | LR: 1.18e-06
|
| 22 |
+
[2026-04-25 18:06:21] Epoch 1 | Step 20 | Loss: 2.6636 | LR: 1.36e-06
|
| 23 |
+
[2026-04-25 18:06:23] Epoch 1 | Step 30 | Loss: 2.5925 | LR: 1.55e-06
|
| 24 |
+
[2026-04-25 18:06:26] Epoch 1 | Step 40 | Loss: 2.5481 | LR: 1.73e-06
|
| 25 |
+
[2026-04-25 18:06:29] Epoch 1 | Step 50 | Loss: 2.4824 | LR: 1.91e-06
|
| 26 |
+
[2026-04-25 18:06:31] Epoch 1 | Step 60 | Loss: 2.4141 | LR: 2.09e-06
|
| 27 |
+
[2026-04-25 18:06:34] Epoch 1 | Step 70 | Loss: 2.3474 | LR: 2.28e-06
|
| 28 |
+
[2026-04-25 18:06:36] Epoch 1 | Step 80 | Loss: 2.3035 | LR: 2.46e-06
|
| 29 |
+
[2026-04-25 18:06:39] Epoch 1 | Step 90 | Loss: 2.2299 | LR: 2.64e-06
|
| 30 |
+
[2026-04-25 18:06:41] Epoch 1 | Step 100 | Loss: 2.1684 | LR: 2.82e-06
|
| 31 |
+
[2026-04-25 18:06:44] Epoch 1 | Step 110 | Loss: 2.1276 | LR: 3.00e-06
|
| 32 |
+
[2026-04-25 18:06:46] Epoch 1 | Step 120 | Loss: 2.0775 | LR: 3.19e-06
|
| 33 |
+
[2026-04-25 18:06:49] Epoch 1 | Step 130 | Loss: 2.0371 | LR: 3.37e-06
|
| 34 |
+
[2026-04-25 18:06:51] Epoch 1 | Step 140 | Loss: 1.9966 | LR: 3.55e-06
|
| 35 |
+
[2026-04-25 18:06:54] Epoch 1 | Step 150 | Loss: 1.9444 | LR: 3.73e-06
|
| 36 |
+
[2026-04-25 18:06:56] Epoch 1 | Step 160 | Loss: 1.9014 | LR: 3.91e-06
|
| 37 |
+
[2026-04-25 18:06:59] Epoch 1 | Step 170 | Loss: 1.8649 | LR: 4.10e-06
|
| 38 |
+
[2026-04-25 18:07:01] Epoch 1 | Step 180 | Loss: 1.8227 | LR: 4.28e-06
|
| 39 |
+
[2026-04-25 18:07:03] Epoch 1 | Step 190 | Loss: 1.7938 | LR: 4.46e-06
|
| 40 |
+
[2026-04-25 18:07:06] Epoch 1 | Step 200 | Loss: 1.7650 | LR: 4.64e-06
|
| 41 |
+
[2026-04-25 18:07:08] Epoch 1 | Step 210 | Loss: 1.7455 | LR: 4.83e-06
|
| 42 |
+
[2026-04-25 18:07:11] Epoch 1 | Step 220 | Loss: 1.7219 | LR: 5.01e-06
|
| 43 |
+
[2026-04-25 18:07:13] Epoch 1 | Step 230 | Loss: 1.6927 | LR: 5.19e-06
|
| 44 |
+
[2026-04-25 18:07:16] Epoch 1 | Step 240 | Loss: 1.6687 | LR: 5.37e-06
|
| 45 |
+
[2026-04-25 18:07:18] Epoch 1 | Step 250 | Loss: 1.6482 | LR: 5.55e-06
|
| 46 |
+
[2026-04-25 18:07:21] Epoch 1 | Step 260 | Loss: 1.6359 | LR: 5.74e-06
|
| 47 |
+
[2026-04-25 18:07:23] Epoch 1 | Step 270 | Loss: 1.6153 | LR: 5.92e-06
|
| 48 |
+
[2026-04-25 18:07:26] Epoch 1 | Step 280 | Loss: 1.5946 | LR: 6.10e-06
|
| 49 |
+
[2026-04-25 18:07:28] Epoch 1 | Step 290 | Loss: 1.5788 | LR: 6.28e-06
|
| 50 |
+
[2026-04-25 18:07:31] Epoch 1 | Step 300 | Loss: 1.5643 | LR: 6.47e-06
|
| 51 |
+
[2026-04-25 18:07:33] Epoch 1 | Step 310 | Loss: 1.5481 | LR: 6.65e-06
|
| 52 |
+
[2026-04-25 18:07:36] Epoch 1 | Step 320 | Loss: 1.5323 | LR: 6.83e-06
|
| 53 |
+
[2026-04-25 18:07:38] Epoch 1 | Step 330 | Loss: 1.5169 | LR: 7.01e-06
|
| 54 |
+
[2026-04-25 18:07:41] Epoch 1 | Step 340 | Loss: 1.5035 | LR: 7.19e-06
|
| 55 |
+
[2026-04-25 18:07:43] Epoch 1 | Step 350 | Loss: 1.4927 | LR: 7.38e-06
|
| 56 |
+
[2026-04-25 18:07:46] Epoch 1 | Step 360 | Loss: 1.4786 | LR: 7.56e-06
|
| 57 |
+
[2026-04-25 18:07:48] Epoch 1 | Step 370 | Loss: 1.4647 | LR: 7.74e-06
|
| 58 |
+
[2026-04-25 18:07:51] Epoch 1 | Step 380 | Loss: 1.4527 | LR: 7.92e-06
|
| 59 |
+
[2026-04-25 18:07:54] Epoch 1 | Step 390 | Loss: 1.4421 | LR: 8.11e-06
|
| 60 |
+
[2026-04-25 18:07:56] Epoch 1 | Step 400 | Loss: 1.4326 | LR: 8.29e-06
|
| 61 |
+
[2026-04-25 18:07:59] Epoch 1 | Step 410 | Loss: 1.4242 | LR: 8.47e-06
|
| 62 |
+
[2026-04-25 18:08:01] Epoch 1 | Step 420 | Loss: 1.4156 | LR: 8.65e-06
|
| 63 |
+
[2026-04-25 18:08:04] Epoch 1 | Step 430 | Loss: 1.4108 | LR: 8.83e-06
|
| 64 |
+
[2026-04-25 18:08:06] Epoch 1 | Step 440 | Loss: 1.4000 | LR: 9.02e-06
|
| 65 |
+
[2026-04-25 18:08:09] Epoch 1 | Step 450 | Loss: 1.3918 | LR: 9.20e-06
|
| 66 |
+
[2026-04-25 18:08:12] Epoch 1 | Step 460 | Loss: 1.3851 | LR: 9.38e-06
|
| 67 |
+
[2026-04-25 18:08:14] Epoch 1 | Step 470 | Loss: 1.3768 | LR: 9.56e-06
|
| 68 |
+
[2026-04-25 18:08:17] Epoch 1 | Step 480 | Loss: 1.3712 | LR: 9.74e-06
|
| 69 |
+
[2026-04-25 18:08:19] Epoch 1 | Step 490 | Loss: 1.3631 | LR: 9.93e-06
|
| 70 |
+
[2026-04-25 18:08:22] Epoch 1 | Step 500 | Loss: 1.3555 | LR: 1.00e-05
|
| 71 |
+
[2026-04-25 18:08:24] Epoch 1 | Step 510 | Loss: 1.3482 | LR: 1.00e-05
|
| 72 |
+
[2026-04-25 18:08:26] Epoch 1 | Step 520 | Loss: 1.3431 | LR: 1.00e-05
|
| 73 |
+
[2026-04-25 18:08:29] Epoch 1 | Step 530 | Loss: 1.3361 | LR: 1.00e-05
|
| 74 |
+
[2026-04-25 18:08:32] Epoch 1 | Step 540 | Loss: 1.3284 | LR: 1.00e-05
|
| 75 |
+
[2026-04-25 18:08:34] Epoch 1 | Step 550 | Loss: 1.3221 | LR: 1.00e-05
|
| 76 |
+
[2026-04-25 18:08:37] Epoch 1 | Step 560 | Loss: 1.3162 | LR: 1.00e-05
|
| 77 |
+
[2026-04-25 18:08:39] Epoch 1 | Step 570 | Loss: 1.3123 | LR: 1.00e-05
|
| 78 |
+
[2026-04-25 18:08:42] Epoch 1 | Step 580 | Loss: 1.3084 | LR: 1.00e-05
|
| 79 |
+
[2026-04-25 18:08:44] Epoch 1 | Step 590 | Loss: 1.3049 | LR: 1.00e-05
|
| 80 |
+
[2026-04-25 18:08:47] Epoch 1 | Step 600 | Loss: 1.3009 | LR: 1.00e-05
|
| 81 |
+
[2026-04-25 18:08:49] Epoch 1 | Step 610 | Loss: 1.2988 | LR: 1.00e-05
|
| 82 |
+
[2026-04-25 18:08:52] Epoch 1 | Step 620 | Loss: 1.2954 | LR: 1.00e-05
|
| 83 |
+
[2026-04-25 18:08:54] Epoch 1 | Step 630 | Loss: 1.2909 | LR: 1.00e-05
|
| 84 |
+
[2026-04-25 18:08:56] Epoch 1 | Step 640 | Loss: 1.2881 | LR: 1.00e-05
|
| 85 |
+
[2026-04-25 18:08:59] Epoch 1 | Step 650 | Loss: 1.2841 | LR: 1.00e-05
|
| 86 |
+
[2026-04-25 18:09:01] Epoch 1 | Step 660 | Loss: 1.2816 | LR: 1.00e-05
|
| 87 |
+
[2026-04-25 18:09:04] Epoch 1 | Step 670 | Loss: 1.2773 | LR: 1.00e-05
|
| 88 |
+
[2026-04-25 18:09:06] Epoch 1 | Step 680 | Loss: 1.2744 | LR: 1.00e-05
|
| 89 |
+
[2026-04-25 18:09:09] Epoch 1 | Step 690 | Loss: 1.2705 | LR: 1.00e-05
|
| 90 |
+
[2026-04-25 18:09:11] Epoch 1 | Step 700 | Loss: 1.2688 | LR: 1.00e-05
|
| 91 |
+
[2026-04-25 18:09:14] Epoch 1 | Step 710 | Loss: 1.2656 | LR: 1.00e-05
|
| 92 |
+
[2026-04-25 18:09:16] Epoch 1 | Step 720 | Loss: 1.2626 | LR: 1.00e-05
|
| 93 |
+
[2026-04-25 18:09:19] Epoch 1 | Step 730 | Loss: 1.2601 | LR: 1.00e-05
|
| 94 |
+
[2026-04-25 18:09:21] Epoch 1 | Step 740 | Loss: 1.2567 | LR: 1.00e-05
|
| 95 |
+
[2026-04-25 18:09:24] Epoch 1 | Step 750 | Loss: 1.2531 | LR: 1.00e-05
|
| 96 |
+
[2026-04-25 18:09:26] Epoch 1 | Step 760 | Loss: 1.2520 | LR: 1.00e-05
|
| 97 |
+
[2026-04-25 18:09:29] Epoch 1 | Step 770 | Loss: 1.2511 | LR: 1.00e-05
|
| 98 |
+
[2026-04-25 18:09:31] Epoch 1 | Step 780 | Loss: 1.2486 | LR: 1.00e-05
|
| 99 |
+
[2026-04-25 18:09:34] Epoch 1 | Step 790 | Loss: 1.2458 | LR: 1.00e-05
|
| 100 |
+
[2026-04-25 18:09:36] Epoch 1 | Step 800 | Loss: 1.2425 | LR: 1.00e-05
|
| 101 |
+
[2026-04-25 18:09:39] Epoch 1 | Step 810 | Loss: 1.2401 | LR: 1.00e-05
|
| 102 |
+
[2026-04-25 18:09:41] Epoch 1 | Step 820 | Loss: 1.2374 | LR: 1.00e-05
|
| 103 |
+
[2026-04-25 18:09:44] Epoch 1 | Step 830 | Loss: 1.2355 | LR: 1.00e-05
|
| 104 |
+
[2026-04-25 18:09:46] Epoch 1 | Step 840 | Loss: 1.2328 | LR: 1.00e-05
|
| 105 |
+
[2026-04-25 18:09:49] Epoch 1 | Step 850 | Loss: 1.2294 | LR: 1.00e-05
|
| 106 |
+
[2026-04-25 18:09:51] Epoch 1 | Step 860 | Loss: 1.2281 | LR: 1.00e-05
|
| 107 |
+
[2026-04-25 18:09:54] Epoch 1 | Step 870 | Loss: 1.2272 | LR: 1.00e-05
|
| 108 |
+
[2026-04-25 18:09:56] Epoch 1 | Step 880 | Loss: 1.2257 | LR: 1.00e-05
|
| 109 |
+
[2026-04-25 18:09:59] Epoch 1 | Step 890 | Loss: 1.2240 | LR: 1.00e-05
|
| 110 |
+
[2026-04-25 18:10:01] Epoch 1 | Step 900 | Loss: 1.2217 | LR: 1.00e-05
|
| 111 |
+
[2026-04-25 18:10:04] Epoch 1 | Step 910 | Loss: 1.2203 | LR: 1.00e-05
|
| 112 |
+
[2026-04-25 18:10:06] Epoch 1 | Step 920 | Loss: 1.2198 | LR: 1.00e-05
|
| 113 |
+
[2026-04-25 18:10:09] Epoch 1 | Step 930 | Loss: 1.2180 | LR: 1.00e-05
|
| 114 |
+
[2026-04-25 18:10:11] Epoch 1 | Step 940 | Loss: 1.2167 | LR: 1.00e-05
|
| 115 |
+
[2026-04-25 18:10:14] Epoch 1 | Step 950 | Loss: 1.2136 | LR: 1.00e-05
|
| 116 |
+
[2026-04-25 18:10:17] Epoch 1 | Step 960 | Loss: 1.2117 | LR: 1.00e-05
|
| 117 |
+
[2026-04-25 18:10:19] Epoch 1 | Step 970 | Loss: 1.2102 | LR: 1.00e-05
|
| 118 |
+
[2026-04-25 18:10:22] Epoch 1 | Step 980 | Loss: 1.2082 | LR: 1.00e-05
|
| 119 |
+
[2026-04-25 18:10:25] Epoch 1 | Step 990 | Loss: 1.2053 | LR: 1.00e-05
|
| 120 |
+
[2026-04-25 18:10:27] Epoch 1 | Step 1000 | Loss: 1.2034 | LR: 1.00e-05
|
| 121 |
+
[2026-04-25 18:10:30] Epoch 1 | Step 1010 | Loss: 1.2027 | LR: 1.00e-05
|
| 122 |
+
[2026-04-25 18:10:32] Epoch 1 | Step 1020 | Loss: 1.2012 | LR: 1.00e-05
|
| 123 |
+
[2026-04-25 18:10:34] Epoch 1 | Step 1030 | Loss: 1.2006 | LR: 1.00e-05
|
| 124 |
+
[2026-04-25 18:10:37] Epoch 1 | Step 1040 | Loss: 1.1980 | LR: 1.00e-05
|
| 125 |
+
[2026-04-25 18:10:39] Epoch 1 | Step 1050 | Loss: 1.1962 | LR: 1.00e-05
|
| 126 |
+
[2026-04-25 18:10:42] Epoch 1 | Step 1060 | Loss: 1.1930 | LR: 1.00e-05
|
| 127 |
+
[2026-04-25 18:10:44] Epoch 1 | Step 1070 | Loss: 1.1913 | LR: 1.00e-05
|
| 128 |
+
[2026-04-25 18:10:47] Epoch 1 | Step 1080 | Loss: 1.1911 | LR: 1.00e-05
|
| 129 |
+
[2026-04-25 18:10:49] Epoch 1 | Step 1090 | Loss: 1.1918 | LR: 1.00e-05
|
| 130 |
+
[2026-04-25 18:10:52] Epoch 1 | Step 1100 | Loss: 1.1904 | LR: 1.00e-05
|
| 131 |
+
[2026-04-25 18:10:54] Epoch 1 | Step 1110 | Loss: 1.1898 | LR: 1.00e-05
|
| 132 |
+
[2026-04-25 18:10:56] Epoch 1 | Step 1120 | Loss: 1.1893 | LR: 1.00e-05
|
| 133 |
+
[2026-04-25 18:10:59] Epoch 1 | Step 1130 | Loss: 1.1886 | LR: 1.00e-05
|
| 134 |
+
[2026-04-25 18:11:02] Epoch 1 | Step 1140 | Loss: 1.1871 | LR: 1.00e-05
|
| 135 |
+
[2026-04-25 18:11:05] Epoch 1 | Step 1150 | Loss: 1.1844 | LR: 1.00e-05
|
| 136 |
+
[2026-04-25 18:11:07] Epoch 1 | Step 1160 | Loss: 1.1840 | LR: 1.00e-05
|
| 137 |
+
[2026-04-25 18:11:09] Epoch 1 | Step 1170 | Loss: 1.1839 | LR: 1.00e-05
|
| 138 |
+
[2026-04-25 18:11:12] Epoch 1 | Step 1180 | Loss: 1.1826 | LR: 1.00e-05
|
| 139 |
+
[2026-04-25 18:11:15] Epoch 1 | Step 1190 | Loss: 1.1819 | LR: 1.00e-05
|
| 140 |
+
[2026-04-25 18:11:17] Epoch 1 | Step 1200 | Loss: 1.1801 | LR: 1.00e-05
|
| 141 |
+
[2026-04-25 18:11:20] Epoch 1 | Step 1210 | Loss: 1.1777 | LR: 1.00e-05
|
| 142 |
+
[2026-04-25 18:11:22] Epoch 1 | Step 1220 | Loss: 1.1754 | LR: 1.00e-05
|
| 143 |
+
[2026-04-25 18:11:25] Epoch 1 | Step 1230 | Loss: 1.1748 | LR: 1.00e-05
|
| 144 |
+
[2026-04-25 18:11:28] Epoch 1 | Step 1240 | Loss: 1.1738 | LR: 1.00e-05
|
| 145 |
+
[2026-04-25 18:11:30] Epoch 1 | Step 1250 | Loss: 1.1717 | LR: 1.00e-05
|
| 146 |
+
[2026-04-25 18:11:33] Epoch 1 | Step 1260 | Loss: 1.1706 | LR: 1.00e-05
|
| 147 |
+
[2026-04-25 18:11:35] Epoch 1 | Step 1270 | Loss: 1.1681 | LR: 1.00e-05
|
| 148 |
+
[2026-04-25 18:11:37] Epoch 1 | Step 1280 | Loss: 1.1678 | LR: 1.00e-05
|
| 149 |
+
[2026-04-25 18:11:40] Epoch 1 | Step 1290 | Loss: 1.1673 | LR: 1.00e-05
|
| 150 |
+
[2026-04-25 18:11:42] Epoch 1 | Step 1300 | Loss: 1.1655 | LR: 1.00e-05
|
| 151 |
+
[2026-04-25 18:11:45] Epoch 1 | Step 1310 | Loss: 1.1645 | LR: 1.00e-05
|
| 152 |
+
[2026-04-25 18:11:47] Epoch 1 | Step 1320 | Loss: 1.1639 | LR: 1.00e-05
|
| 153 |
+
[2026-04-25 18:11:50] Epoch 1 | Step 1330 | Loss: 1.1622 | LR: 1.00e-05
|
| 154 |
+
[2026-04-25 18:11:52] Epoch 1 | Step 1340 | Loss: 1.1614 | LR: 1.00e-05
|
| 155 |
+
[2026-04-25 18:11:55] Epoch 1 | Step 1350 | Loss: 1.1612 | LR: 1.00e-05
|
| 156 |
+
[2026-04-25 18:11:57] Epoch 1 | Step 1360 | Loss: 1.1602 | LR: 1.00e-05
|
| 157 |
+
[2026-04-25 18:12:00] Epoch 1 | Step 1370 | Loss: 1.1591 | LR: 1.00e-05
|
| 158 |
+
[2026-04-25 18:12:03] Epoch 1 | Step 1380 | Loss: 1.1592 | LR: 1.00e-05
|
| 159 |
+
[2026-04-25 18:12:05] Epoch 1 | Step 1390 | Loss: 1.1589 | LR: 1.00e-05
|
| 160 |
+
[2026-04-25 18:12:08] Epoch 1 | Step 1400 | Loss: 1.1582 | LR: 1.00e-05
|
| 161 |
+
[2026-04-25 18:12:10] Epoch 1 | Step 1410 | Loss: 1.1563 | LR: 1.00e-05
|
| 162 |
+
[2026-04-25 18:12:13] Epoch 1 | Step 1420 | Loss: 1.1555 | LR: 1.00e-05
|
| 163 |
+
[2026-04-25 18:12:15] Epoch 1 | Step 1430 | Loss: 1.1545 | LR: 1.00e-05
|
| 164 |
+
[2026-04-25 18:12:18] Epoch 1 | Step 1440 | Loss: 1.1532 | LR: 1.00e-05
|
| 165 |
+
[2026-04-25 18:12:20] Epoch 1 | Step 1450 | Loss: 1.1522 | LR: 1.00e-05
|
| 166 |
+
[2026-04-25 18:12:23] Epoch 1 | Step 1460 | Loss: 1.1504 | LR: 1.00e-05
|
| 167 |
+
[2026-04-25 18:12:25] Epoch 1 | Step 1470 | Loss: 1.1500 | LR: 1.00e-05
|
| 168 |
+
[2026-04-25 18:12:28] Epoch 1 | Step 1480 | Loss: 1.1496 | LR: 1.00e-05
|
| 169 |
+
[2026-04-25 18:12:30] Epoch 1 | Step 1490 | Loss: 1.1492 | LR: 1.00e-05
|
| 170 |
+
[2026-04-25 18:12:33] Epoch 1 | Step 1500 | Loss: 1.1478 | LR: 1.00e-05
|
| 171 |
+
[2026-04-25 18:12:36] Epoch 1 | Step 1510 | Loss: 1.1475 | LR: 1.00e-05
|
| 172 |
+
[2026-04-25 18:12:38] Epoch 1 | Step 1520 | Loss: 1.1470 | LR: 1.00e-05
|
| 173 |
+
[2026-04-25 18:12:40] Epoch 1 | Step 1530 | Loss: 1.1461 | LR: 1.00e-05
|
| 174 |
+
[2026-04-25 18:12:43] Epoch 1 | Step 1540 | Loss: 1.1459 | LR: 1.00e-05
|
| 175 |
+
[2026-04-25 18:12:45] Epoch 1 | Step 1550 | Loss: 1.1454 | LR: 1.00e-05
|
| 176 |
+
[2026-04-25 18:12:48] Epoch 1 | Step 1560 | Loss: 1.1443 | LR: 1.00e-05
|
| 177 |
+
[2026-04-25 18:12:50] Epoch 1 | Step 1570 | Loss: 1.1445 | LR: 1.00e-05
|
| 178 |
+
[2026-04-25 18:12:53] Epoch 1 | Step 1580 | Loss: 1.1432 | LR: 1.00e-05
|
| 179 |
+
[2026-04-25 18:12:55] Epoch 1 | Step 1590 | Loss: 1.1427 | LR: 1.00e-05
|
| 180 |
+
[2026-04-25 18:12:58] Epoch 1 | Step 1600 | Loss: 1.1422 | LR: 1.00e-05
|
| 181 |
+
[2026-04-25 18:13:00] Epoch 1 | Step 1610 | Loss: 1.1405 | LR: 1.00e-05
|
| 182 |
+
[2026-04-25 18:13:02] Epoch 1 | Step 1620 | Loss: 1.1392 | LR: 1.00e-05
|
| 183 |
+
[2026-04-25 18:13:05] Epoch 1 | Step 1630 | Loss: 1.1392 | LR: 1.00e-05
|
| 184 |
+
[2026-04-25 18:13:07] Epoch 1 | Step 1640 | Loss: 1.1384 | LR: 1.00e-05
|
| 185 |
+
[2026-04-25 18:13:10] Epoch 1 | Step 1650 | Loss: 1.1373 | LR: 1.00e-05
|
| 186 |
+
[2026-04-25 18:13:12] Epoch 1 | Step 1660 | Loss: 1.1363 | LR: 1.00e-05
|
| 187 |
+
[2026-04-25 18:13:14] Epoch 1 | Step 1670 | Loss: 1.1363 | LR: 1.00e-05
|
| 188 |
+
[2026-04-25 18:13:17] Epoch 1 | Step 1680 | Loss: 1.1360 | LR: 1.00e-05
|
| 189 |
+
[2026-04-25 18:13:20] Epoch 1 | Step 1690 | Loss: 1.1352 | LR: 1.00e-05
|
| 190 |
+
[2026-04-25 18:13:22] Epoch 1 | Step 1700 | Loss: 1.1334 | LR: 1.00e-05
|
| 191 |
+
[2026-04-25 18:13:25] Epoch 1 | Step 1710 | Loss: 1.1326 | LR: 1.00e-05
|
| 192 |
+
[2026-04-25 18:13:27] Epoch 1 | Step 1720 | Loss: 1.1316 | LR: 1.00e-05
|
| 193 |
+
[2026-04-25 18:13:29] Epoch 1 | Step 1730 | Loss: 1.1309 | LR: 1.00e-05
|
| 194 |
+
[2026-04-25 18:13:32] Epoch 1 | Step 1740 | Loss: 1.1307 | LR: 1.00e-05
|
| 195 |
+
[2026-04-25 18:13:34] Epoch 1 | Step 1750 | Loss: 1.1312 | LR: 1.00e-05
|
| 196 |
+
[2026-04-25 18:13:37] Epoch 1 | Step 1760 | Loss: 1.1301 | LR: 1.00e-05
|
| 197 |
+
[2026-04-25 18:13:39] Epoch 1 | Step 1770 | Loss: 1.1300 | LR: 1.00e-05
|
| 198 |
+
[2026-04-25 18:13:42] Epoch 1 | Step 1780 | Loss: 1.1292 | LR: 1.00e-05
|
| 199 |
+
[2026-04-25 18:13:44] Epoch 1 | Step 1790 | Loss: 1.1289 | LR: 1.00e-05
|
| 200 |
+
[2026-04-25 18:13:47] Epoch 1 | Step 1800 | Loss: 1.1278 | LR: 1.00e-05
|
| 201 |
+
[2026-04-25 18:13:50] Epoch 1 | Step 1810 | Loss: 1.1274 | LR: 1.00e-05
|
| 202 |
+
[2026-04-25 18:13:52] Epoch 1 | Step 1820 | Loss: 1.1276 | LR: 1.00e-05
|
| 203 |
+
[2026-04-25 18:13:55] Epoch 1 | Step 1830 | Loss: 1.1273 | LR: 1.00e-05
|
| 204 |
+
[2026-04-25 18:13:57] Epoch 1 | Step 1840 | Loss: 1.1271 | LR: 1.00e-05
|
| 205 |
+
[2026-04-25 18:14:00] Epoch 1 | Step 1850 | Loss: 1.1263 | LR: 1.00e-05
|
| 206 |
+
[2026-04-25 18:14:03] Epoch 1 | Step 1860 | Loss: 1.1259 | LR: 1.00e-05
|
| 207 |
+
[2026-04-25 18:14:05] Epoch 1 | Step 1870 | Loss: 1.1252 | LR: 1.00e-05
|
| 208 |
+
[2026-04-25 18:14:08] Epoch 1 | Step 1880 | Loss: 1.1244 | LR: 1.00e-05
|
| 209 |
+
[2026-04-25 18:14:10] Epoch 1 | Step 1890 | Loss: 1.1244 | LR: 1.00e-05
|
| 210 |
+
[2026-04-25 18:14:13] Epoch 1 | Step 1900 | Loss: 1.1237 | LR: 1.00e-05
|
| 211 |
+
[2026-04-25 18:14:15] Epoch 1 | Step 1910 | Loss: 1.1235 | LR: 1.00e-05
|
| 212 |
+
[2026-04-25 18:14:18] Epoch 1 | Step 1920 | Loss: 1.1235 | LR: 1.00e-05
|
| 213 |
+
[2026-04-25 18:14:20] Epoch 1 | Step 1930 | Loss: 1.1229 | LR: 1.00e-05
|
| 214 |
+
[2026-04-25 18:14:23] Epoch 1 | Step 1940 | Loss: 1.1220 | LR: 1.00e-05
|
| 215 |
+
[2026-04-25 18:14:25] Epoch 1 | Step 1950 | Loss: 1.1213 | LR: 1.00e-05
|
| 216 |
+
[2026-04-25 18:14:28] Epoch 1 | Step 1960 | Loss: 1.1209 | LR: 1.00e-05
|
| 217 |
+
[2026-04-25 18:14:30] Epoch 1 | Step 1970 | Loss: 1.1207 | LR: 1.00e-05
|
| 218 |
+
[2026-04-25 18:14:33] Epoch 1 | Step 1980 | Loss: 1.1208 | LR: 1.00e-05
|
| 219 |
+
[2026-04-25 18:14:35] Epoch 1 | Step 1990 | Loss: 1.1205 | LR: 1.00e-05
|
| 220 |
+
[2026-04-25 18:14:37] Epoch 1 | Step 2000 | Loss: 1.1201 | LR: 1.00e-05
|
| 221 |
+
[2026-04-25 18:14:38] Validation | Batch 10/84 | Loss: 1.0011
|
| 222 |
+
[2026-04-25 18:14:38] Validation | Batch 20/84 | Loss: 1.0029
|
| 223 |
+
[2026-04-25 18:14:39] Validation | Batch 30/84 | Loss: 1.0825
|
| 224 |
+
[2026-04-25 18:14:39] Validation | Batch 40/84 | Loss: 1.0824
|
| 225 |
+
[2026-04-25 18:14:39] Validation | Batch 50/84 | Loss: 1.0856
|
| 226 |
+
[2026-04-25 18:14:40] Validation | Batch 60/84 | Loss: 1.0582
|
| 227 |
+
[2026-04-25 18:14:41] Validation | Batch 70/84 | Loss: 1.0398
|
| 228 |
+
[2026-04-25 18:14:41] Validation | Batch 80/84 | Loss: 1.0455
|
| 229 |
+
[2026-04-25 18:14:41] Validation | Batch 84/84 | Loss: 1.0389
|
| 230 |
+
[2026-04-25 18:14:42] Validation | Loss: 1.0389 | PPL: 2.90 | Time: 3.90s
|
| 231 |
+
[2026-04-25 18:14:44] New best model saved! Val loss: 1.0389
|
| 232 |
+
[2026-04-25 18:14:46] Epoch 1 | Step 2010 | Loss: 1.1197 | LR: 1.00e-05
|
| 233 |
+
[2026-04-25 18:14:49] Epoch 1 | Step 2020 | Loss: 1.1193 | LR: 1.00e-05
|
| 234 |
+
[2026-04-25 18:14:51] Epoch 1 | Step 2030 | Loss: 1.1193 | LR: 1.00e-05
|
| 235 |
+
[2026-04-25 18:14:54] Epoch 1 | Step 2040 | Loss: 1.1188 | LR: 1.00e-05
|
| 236 |
+
[2026-04-25 18:14:56] Epoch 1 | Step 2050 | Loss: 1.1185 | LR: 1.00e-05
|
| 237 |
+
[2026-04-25 18:14:59] Epoch 1 | Step 2060 | Loss: 1.1177 | LR: 1.00e-05
|
| 238 |
+
[2026-04-25 18:15:02] Epoch 1 | Step 2070 | Loss: 1.1164 | LR: 1.00e-05
|
| 239 |
+
[2026-04-25 18:15:04] Epoch 1 | Step 2080 | Loss: 1.1156 | LR: 1.00e-05
|
| 240 |
+
[2026-04-25 18:15:07] Epoch 1 | Step 2090 | Loss: 1.1156 | LR: 1.00e-05
|
| 241 |
+
[2026-04-25 18:15:09] Epoch 1 | Step 2100 | Loss: 1.1154 | LR: 1.00e-05
|
| 242 |
+
[2026-04-25 18:15:13] Epoch 1 | Step 2110 | Loss: 1.1151 | LR: 1.00e-05
|
| 243 |
+
[2026-04-25 18:15:15] Epoch 1 | Step 2120 | Loss: 1.1144 | LR: 1.00e-05
|
| 244 |
+
[2026-04-25 18:15:18] Epoch 1 | Step 2130 | Loss: 1.1142 | LR: 1.00e-05
|
| 245 |
+
[2026-04-25 18:15:20] Epoch 1 | Step 2140 | Loss: 1.1136 | LR: 1.00e-05
|
| 246 |
+
[2026-04-25 18:15:23] Epoch 1 | Step 2150 | Loss: 1.1132 | LR: 1.00e-05
|
| 247 |
+
[2026-04-25 18:15:25] Epoch 1 | Step 2160 | Loss: 1.1132 | LR: 1.00e-05
|
| 248 |
+
[2026-04-25 18:15:27] Epoch 1 | Step 2170 | Loss: 1.1126 | LR: 1.00e-05
|
| 249 |
+
[2026-04-25 18:15:30] Epoch 1 | Step 2180 | Loss: 1.1119 | LR: 1.00e-05
|
| 250 |
+
[2026-04-25 18:15:32] Epoch 1 | Step 2190 | Loss: 1.1118 | LR: 1.00e-05
|
| 251 |
+
[2026-04-25 18:15:35] Epoch 1 | Step 2200 | Loss: 1.1113 | LR: 1.00e-05
|
| 252 |
+
[2026-04-25 18:15:37] Epoch 1 | Step 2210 | Loss: 1.1108 | LR: 1.00e-05
|
| 253 |
+
[2026-04-25 18:15:39] Epoch 1 | Step 2220 | Loss: 1.1111 | LR: 1.00e-05
|
| 254 |
+
[2026-04-25 18:15:42] Epoch 1 | Step 2230 | Loss: 1.1115 | LR: 1.00e-05
|
| 255 |
+
[2026-04-25 18:15:44] Epoch 1 | Step 2240 | Loss: 1.1118 | LR: 1.00e-05
|
| 256 |
+
[2026-04-25 18:15:47] Epoch 1 | Step 2250 | Loss: 1.1120 | LR: 1.00e-05
|
| 257 |
+
[2026-04-25 18:15:49] Epoch 1 | Step 2260 | Loss: 1.1115 | LR: 1.00e-05
|
| 258 |
+
[2026-04-25 18:15:52] Epoch 1 | Step 2270 | Loss: 1.1114 | LR: 1.00e-05
|
| 259 |
+
[2026-04-25 18:15:54] Epoch 1 | Step 2280 | Loss: 1.1113 | LR: 1.00e-05
|
| 260 |
+
[2026-04-25 18:15:57] Epoch 1 | Step 2290 | Loss: 1.1118 | LR: 1.00e-05
|
| 261 |
+
[2026-04-25 18:15:59] Epoch 1 | Step 2300 | Loss: 1.1116 | LR: 1.00e-05
|
| 262 |
+
[2026-04-25 18:16:02] Epoch 1 | Step 2310 | Loss: 1.1111 | LR: 1.00e-05
|
| 263 |
+
[2026-04-25 18:16:04] Epoch 1 | Step 2320 | Loss: 1.1110 | LR: 1.00e-05
|
| 264 |
+
[2026-04-25 18:16:06] Epoch 1 | Step 2330 | Loss: 1.1106 | LR: 1.00e-05
|
| 265 |
+
[2026-04-25 18:16:09] Epoch 1 | Step 2340 | Loss: 1.1101 | LR: 1.00e-05
|
| 266 |
+
[2026-04-25 18:16:11] Epoch 1 | Step 2350 | Loss: 1.1095 | LR: 1.00e-05
|
| 267 |
+
[2026-04-25 18:16:14] Epoch 1 | Step 2360 | Loss: 1.1094 | LR: 1.00e-05
|
| 268 |
+
[2026-04-25 18:16:16] Epoch 1 | Step 2370 | Loss: 1.1091 | LR: 1.00e-05
|
| 269 |
+
[2026-04-25 18:16:19] Epoch 1 | Step 2380 | Loss: 1.1084 | LR: 1.00e-05
|
| 270 |
+
[2026-04-25 18:16:21] Epoch 1 | Step 2390 | Loss: 1.1084 | LR: 1.00e-05
|
| 271 |
+
[2026-04-25 18:16:24] Epoch 1 | Step 2400 | Loss: 1.1077 | LR: 1.00e-05
|
| 272 |
+
[2026-04-25 18:16:26] Epoch 1 | Step 2410 | Loss: 1.1077 | LR: 1.00e-05
|
| 273 |
+
[2026-04-25 18:16:29] Epoch 1 | Step 2420 | Loss: 1.1074 | LR: 1.00e-05
|
| 274 |
+
[2026-04-25 18:16:31] Epoch 1 | Step 2430 | Loss: 1.1074 | LR: 1.00e-05
|
| 275 |
+
[2026-04-25 18:16:34] Epoch 1 | Step 2440 | Loss: 1.1069 | LR: 1.00e-05
|
| 276 |
+
[2026-04-25 18:16:36] Epoch 1 | Step 2450 | Loss: 1.1064 | LR: 1.00e-05
|
| 277 |
+
[2026-04-25 18:16:38] Epoch 1 | Step 2460 | Loss: 1.1061 | LR: 1.00e-05
|
| 278 |
+
[2026-04-25 18:16:41] Epoch 1 | Step 2470 | Loss: 1.1060 | LR: 1.00e-05
|
| 279 |
+
[2026-04-25 18:16:44] Epoch 1 | Step 2480 | Loss: 1.1058 | LR: 1.00e-05
|
| 280 |
+
[2026-04-25 18:16:46] Epoch 1 | Step 2490 | Loss: 1.1053 | LR: 1.00e-05
|
| 281 |
+
[2026-04-25 18:16:49] Epoch 1 | Step 2500 | Loss: 1.1047 | LR: 1.00e-05
|
| 282 |
+
[2026-04-25 18:16:51] Epoch 1 | Step 2510 | Loss: 1.1047 | LR: 1.00e-05
|
| 283 |
+
[2026-04-25 18:16:54] Epoch 1 | Step 2520 | Loss: 1.1038 | LR: 1.00e-05
|
| 284 |
+
[2026-04-25 18:16:56] Epoch 1 | Step 2530 | Loss: 1.1033 | LR: 1.00e-05
|
| 285 |
+
[2026-04-25 18:16:59] Epoch 1 | Step 2540 | Loss: 1.1029 | LR: 1.00e-05
|
| 286 |
+
[2026-04-25 18:17:01] Epoch 1 | Step 2550 | Loss: 1.1021 | LR: 1.00e-05
|
| 287 |
+
[2026-04-25 18:17:04] Epoch 1 | Step 2560 | Loss: 1.1019 | LR: 1.00e-05
|
| 288 |
+
[2026-04-25 18:17:06] Epoch 1 | Step 2570 | Loss: 1.1021 | LR: 1.00e-05
|
| 289 |
+
[2026-04-25 18:17:09] Epoch 1 | Step 2580 | Loss: 1.1022 | LR: 1.00e-05
|
| 290 |
+
[2026-04-25 18:17:12] Epoch 1 | Step 2590 | Loss: 1.1021 | LR: 1.00e-05
|
| 291 |
+
[2026-04-25 18:17:14] Epoch 1 | Step 2600 | Loss: 1.1020 | LR: 1.00e-05
|
| 292 |
+
[2026-04-25 18:17:16] Epoch 1 | Step 2610 | Loss: 1.1017 | LR: 1.00e-05
|
| 293 |
+
[2026-04-25 18:17:19] Epoch 1 | Step 2620 | Loss: 1.1011 | LR: 1.00e-05
|
| 294 |
+
[2026-04-25 18:17:21] Epoch 1 | Step 2630 | Loss: 1.1006 | LR: 1.00e-05
|
| 295 |
+
[2026-04-25 18:17:24] Epoch 1 | Step 2640 | Loss: 1.1005 | LR: 1.00e-05
|
| 296 |
+
[2026-04-25 18:17:26] Epoch 1 | Step 2650 | Loss: 1.1000 | LR: 1.00e-05
|
| 297 |
+
[2026-04-25 18:17:29] Epoch 1 | Step 2660 | Loss: 1.1000 | LR: 1.00e-05
|
| 298 |
+
[2026-04-25 18:17:31] Epoch 1 | Step 2670 | Loss: 1.0996 | LR: 1.00e-05
|
| 299 |
+
[2026-04-25 18:17:34] Epoch 1 | Step 2680 | Loss: 1.0992 | LR: 1.00e-05
|
| 300 |
+
[2026-04-25 18:17:36] Epoch 1 | Step 2690 | Loss: 1.0990 | LR: 1.00e-05
|
| 301 |
+
[2026-04-25 18:17:39] Epoch 1 | Step 2700 | Loss: 1.0984 | LR: 1.00e-05
|
| 302 |
+
[2026-04-25 18:17:41] Epoch 1 | Step 2710 | Loss: 1.0977 | LR: 1.00e-05
|
| 303 |
+
[2026-04-25 18:17:44] Epoch 1 | Step 2720 | Loss: 1.0977 | LR: 1.00e-05
|
| 304 |
+
[2026-04-25 18:17:46] Epoch 1 | Step 2730 | Loss: 1.0973 | LR: 1.00e-05
|
| 305 |
+
[2026-04-25 18:17:49] Epoch 1 | Step 2740 | Loss: 1.0976 | LR: 1.00e-05
|
| 306 |
+
[2026-04-25 18:17:51] Epoch 1 | Step 2750 | Loss: 1.0975 | LR: 1.00e-05
|
| 307 |
+
[2026-04-25 18:17:54] Epoch 1 | Step 2760 | Loss: 1.0970 | LR: 1.00e-05
|
| 308 |
+
[2026-04-25 18:17:56] Epoch 1 | Step 2770 | Loss: 1.0967 | LR: 1.00e-05
|
| 309 |
+
[2026-04-25 18:17:59] Epoch 1 | Step 2780 | Loss: 1.0968 | LR: 1.00e-05
|
| 310 |
+
[2026-04-25 18:18:01] Epoch 1 | Step 2790 | Loss: 1.0965 | LR: 1.00e-05
|
| 311 |
+
[2026-04-25 18:18:03] Epoch 1 | Step 2800 | Loss: 1.0961 | LR: 1.00e-05
|
| 312 |
+
[2026-04-25 18:18:06] Epoch 1 | Step 2810 | Loss: 1.0960 | LR: 1.00e-05
|
| 313 |
+
[2026-04-25 18:18:09] Epoch 1 | Step 2820 | Loss: 1.0957 | LR: 1.00e-05
|
| 314 |
+
[2026-04-25 18:18:11] Epoch 1 | Step 2830 | Loss: 1.0953 | LR: 1.00e-05
|
| 315 |
+
[2026-04-25 18:18:13] Epoch 1 | Step 2840 | Loss: 1.0958 | LR: 1.00e-05
|
| 316 |
+
[2026-04-25 18:18:16] Epoch 1 | Step 2850 | Loss: 1.0956 | LR: 1.00e-05
|
| 317 |
+
[2026-04-25 18:18:18] Epoch 1 | Step 2860 | Loss: 1.0953 | LR: 1.00e-05
|
| 318 |
+
[2026-04-25 18:18:21] Epoch 1 | Step 2870 | Loss: 1.0951 | LR: 1.00e-05
|
| 319 |
+
[2026-04-25 18:18:23] Epoch 1 | Step 2880 | Loss: 1.0947 | LR: 1.00e-05
|
| 320 |
+
[2026-04-25 18:18:26] Epoch 1 | Step 2890 | Loss: 1.0944 | LR: 1.00e-05
|
| 321 |
+
[2026-04-25 18:18:29] Epoch 1 | Step 2900 | Loss: 1.0939 | LR: 1.00e-05
|
| 322 |
+
[2026-04-25 18:18:31] Epoch 1 | Step 2910 | Loss: 1.0937 | LR: 1.00e-05
|
| 323 |
+
[2026-04-25 18:18:34] Epoch 1 | Step 2920 | Loss: 1.0938 | LR: 1.00e-05
|
| 324 |
+
[2026-04-25 18:18:37] Epoch 1 | Step 2930 | Loss: 1.0934 | LR: 1.00e-05
|
| 325 |
+
[2026-04-25 18:18:39] Epoch 1 | Step 2940 | Loss: 1.0929 | LR: 1.00e-05
|
| 326 |
+
[2026-04-25 18:18:41] Epoch 1 | Step 2950 | Loss: 1.0930 | LR: 1.00e-05
|
| 327 |
+
[2026-04-25 18:18:44] Epoch 1 | Step 2960 | Loss: 1.0929 | LR: 1.00e-05
|
| 328 |
+
[2026-04-25 18:18:47] Epoch 1 | Step 2970 | Loss: 1.0929 | LR: 1.00e-05
|
| 329 |
+
[2026-04-25 18:18:49] Epoch 1 | Step 2980 | Loss: 1.0925 | LR: 1.00e-05
|
| 330 |
+
[2026-04-25 18:18:52] Epoch 1 | Step 2990 | Loss: 1.0926 | LR: 1.00e-05
|
| 331 |
+
[2026-04-25 18:18:54] Epoch 1 | Step 3000 | Loss: 1.0924 | LR: 1.00e-05
|
| 332 |
+
[2026-04-25 18:18:57] Epoch 1 | Step 3010 | Loss: 1.0924 | LR: 1.00e-05
|
| 333 |
+
[2026-04-25 18:19:00] Epoch 1 | Step 3020 | Loss: 1.0920 | LR: 1.00e-05
|
| 334 |
+
[2026-04-25 18:19:02] Epoch 1 | Step 3030 | Loss: 1.0917 | LR: 1.00e-05
|
| 335 |
+
[2026-04-25 18:19:05] Epoch 1 | Step 3040 | Loss: 1.0911 | LR: 1.00e-05
|
| 336 |
+
[2026-04-25 18:19:07] Epoch 1 | Step 3050 | Loss: 1.0905 | LR: 1.00e-05
|
| 337 |
+
[2026-04-25 18:19:09] Epoch 1 | Step 3060 | Loss: 1.0902 | LR: 1.00e-05
|
| 338 |
+
[2026-04-25 18:19:12] Epoch 1 | Step 3070 | Loss: 1.0898 | LR: 1.00e-05
|
| 339 |
+
[2026-04-25 18:19:15] Epoch 1 | Step 3080 | Loss: 1.0898 | LR: 1.00e-05
|
| 340 |
+
[2026-04-25 18:19:17] Epoch 1 | Step 3090 | Loss: 1.0893 | LR: 1.00e-05
|
| 341 |
+
[2026-04-25 18:19:19] Epoch 1 | Step 3100 | Loss: 1.0890 | LR: 1.00e-05
|
| 342 |
+
[2026-04-25 18:19:22] Epoch 1 | Step 3110 | Loss: 1.0886 | LR: 1.00e-05
|
| 343 |
+
[2026-04-25 18:19:24] Epoch 1 | Step 3120 | Loss: 1.0890 | LR: 1.00e-05
|
| 344 |
+
[2026-04-25 18:19:27] Epoch 1 | Step 3130 | Loss: 1.0885 | LR: 1.00e-05
|
| 345 |
+
[2026-04-25 18:19:29] Epoch 1 | Step 3140 | Loss: 1.0885 | LR: 1.00e-05
|
| 346 |
+
[2026-04-25 18:19:32] Epoch 1 | Step 3150 | Loss: 1.0886 | LR: 1.00e-05
|
| 347 |
+
[2026-04-25 18:19:35] Epoch 1 | Step 3160 | Loss: 1.0886 | LR: 1.00e-05
|
| 348 |
+
[2026-04-25 18:19:37] Epoch 1 | Step 3170 | Loss: 1.0883 | LR: 1.00e-05
|
| 349 |
+
[2026-04-25 18:19:40] Epoch 1 | Step 3180 | Loss: 1.0883 | LR: 1.00e-05
|
| 350 |
+
[2026-04-25 18:19:42] Epoch 1 | Step 3190 | Loss: 1.0877 | LR: 1.00e-05
|
| 351 |
+
[2026-04-25 18:19:44] Epoch 1 | Step 3200 | Loss: 1.0874 | LR: 1.00e-05
|
| 352 |
+
[2026-04-25 18:19:47] Epoch 1 | Step 3210 | Loss: 1.0870 | LR: 1.00e-05
|
| 353 |
+
[2026-04-25 18:19:49] Epoch 1 | Step 3220 | Loss: 1.0865 | LR: 1.00e-05
|
| 354 |
+
[2026-04-25 18:19:52] Epoch 1 | Step 3230 | Loss: 1.0868 | LR: 1.00e-05
|
| 355 |
+
[2026-04-25 18:19:54] Epoch 1 | Step 3240 | Loss: 1.0866 | LR: 1.00e-05
|
| 356 |
+
[2026-04-25 18:19:57] Epoch 1 | Step 3250 | Loss: 1.0866 | LR: 1.00e-05
|
| 357 |
+
[2026-04-25 18:19:59] Epoch 1 | Step 3260 | Loss: 1.0862 | LR: 1.00e-05
|
| 358 |
+
[2026-04-25 18:20:01] Epoch 1 | Step 3270 | Loss: 1.0860 | LR: 1.00e-05
|
| 359 |
+
[2026-04-25 18:20:04] Epoch 1 | Step 3280 | Loss: 1.0854 | LR: 1.00e-05
|
| 360 |
+
[2026-04-25 18:20:07] Epoch 1 | Step 3290 | Loss: 1.0851 | LR: 1.00e-05
|
| 361 |
+
[2026-04-25 18:20:09] Epoch 1 | Step 3300 | Loss: 1.0850 | LR: 1.00e-05
|
| 362 |
+
[2026-04-25 18:20:12] Epoch 1 | Step 3310 | Loss: 1.0847 | LR: 1.00e-05
|
| 363 |
+
[2026-04-25 18:20:14] Epoch 1 | Step 3320 | Loss: 1.0845 | LR: 1.00e-05
|
| 364 |
+
[2026-04-25 18:20:17] Epoch 1 | Step 3330 | Loss: 1.0843 | LR: 1.00e-05
|
| 365 |
+
[2026-04-25 18:20:20] Epoch 1 | Step 3340 | Loss: 1.0843 | LR: 1.00e-05
|
| 366 |
+
[2026-04-25 18:20:22] Epoch 1 | Step 3350 | Loss: 1.0838 | LR: 1.00e-05
|
| 367 |
+
[2026-04-25 18:20:24] Epoch 1 | Step 3360 | Loss: 1.0836 | LR: 1.00e-05
|
| 368 |
+
[2026-04-25 18:20:27] Epoch 1 | Step 3370 | Loss: 1.0836 | LR: 1.00e-05
|
| 369 |
+
[2026-04-25 18:20:29] Epoch 1 | Step 3380 | Loss: 1.0831 | LR: 1.00e-05
|
| 370 |
+
[2026-04-25 18:20:32] Epoch 1 | Step 3390 | Loss: 1.0832 | LR: 1.00e-05
|
| 371 |
+
[2026-04-25 18:20:35] Epoch 1 | Step 3400 | Loss: 1.0835 | LR: 1.00e-05
|
| 372 |
+
[2026-04-25 18:20:38] Epoch 1 | Step 3410 | Loss: 1.0832 | LR: 1.00e-05
|
| 373 |
+
[2026-04-25 18:20:40] Epoch 1 | Step 3420 | Loss: 1.0828 | LR: 1.00e-05
|
| 374 |
+
[2026-04-25 18:20:43] Epoch 1 | Step 3430 | Loss: 1.0827 | LR: 1.00e-05
|
| 375 |
+
[2026-04-25 18:20:45] Epoch 1 | Step 3440 | Loss: 1.0828 | LR: 1.00e-05
|
| 376 |
+
[2026-04-25 18:20:48] Epoch 1 | Step 3450 | Loss: 1.0825 | LR: 1.00e-05
|
| 377 |
+
[2026-04-25 18:20:50] Epoch 1 | Step 3460 | Loss: 1.0823 | LR: 1.00e-05
|
| 378 |
+
[2026-04-25 18:20:53] Epoch 1 | Step 3470 | Loss: 1.0822 | LR: 1.00e-05
|
| 379 |
+
[2026-04-25 18:20:55] Epoch 1 | Step 3480 | Loss: 1.0820 | LR: 1.00e-05
|
| 380 |
+
[2026-04-25 18:20:57] Epoch 1 | Step 3490 | Loss: 1.0818 | LR: 1.00e-05
|
| 381 |
+
[2026-04-25 18:21:00] Epoch 1 | Step 3500 | Loss: 1.0814 | LR: 1.00e-05
|
| 382 |
+
[2026-04-25 18:21:03] Epoch 1 | Step 3510 | Loss: 1.0815 | LR: 1.00e-05
|
| 383 |
+
[2026-04-25 18:21:05] Epoch 1 | Step 3520 | Loss: 1.0811 | LR: 1.00e-05
|
| 384 |
+
[2026-04-25 18:21:08] Epoch 1 | Step 3530 | Loss: 1.0812 | LR: 1.00e-05
|
| 385 |
+
[2026-04-25 18:21:10] Epoch 1 | Step 3540 | Loss: 1.0808 | LR: 1.00e-05
|
| 386 |
+
[2026-04-25 18:21:13] Epoch 1 | Step 3550 | Loss: 1.0806 | LR: 1.00e-05
|
| 387 |
+
[2026-04-25 18:21:16] Epoch 1 | Step 3560 | Loss: 1.0806 | LR: 1.00e-05
|
| 388 |
+
[2026-04-25 18:21:18] Epoch 1 | Step 3570 | Loss: 1.0804 | LR: 1.00e-05
|
| 389 |
+
[2026-04-25 18:21:21] Epoch 1 | Step 3580 | Loss: 1.0803 | LR: 1.00e-05
|
| 390 |
+
[2026-04-25 18:21:23] Epoch 1 | Step 3590 | Loss: 1.0801 | LR: 1.00e-05
|
| 391 |
+
[2026-04-25 18:21:25] Epoch 1 | Step 3600 | Loss: 1.0797 | LR: 1.00e-05
|
| 392 |
+
[2026-04-25 18:21:28] Epoch 1 | Step 3610 | Loss: 1.0794 | LR: 1.00e-05
|
| 393 |
+
[2026-04-25 18:21:30] Epoch 1 | Step 3620 | Loss: 1.0792 | LR: 1.00e-05
|
| 394 |
+
[2026-04-25 18:21:33] Epoch 1 | Step 3630 | Loss: 1.0794 | LR: 1.00e-05
|
| 395 |
+
[2026-04-25 18:21:35] Epoch 1 | Step 3640 | Loss: 1.0795 | LR: 1.00e-05
|
| 396 |
+
[2026-04-25 18:21:38] Epoch 1 | Step 3650 | Loss: 1.0795 | LR: 1.00e-05
|
| 397 |
+
[2026-04-25 18:21:40] Epoch 1 | Step 3660 | Loss: 1.0793 | LR: 1.00e-05
|
| 398 |
+
[2026-04-25 18:21:43] Epoch 1 | Step 3670 | Loss: 1.0789 | LR: 1.00e-05
|
| 399 |
+
[2026-04-25 18:21:45] Epoch 1 | Step 3680 | Loss: 1.0788 | LR: 1.00e-05
|
| 400 |
+
[2026-04-25 18:21:48] Epoch 1 | Step 3690 | Loss: 1.0786 | LR: 1.00e-05
|
| 401 |
+
[2026-04-25 18:21:50] Epoch 1 | Step 3700 | Loss: 1.0782 | LR: 1.00e-05
|
| 402 |
+
[2026-04-25 18:21:53] Epoch 1 | Step 3710 | Loss: 1.0779 | LR: 1.00e-05
|
| 403 |
+
[2026-04-25 18:21:55] Epoch 1 | Step 3720 | Loss: 1.0778 | LR: 1.00e-05
|
| 404 |
+
[2026-04-25 18:21:58] Epoch 1 | Step 3730 | Loss: 1.0778 | LR: 1.00e-05
|
| 405 |
+
[2026-04-25 18:22:00] Epoch 1 | Step 3740 | Loss: 1.0779 | LR: 1.00e-05
|
| 406 |
+
[2026-04-25 18:22:03] Epoch 1 | Step 3750 | Loss: 1.0776 | LR: 1.00e-05
|
| 407 |
+
[2026-04-25 18:22:06] Epoch 1 | Step 3760 | Loss: 1.0776 | LR: 1.00e-05
|
| 408 |
+
[2026-04-25 18:22:08] Epoch 1 | Step 3770 | Loss: 1.0776 | LR: 1.00e-05
|
| 409 |
+
[2026-04-25 18:22:10] Epoch 1 | Step 3780 | Loss: 1.0775 | LR: 1.00e-05
|
| 410 |
+
[2026-04-25 18:22:13] Epoch 1 | Step 3790 | Loss: 1.0775 | LR: 1.00e-05
|
| 411 |
+
[2026-04-25 18:22:15] Epoch 1 | Step 3800 | Loss: 1.0776 | LR: 1.00e-05
|
| 412 |
+
[2026-04-25 18:22:18] Epoch 1 | Step 3810 | Loss: 1.0770 | LR: 1.00e-05
|
| 413 |
+
[2026-04-25 18:22:20] Epoch 1 | Step 3820 | Loss: 1.0767 | LR: 1.00e-05
|
| 414 |
+
[2026-04-25 18:22:23] Epoch 1 | Step 3830 | Loss: 1.0765 | LR: 1.00e-05
|
| 415 |
+
[2026-04-25 18:22:25] Epoch 1 | Step 3840 | Loss: 1.0764 | LR: 1.00e-05
|
| 416 |
+
[2026-04-25 18:22:28] Epoch 1 | Step 3850 | Loss: 1.0760 | LR: 1.00e-05
|
| 417 |
+
[2026-04-25 18:22:30] Epoch 1 | Step 3860 | Loss: 1.0758 | LR: 1.00e-05
|
| 418 |
+
[2026-04-25 18:22:33] Epoch 1 | Step 3870 | Loss: 1.0757 | LR: 1.00e-05
|
| 419 |
+
[2026-04-25 18:22:35] Epoch 1 | Step 3880 | Loss: 1.0751 | LR: 1.00e-05
|
| 420 |
+
[2026-04-25 18:22:38] Epoch 1 | Step 3890 | Loss: 1.0748 | LR: 1.00e-05
|
| 421 |
+
[2026-04-25 18:22:40] Epoch 1 | Step 3900 | Loss: 1.0747 | LR: 1.00e-05
|
| 422 |
+
[2026-04-25 18:22:43] Epoch 1 | Step 3910 | Loss: 1.0750 | LR: 1.00e-05
|
| 423 |
+
[2026-04-25 18:22:45] Epoch 1 | Step 3920 | Loss: 1.0749 | LR: 1.00e-05
|
| 424 |
+
[2026-04-25 18:22:48] Epoch 1 | Step 3930 | Loss: 1.0747 | LR: 1.00e-05
|
| 425 |
+
[2026-04-25 18:22:51] Epoch 1 | Step 3940 | Loss: 1.0746 | LR: 1.00e-05
|
| 426 |
+
[2026-04-25 18:22:53] Epoch 1 | Step 3950 | Loss: 1.0743 | LR: 1.00e-05
|
| 427 |
+
[2026-04-25 18:22:56] Epoch 1 | Step 3960 | Loss: 1.0743 | LR: 1.00e-05
|
| 428 |
+
[2026-04-25 18:22:58] Epoch 1 | Step 3970 | Loss: 1.0740 | LR: 9.99e-06
|
| 429 |
+
[2026-04-25 18:23:01] Epoch 1 | Step 3980 | Loss: 1.0739 | LR: 9.99e-06
|
| 430 |
+
[2026-04-25 18:23:03] Epoch 1 | Step 3990 | Loss: 1.0735 | LR: 9.97e-06
|
| 431 |
+
[2026-04-25 18:23:06] Epoch 1 | Step 4000 | Loss: 1.0735 | LR: 9.95e-06
|
| 432 |
+
[2026-04-25 18:23:06] Validation | Batch 10/84 | Loss: 0.9900
|
| 433 |
+
[2026-04-25 18:23:07] Validation | Batch 20/84 | Loss: 0.9870
|
| 434 |
+
[2026-04-25 18:23:07] Validation | Batch 30/84 | Loss: 1.0641
|
| 435 |
+
[2026-04-25 18:23:08] Validation | Batch 40/84 | Loss: 1.0678
|
| 436 |
+
[2026-04-25 18:23:08] Validation | Batch 50/84 | Loss: 1.0680
|
| 437 |
+
[2026-04-25 18:23:08] Validation | Batch 60/84 | Loss: 1.0429
|
| 438 |
+
[2026-04-25 18:23:09] Validation | Batch 70/84 | Loss: 1.0235
|
| 439 |
+
[2026-04-25 18:23:09] Validation | Batch 80/84 | Loss: 1.0302
|
| 440 |
+
[2026-04-25 18:23:09] Validation | Batch 84/84 | Loss: 1.0235
|
| 441 |
+
[2026-04-25 18:23:10] Validation | Loss: 1.0235 | PPL: 2.85 | Time: 3.75s
|
| 442 |
+
[2026-04-25 18:23:12] New best model saved! Val loss: 1.0235
|
| 443 |
+
[2026-04-25 18:23:15] Epoch 1 | Step 4010 | Loss: 1.0733 | LR: 9.93e-06
|
| 444 |
+
[2026-04-25 18:23:17] Epoch 1 | Step 4020 | Loss: 1.0734 | LR: 9.90e-06
|
| 445 |
+
[2026-04-25 18:23:20] Epoch 1 | Step 4030 | Loss: 1.0730 | LR: 9.87e-06
|
| 446 |
+
[2026-04-25 18:23:22] Epoch 1 | Step 4040 | Loss: 1.0725 | LR: 9.84e-06
|
| 447 |
+
[2026-04-25 18:23:24] Epoch 1 | Step 4050 | Loss: 1.0722 | LR: 9.80e-06
|
| 448 |
+
[2026-04-25 18:23:27] Epoch 1 | Step 4060 | Loss: 1.0716 | LR: 9.75e-06
|
| 449 |
+
[2026-04-25 18:23:29] Epoch 1 | Step 4070 | Loss: 1.0715 | LR: 9.70e-06
|
| 450 |
+
[2026-04-25 18:23:32] Epoch 1 | Step 4080 | Loss: 1.0715 | LR: 9.65e-06
|
| 451 |
+
[2026-04-25 18:23:34] Epoch 1 | Step 4090 | Loss: 1.0715 | LR: 9.59e-06
|
| 452 |
+
[2026-04-25 18:23:37] Epoch 1 | Step 4100 | Loss: 1.0715 | LR: 9.53e-06
|
| 453 |
+
[2026-04-25 18:23:39] Epoch 1 | Step 4110 | Loss: 1.0714 | LR: 9.46e-06
|
| 454 |
+
[2026-04-25 18:23:42] Epoch 1 | Step 4120 | Loss: 1.0717 | LR: 9.40e-06
|
| 455 |
+
[2026-04-25 18:23:44] Epoch 1 | Step 4130 | Loss: 1.0714 | LR: 9.32e-06
|
| 456 |
+
[2026-04-25 18:23:47] Epoch 1 | Step 4140 | Loss: 1.0715 | LR: 9.24e-06
|
| 457 |
+
[2026-04-25 18:23:50] Epoch 1 | Step 4150 | Loss: 1.0719 | LR: 9.16e-06
|
| 458 |
+
[2026-04-25 18:23:52] Epoch 1 | Step 4160 | Loss: 1.0721 | LR: 9.08e-06
|
| 459 |
+
[2026-04-25 18:23:55] Epoch 1 | Step 4170 | Loss: 1.0718 | LR: 8.99e-06
|
| 460 |
+
[2026-04-25 18:23:57] Epoch 1 | Step 4180 | Loss: 1.0717 | LR: 8.90e-06
|
| 461 |
+
[2026-04-25 18:24:00] Epoch 1 | Step 4190 | Loss: 1.0715 | LR: 8.80e-06
|
| 462 |
+
[2026-04-25 18:24:03] Epoch 1 | Step 4200 | Loss: 1.0718 | LR: 8.70e-06
|
| 463 |
+
[2026-04-25 18:24:05] Epoch 1 | Step 4210 | Loss: 1.0716 | LR: 8.60e-06
|
| 464 |
+
[2026-04-25 18:24:08] Epoch 1 | Step 4220 | Loss: 1.0721 | LR: 8.50e-06
|
| 465 |
+
[2026-04-25 18:24:11] Epoch 1 | Step 4230 | Loss: 1.0721 | LR: 8.39e-06
|
| 466 |
+
[2026-04-25 18:24:13] Epoch 1 | Step 4240 | Loss: 1.0721 | LR: 8.28e-06
|
| 467 |
+
[2026-04-25 18:24:16] Epoch 1 | Step 4250 | Loss: 1.0721 | LR: 8.16e-06
|
| 468 |
+
[2026-04-25 18:24:18] Epoch 1 | Step 4260 | Loss: 1.0717 | LR: 8.05e-06
|
| 469 |
+
[2026-04-25 18:24:21] Epoch 1 | Step 4270 | Loss: 1.0719 | LR: 7.93e-06
|
| 470 |
+
[2026-04-25 18:24:23] Epoch 1 | Step 4280 | Loss: 1.0717 | LR: 7.81e-06
|
| 471 |
+
[2026-04-25 18:24:26] Epoch 1 | Step 4290 | Loss: 1.0714 | LR: 7.68e-06
|
| 472 |
+
[2026-04-25 18:24:28] Epoch 1 | Step 4300 | Loss: 1.0714 | LR: 7.56e-06
|
| 473 |
+
[2026-04-25 18:24:31] Epoch 1 | Step 4310 | Loss: 1.0715 | LR: 7.43e-06
|
| 474 |
+
[2026-04-25 18:24:33] Epoch 1 | Step 4320 | Loss: 1.0715 | LR: 7.30e-06
|
| 475 |
+
[2026-04-25 18:24:36] Epoch 1 | Step 4330 | Loss: 1.0713 | LR: 7.16e-06
|
| 476 |
+
[2026-04-25 18:24:38] Epoch 1 | Step 4340 | Loss: 1.0712 | LR: 7.03e-06
|
| 477 |
+
[2026-04-25 18:24:41] Epoch 1 | Step 4350 | Loss: 1.0709 | LR: 6.90e-06
|
| 478 |
+
[2026-04-25 18:24:43] Epoch 1 | Step 4360 | Loss: 1.0708 | LR: 6.76e-06
|
| 479 |
+
[2026-04-25 18:24:46] Epoch 1 | Step 4370 | Loss: 1.0708 | LR: 6.62e-06
|
| 480 |
+
[2026-04-25 18:24:48] Epoch 1 | Step 4380 | Loss: 1.0706 | LR: 6.48e-06
|
| 481 |
+
[2026-04-25 18:24:51] Epoch 1 | Step 4390 | Loss: 1.0706 | LR: 6.34e-06
|
| 482 |
+
[2026-04-25 18:24:53] Epoch 1 | Step 4400 | Loss: 1.0704 | LR: 6.20e-06
|
| 483 |
+
[2026-04-25 18:24:56] Epoch 1 | Step 4410 | Loss: 1.0699 | LR: 6.06e-06
|
| 484 |
+
[2026-04-25 18:24:58] Epoch 1 | Step 4420 | Loss: 1.0701 | LR: 5.92e-06
|
| 485 |
+
[2026-04-25 18:25:01] Epoch 1 | Step 4430 | Loss: 1.0700 | LR: 5.78e-06
|
| 486 |
+
[2026-04-25 18:25:03] Epoch 1 | Step 4440 | Loss: 1.0702 | LR: 5.63e-06
|
| 487 |
+
[2026-04-25 18:25:06] Epoch 1 | Step 4450 | Loss: 1.0700 | LR: 5.49e-06
|
| 488 |
+
[2026-04-25 18:25:08] Epoch 1 | Step 4460 | Loss: 1.0703 | LR: 5.35e-06
|
| 489 |
+
[2026-04-25 18:25:11] Epoch 1 | Step 4470 | Loss: 1.0700 | LR: 5.20e-06
|
| 490 |
+
[2026-04-25 18:25:13] Epoch 1 | Step 4480 | Loss: 1.0698 | LR: 5.06e-06
|
| 491 |
+
[2026-04-25 18:25:16] Epoch 1 | Step 4490 | Loss: 1.0696 | LR: 4.92e-06
|
| 492 |
+
[2026-04-25 18:25:18] Epoch 1 | Step 4500 | Loss: 1.0697 | LR: 4.78e-06
|
| 493 |
+
[2026-04-25 18:25:21] Epoch 1 | Step 4510 | Loss: 1.0692 | LR: 4.64e-06
|
| 494 |
+
[2026-04-25 18:25:23] Epoch 1 | Step 4520 | Loss: 1.0690 | LR: 4.50e-06
|
| 495 |
+
[2026-04-25 18:25:26] Epoch 1 | Step 4530 | Loss: 1.0687 | LR: 4.36e-06
|
| 496 |
+
[2026-04-25 18:25:29] Epoch 1 | Step 4540 | Loss: 1.0685 | LR: 4.22e-06
|
| 497 |
+
[2026-04-25 18:25:32] Epoch 1 | Step 4550 | Loss: 1.0681 | LR: 4.08e-06
|
| 498 |
+
[2026-04-25 18:25:34] Epoch 1 | Step 4560 | Loss: 1.0681 | LR: 3.95e-06
|
| 499 |
+
[2026-04-25 18:25:37] Epoch 1 | Step 4570 | Loss: 1.0681 | LR: 3.82e-06
|
| 500 |
+
[2026-04-25 18:25:39] Epoch 1 | Step 4580 | Loss: 1.0679 | LR: 3.68e-06
|
| 501 |
+
[2026-04-25 18:25:42] Epoch 1 | Step 4590 | Loss: 1.0677 | LR: 3.55e-06
|
| 502 |
+
[2026-04-25 18:25:44] Epoch 1 | Step 4600 | Loss: 1.0675 | LR: 3.43e-06
|
| 503 |
+
[2026-04-25 18:25:46] Epoch 1 | Step 4610 | Loss: 1.0673 | LR: 3.30e-06
|
| 504 |
+
[2026-04-25 18:25:49] Epoch 1 | Step 4620 | Loss: 1.0674 | LR: 3.18e-06
|
| 505 |
+
[2026-04-25 18:25:51] Epoch 1 | Step 4630 | Loss: 1.0672 | LR: 3.05e-06
|
| 506 |
+
[2026-04-25 18:25:54] Epoch 1 | Step 4640 | Loss: 1.0671 | LR: 2.94e-06
|
| 507 |
+
[2026-04-25 18:25:56] Epoch 1 | Step 4650 | Loss: 1.0671 | LR: 2.82e-06
|
| 508 |
+
[2026-04-25 18:25:59] Epoch 1 | Step 4660 | Loss: 1.0669 | LR: 2.71e-06
|
| 509 |
+
[2026-04-25 18:26:01] Epoch 1 | Step 4670 | Loss: 1.0667 | LR: 2.60e-06
|
| 510 |
+
[2026-04-25 18:26:04] Epoch 1 | Step 4680 | Loss: 1.0668 | LR: 2.49e-06
|
| 511 |
+
[2026-04-25 18:26:07] Epoch 1 | Step 4690 | Loss: 1.0666 | LR: 2.38e-06
|
| 512 |
+
[2026-04-25 18:26:09] Epoch 1 | Step 4700 | Loss: 1.0668 | LR: 2.28e-06
|
| 513 |
+
[2026-04-25 18:26:12] Epoch 1 | Step 4710 | Loss: 1.0666 | LR: 2.18e-06
|
| 514 |
+
[2026-04-25 18:26:14] Epoch 1 | Step 4720 | Loss: 1.0664 | LR: 2.09e-06
|
| 515 |
+
[2026-04-25 18:26:16] Epoch 1 | Step 4730 | Loss: 1.0664 | LR: 2.00e-06
|
| 516 |
+
[2026-04-25 18:26:19] Epoch 1 | Step 4740 | Loss: 1.0662 | LR: 1.91e-06
|
| 517 |
+
[2026-04-25 18:26:21] Epoch 1 | Step 4750 | Loss: 1.0661 | LR: 1.82e-06
|
| 518 |
+
[2026-04-25 18:26:23] Epoch 1 | Step 4760 | Loss: 1.0659 | LR: 1.74e-06
|
| 519 |
+
[2026-04-25 18:26:26] Epoch 1 | Step 4770 | Loss: 1.0655 | LR: 1.67e-06
|
| 520 |
+
[2026-04-25 18:26:28] Epoch 1 | Step 4780 | Loss: 1.0655 | LR: 1.59e-06
|
| 521 |
+
[2026-04-25 18:26:31] Epoch 1 | Step 4790 | Loss: 1.0654 | LR: 1.52e-06
|
| 522 |
+
[2026-04-25 18:26:34] Epoch 1 | Step 4800 | Loss: 1.0652 | LR: 1.46e-06
|
| 523 |
+
[2026-04-25 18:26:36] Epoch 1 | Step 4810 | Loss: 1.0648 | LR: 1.40e-06
|
| 524 |
+
[2026-04-25 18:26:39] Epoch 1 | Step 4820 | Loss: 1.0646 | LR: 1.34e-06
|
| 525 |
+
[2026-04-25 18:26:41] Epoch 1 | Step 4830 | Loss: 1.0642 | LR: 1.29e-06
|
| 526 |
+
[2026-04-25 18:26:44] Epoch 1 | Step 4840 | Loss: 1.0641 | LR: 1.24e-06
|
| 527 |
+
[2026-04-25 18:26:47] Epoch 1 | Step 4850 | Loss: 1.0642 | LR: 1.20e-06
|
| 528 |
+
[2026-04-25 18:26:49] Epoch 1 | Step 4860 | Loss: 1.0644 | LR: 1.16e-06
|
| 529 |
+
[2026-04-25 18:26:52] Epoch 1 | Step 4870 | Loss: 1.0645 | LR: 1.12e-06
|
| 530 |
+
[2026-04-25 18:26:54] Epoch 1 | Step 4880 | Loss: 1.0643 | LR: 1.09e-06
|
| 531 |
+
[2026-04-25 18:26:57] Epoch 1 | Step 4890 | Loss: 1.0640 | LR: 1.06e-06
|
| 532 |
+
[2026-04-25 18:27:00] Epoch 1 | Step 4900 | Loss: 1.0640 | LR: 1.04e-06
|
| 533 |
+
[2026-04-25 18:27:02] Epoch 1 | Step 4910 | Loss: 1.0639 | LR: 1.03e-06
|
| 534 |
+
[2026-04-25 18:27:04] Epoch 1 | Step 4920 | Loss: 1.0638 | LR: 1.01e-06
|
| 535 |
+
[2026-04-25 18:27:07] Epoch 1 | Step 4930 | Loss: 1.0637 | LR: 1.00e-06
|
| 536 |
+
[2026-04-25 18:27:09] Epoch 1 | Step 4940 | Loss: 1.0636 | LR: 1.00e-06
|
| 537 |
+
[2026-04-25 18:27:12] Epoch 1 | Step 4950 | Loss: 1.0636 | LR: 1.00e-06
|
| 538 |
+
[2026-04-25 18:27:14] Epoch 1 | Step 4960 | Loss: 1.0635 | LR: 1.00e-06
|
| 539 |
+
[2026-04-25 18:27:17] Epoch 1 | Step 4970 | Loss: 1.0633 | LR: 1.00e-06
|
| 540 |
+
[2026-04-25 18:27:19] Epoch 1 | Step 4980 | Loss: 1.0632 | LR: 1.00e-06
|
| 541 |
+
[2026-04-25 18:27:21] Epoch 1 | Step 4990 | Loss: 1.0629 | LR: 1.00e-06
|
| 542 |
+
[2026-04-25 18:27:24] Epoch 1 | Step 5000 | Loss: 1.0631 | LR: 1.00e-06
|
| 543 |
+
[2026-04-25 18:27:26] Epoch 1 | Step 5010 | Loss: 1.0628 | LR: 1.00e-06
|
| 544 |
+
[2026-04-25 18:27:29] Epoch 1 | Step 5020 | Loss: 1.0626 | LR: 1.00e-06
|
| 545 |
+
[2026-04-25 18:27:31] Epoch 1 | Step 5030 | Loss: 1.0626 | LR: 1.00e-06
|
| 546 |
+
[2026-04-25 18:27:33] Epoch 1 | Step 5040 | Loss: 1.0624 | LR: 1.00e-06
|
| 547 |
+
[2026-04-25 18:27:36] Epoch 1 | Step 5050 | Loss: 1.0622 | LR: 1.00e-06
|
| 548 |
+
[2026-04-25 18:27:39] Epoch 1 | Step 5060 | Loss: 1.0621 | LR: 1.00e-06
|
| 549 |
+
[2026-04-25 18:27:41] Epoch 1 | Step 5070 | Loss: 1.0621 | LR: 1.00e-06
|
| 550 |
+
[2026-04-25 18:27:44] Epoch 1 | Step 5080 | Loss: 1.0622 | LR: 1.00e-06
|
| 551 |
+
[2026-04-25 18:27:46] Epoch 1 | Step 5090 | Loss: 1.0622 | LR: 1.00e-06
|
| 552 |
+
[2026-04-25 18:27:49] Epoch 1 | Step 5100 | Loss: 1.0620 | LR: 1.00e-06
|
| 553 |
+
[2026-04-25 18:27:51] Epoch 1 | Step 5110 | Loss: 1.0619 | LR: 1.00e-06
|
| 554 |
+
[2026-04-25 18:27:54] Epoch 1 | Step 5120 | Loss: 1.0620 | LR: 1.00e-06
|
| 555 |
+
[2026-04-25 18:27:57] Epoch 1 | Step 5130 | Loss: 1.0619 | LR: 1.00e-06
|
| 556 |
+
[2026-04-25 18:27:59] Epoch 1 | Step 5140 | Loss: 1.0617 | LR: 1.00e-06
|
| 557 |
+
[2026-04-25 18:28:01] Epoch 1 | Step 5150 | Loss: 1.0615 | LR: 1.00e-06
|
| 558 |
+
[2026-04-25 18:28:04] Epoch 1 | Step 5160 | Loss: 1.0610 | LR: 1.00e-06
|
| 559 |
+
[2026-04-25 18:28:06] Epoch 1 | Step 5170 | Loss: 1.0610 | LR: 1.00e-06
|
| 560 |
+
[2026-04-25 18:28:09] Epoch 1 | Step 5180 | Loss: 1.0608 | LR: 1.00e-06
|
| 561 |
+
[2026-04-25 18:28:12] Epoch 1 | Step 5190 | Loss: 1.0608 | LR: 1.00e-06
|
| 562 |
+
[2026-04-25 18:28:14] Epoch 1 | Step 5200 | Loss: 1.0607 | LR: 1.00e-06
|
| 563 |
+
[2026-04-25 18:28:17] Epoch 1 | Step 5210 | Loss: 1.0605 | LR: 1.00e-06
|
| 564 |
+
[2026-04-25 18:28:19] Epoch 1 | Step 5220 | Loss: 1.0605 | LR: 1.00e-06
|
| 565 |
+
[2026-04-25 18:28:22] Epoch 1 | Step 5230 | Loss: 1.0604 | LR: 1.00e-06
|
| 566 |
+
[2026-04-25 18:28:25] Epoch 1 | Step 5240 | Loss: 1.0603 | LR: 1.00e-06
|
| 567 |
+
[2026-04-25 18:28:27] Epoch 1 | Step 5250 | Loss: 1.0603 | LR: 1.00e-06
|
| 568 |
+
[2026-04-25 18:28:30] Epoch 1 | Step 5260 | Loss: 1.0602 | LR: 1.00e-06
|
| 569 |
+
[2026-04-25 18:28:32] Epoch 1 | Step 5270 | Loss: 1.0601 | LR: 1.00e-06
|
| 570 |
+
[2026-04-25 18:28:35] Epoch 1 | Step 5280 | Loss: 1.0598 | LR: 1.00e-06
|
| 571 |
+
[2026-04-25 18:28:37] Epoch 1 | Step 5290 | Loss: 1.0595 | LR: 1.00e-06
|
| 572 |
+
[2026-04-25 18:28:40] Epoch 1 | Step 5300 | Loss: 1.0594 | LR: 1.00e-06
|
| 573 |
+
[2026-04-25 18:28:42] Epoch 1 | Step 5310 | Loss: 1.0595 | LR: 1.00e-06
|
| 574 |
+
[2026-04-25 18:28:45] Epoch 1 | Step 5320 | Loss: 1.0593 | LR: 1.00e-06
|
| 575 |
+
[2026-04-25 18:28:47] Epoch 1 | Step 5330 | Loss: 1.0593 | LR: 1.00e-06
|
| 576 |
+
[2026-04-25 18:28:49] Epoch 1 | Step 5340 | Loss: 1.0591 | LR: 1.00e-06
|
| 577 |
+
[2026-04-25 18:28:52] Epoch 1 | Step 5350 | Loss: 1.0589 | LR: 1.00e-06
|
| 578 |
+
[2026-04-25 18:28:54] Epoch 1 | Step 5360 | Loss: 1.0591 | LR: 1.00e-06
|
| 579 |
+
[2026-04-25 18:28:57] Epoch 1 | Step 5370 | Loss: 1.0590 | LR: 1.00e-06
|
| 580 |
+
[2026-04-25 18:28:59] Epoch 1 | Step 5380 | Loss: 1.0588 | LR: 1.00e-06
|
| 581 |
+
[2026-04-25 18:29:02] Epoch 1 | Step 5390 | Loss: 1.0585 | LR: 1.00e-06
|
| 582 |
+
[2026-04-25 18:29:04] Epoch 1 | Step 5400 | Loss: 1.0583 | LR: 1.00e-06
|
| 583 |
+
[2026-04-25 18:29:07] Epoch 1 | Step 5410 | Loss: 1.0583 | LR: 1.00e-06
|
| 584 |
+
[2026-04-25 18:29:09] Epoch 1 | Step 5420 | Loss: 1.0580 | LR: 1.00e-06
|
| 585 |
+
[2026-04-25 18:29:12] Epoch 1 | Step 5430 | Loss: 1.0580 | LR: 1.00e-06
|
| 586 |
+
[2026-04-25 18:29:14] Epoch 1 | Step 5440 | Loss: 1.0580 | LR: 1.00e-06
|
| 587 |
+
[2026-04-25 18:29:17] Epoch 1 | Step 5450 | Loss: 1.0582 | LR: 1.00e-06
|
| 588 |
+
[2026-04-25 18:29:19] Epoch 1 | Step 5460 | Loss: 1.0580 | LR: 1.00e-06
|
| 589 |
+
[2026-04-25 18:29:22] Epoch 1 | Step 5470 | Loss: 1.0578 | LR: 1.00e-06
|
| 590 |
+
[2026-04-25 18:29:24] Epoch 1 | Step 5480 | Loss: 1.0578 | LR: 1.00e-06
|
| 591 |
+
[2026-04-25 18:29:27] Epoch 1 | Step 5490 | Loss: 1.0578 | LR: 1.00e-06
|
| 592 |
+
[2026-04-25 18:29:29] Epoch 1 | Step 5500 | Loss: 1.0577 | LR: 1.00e-06
|
| 593 |
+
[2026-04-25 18:29:32] Epoch 1 | Step 5510 | Loss: 1.0579 | LR: 1.00e-06
|
| 594 |
+
[2026-04-25 18:29:34] Epoch 1 | Step 5520 | Loss: 1.0578 | LR: 1.00e-06
|
| 595 |
+
[2026-04-25 18:29:37] Epoch 1 | Step 5530 | Loss: 1.0577 | LR: 1.00e-06
|
| 596 |
+
[2026-04-25 18:29:39] Epoch 1 | Step 5540 | Loss: 1.0573 | LR: 1.00e-06
|
| 597 |
+
[2026-04-25 18:29:42] Epoch 1 | Step 5550 | Loss: 1.0573 | LR: 1.00e-06
|
| 598 |
+
[2026-04-25 18:29:44] Epoch 1 | Step 5560 | Loss: 1.0572 | LR: 1.00e-06
|
| 599 |
+
[2026-04-25 18:29:47] Epoch 1 | Step 5570 | Loss: 1.0574 | LR: 1.00e-06
|
| 600 |
+
[2026-04-25 18:29:49] Epoch 1 | Step 5580 | Loss: 1.0572 | LR: 1.00e-06
|
| 601 |
+
[2026-04-25 18:29:52] Epoch 1 | Step 5590 | Loss: 1.0570 | LR: 1.00e-06
|
| 602 |
+
[2026-04-25 18:29:54] Epoch 1 | Step 5600 | Loss: 1.0571 | LR: 1.00e-06
|
| 603 |
+
[2026-04-25 18:29:57] Epoch 1 | Step 5610 | Loss: 1.0572 | LR: 1.00e-06
|
| 604 |
+
[2026-04-25 18:29:59] Epoch 1 | Step 5620 | Loss: 1.0570 | LR: 1.00e-06
|
| 605 |
+
[2026-04-25 18:30:02] Epoch 1 | Step 5630 | Loss: 1.0571 | LR: 1.00e-06
|
| 606 |
+
[2026-04-25 18:30:04] Epoch 1 | Step 5640 | Loss: 1.0570 | LR: 1.00e-06
|
| 607 |
+
[2026-04-25 18:30:07] Epoch 1 | Step 5650 | Loss: 1.0569 | LR: 1.00e-06
|
| 608 |
+
[2026-04-25 18:30:09] Epoch 1 | Step 5660 | Loss: 1.0567 | LR: 1.00e-06
|
| 609 |
+
[2026-04-25 18:30:12] Epoch 1 | Step 5670 | Loss: 1.0566 | LR: 1.00e-06
|
| 610 |
+
[2026-04-25 18:30:14] Epoch 1 | Step 5680 | Loss: 1.0564 | LR: 1.00e-06
|
| 611 |
+
[2026-04-25 18:30:17] Epoch 1 | Step 5690 | Loss: 1.0564 | LR: 1.00e-06
|
| 612 |
+
[2026-04-25 18:30:19] Epoch 1 | Step 5700 | Loss: 1.0563 | LR: 1.00e-06
|
| 613 |
+
[2026-04-25 18:30:22] Epoch 1 | Step 5710 | Loss: 1.0563 | LR: 1.00e-06
|
| 614 |
+
[2026-04-25 18:30:25] Epoch 1 | Step 5720 | Loss: 1.0564 | LR: 1.00e-06
|
| 615 |
+
[2026-04-25 18:30:27] Epoch 1 | Step 5730 | Loss: 1.0563 | LR: 1.00e-06
|
| 616 |
+
[2026-04-25 18:30:30] Epoch 1 | Step 5740 | Loss: 1.0563 | LR: 1.00e-06
|
| 617 |
+
[2026-04-25 18:30:32] Epoch 1 | Step 5750 | Loss: 1.0562 | LR: 1.00e-06
|
| 618 |
+
[2026-04-25 18:30:35] Epoch 1 | Step 5760 | Loss: 1.0562 | LR: 1.00e-06
|
| 619 |
+
[2026-04-25 18:30:37] Epoch 1 | Step 5770 | Loss: 1.0562 | LR: 1.00e-06
|
| 620 |
+
[2026-04-25 18:30:40] Epoch 1 | Step 5780 | Loss: 1.0560 | LR: 1.00e-06
|
| 621 |
+
[2026-04-25 18:30:42] Epoch 1 | Step 5790 | Loss: 1.0561 | LR: 1.00e-06
|
| 622 |
+
[2026-04-25 18:30:45] Epoch 1 | Step 5800 | Loss: 1.0563 | LR: 1.00e-06
|
| 623 |
+
[2026-04-25 18:30:47] Epoch 1 | Step 5810 | Loss: 1.0562 | LR: 1.00e-06
|
| 624 |
+
[2026-04-25 18:30:50] Epoch 1 | Step 5820 | Loss: 1.0560 | LR: 1.00e-06
|
| 625 |
+
[2026-04-25 18:30:52] Epoch 1 | Step 5830 | Loss: 1.0559 | LR: 1.00e-06
|
| 626 |
+
[2026-04-25 18:30:55] Epoch 1 | Step 5840 | Loss: 1.0560 | LR: 1.00e-06
|
| 627 |
+
[2026-04-25 18:30:57] Epoch 1 | Step 5850 | Loss: 1.0561 | LR: 1.00e-06
|
| 628 |
+
[2026-04-25 18:31:00] Epoch 1 | Step 5860 | Loss: 1.0560 | LR: 1.00e-06
|
| 629 |
+
[2026-04-25 18:31:02] Epoch 1 | Step 5870 | Loss: 1.0559 | LR: 1.00e-06
|
| 630 |
+
[2026-04-25 18:31:05] Epoch 1 | Step 5880 | Loss: 1.0560 | LR: 1.00e-06
|
| 631 |
+
[2026-04-25 18:31:07] Epoch 1 | Step 5890 | Loss: 1.0560 | LR: 1.00e-06
|
| 632 |
+
[2026-04-25 18:31:10] Epoch 1 | Step 5900 | Loss: 1.0558 | LR: 1.00e-06
|
| 633 |
+
[2026-04-25 18:31:13] Epoch 1 | Step 5910 | Loss: 1.0558 | LR: 1.00e-06
|
| 634 |
+
[2026-04-25 18:31:15] Epoch 1 | Step 5920 | Loss: 1.0555 | LR: 1.00e-06
|
| 635 |
+
[2026-04-25 18:31:18] Epoch 1 | Step 5930 | Loss: 1.0556 | LR: 1.00e-06
|
| 636 |
+
[2026-04-25 18:31:21] Epoch 1 | Step 5940 | Loss: 1.0555 | LR: 1.00e-06
|
| 637 |
+
[2026-04-25 18:31:23] Epoch 1 | Step 5950 | Loss: 1.0555 | LR: 1.00e-06
|
| 638 |
+
[2026-04-25 18:31:26] Epoch 1 | Step 5960 | Loss: 1.0555 | LR: 1.00e-06
|
| 639 |
+
[2026-04-25 18:31:28] Epoch 1 | Step 5970 | Loss: 1.0556 | LR: 1.00e-06
|
| 640 |
+
[2026-04-25 18:31:31] Epoch 1 | Step 5980 | Loss: 1.0555 | LR: 1.00e-06
|
| 641 |
+
[2026-04-25 18:31:33] Epoch 1 | Step 5990 | Loss: 1.0557 | LR: 1.00e-06
|
| 642 |
+
[2026-04-25 18:31:36] Epoch 1 | Step 6000 | Loss: 1.0555 | LR: 1.00e-06
|
| 643 |
+
[2026-04-25 18:31:36] Validation | Batch 10/84 | Loss: 0.9832
|
| 644 |
+
[2026-04-25 18:31:37] Validation | Batch 20/84 | Loss: 0.9809
|
| 645 |
+
[2026-04-25 18:31:37] Validation | Batch 30/84 | Loss: 1.0582
|
| 646 |
+
[2026-04-25 18:31:38] Validation | Batch 40/84 | Loss: 1.0622
|
| 647 |
+
[2026-04-25 18:31:38] Validation | Batch 50/84 | Loss: 1.0617
|
| 648 |
+
[2026-04-25 18:31:39] Validation | Batch 60/84 | Loss: 1.0346
|
| 649 |
+
[2026-04-25 18:31:39] Validation | Batch 70/84 | Loss: 1.0156
|
| 650 |
+
[2026-04-25 18:31:39] Validation | Batch 80/84 | Loss: 1.0225
|
| 651 |
+
[2026-04-25 18:31:40] Validation | Batch 84/84 | Loss: 1.0160
|
| 652 |
+
[2026-04-25 18:31:40] Validation | Loss: 1.0160 | PPL: 2.84 | Time: 3.75s
|
| 653 |
+
[2026-04-25 18:31:43] New best model saved! Val loss: 1.0160
|
| 654 |
+
[2026-04-25 18:31:45] Epoch 1 | Step 6010 | Loss: 1.0555 | LR: 1.00e-06
|
| 655 |
+
[2026-04-25 18:31:48] Epoch 1 | Step 6020 | Loss: 1.0553 | LR: 1.00e-06
|
| 656 |
+
[2026-04-25 18:31:50] Epoch 1 | Step 6030 | Loss: 1.0554 | LR: 1.00e-06
|
| 657 |
+
[2026-04-25 18:31:53] Epoch 1 | Step 6040 | Loss: 1.0554 | LR: 1.00e-06
|
| 658 |
+
[2026-04-25 18:31:55] Epoch 1 | Step 6050 | Loss: 1.0555 | LR: 1.00e-06
|
| 659 |
+
[2026-04-25 18:31:58] Epoch 1 | Step 6060 | Loss: 1.0554 | LR: 1.00e-06
|
| 660 |
+
[2026-04-25 18:32:01] Epoch 1 | Step 6070 | Loss: 1.0552 | LR: 1.00e-06
|
| 661 |
+
[2026-04-25 18:32:03] Epoch 1 | Step 6080 | Loss: 1.0553 | LR: 1.00e-06
|
| 662 |
+
[2026-04-25 18:32:06] Epoch 1 | Step 6090 | Loss: 1.0553 | LR: 1.00e-06
|
| 663 |
+
[2026-04-25 18:32:08] Epoch 1 | Step 6100 | Loss: 1.0554 | LR: 1.00e-06
|
| 664 |
+
[2026-04-25 18:32:11] Epoch 1 | Step 6110 | Loss: 1.0554 | LR: 1.00e-06
|
| 665 |
+
[2026-04-25 18:32:13] Epoch 1 | Step 6120 | Loss: 1.0553 | LR: 1.00e-06
|
| 666 |
+
[2026-04-25 18:32:16] Epoch 1 | Step 6130 | Loss: 1.0552 | LR: 1.00e-06
|
| 667 |
+
[2026-04-25 18:32:18] Epoch 1 | Step 6140 | Loss: 1.0548 | LR: 1.00e-06
|
| 668 |
+
[2026-04-25 18:32:21] Epoch 1 | Step 6150 | Loss: 1.0547 | LR: 1.00e-06
|
| 669 |
+
[2026-04-25 18:32:23] Epoch 1 | Step 6160 | Loss: 1.0547 | LR: 1.00e-06
|
| 670 |
+
[2026-04-25 18:32:26] Epoch 1 | Step 6170 | Loss: 1.0548 | LR: 1.00e-06
|
| 671 |
+
[2026-04-25 18:32:28] Epoch 1 | Step 6180 | Loss: 1.0546 | LR: 1.00e-06
|
| 672 |
+
[2026-04-25 18:32:31] Epoch 1 | Step 6190 | Loss: 1.0543 | LR: 1.00e-06
|
| 673 |
+
[2026-04-25 18:32:33] Epoch 1 | Step 6200 | Loss: 1.0542 | LR: 1.00e-06
|
| 674 |
+
[2026-04-25 18:32:36] Epoch 1 | Step 6210 | Loss: 1.0542 | LR: 1.00e-06
|
| 675 |
+
[2026-04-25 18:32:39] Epoch 1 | Step 6220 | Loss: 1.0543 | LR: 1.00e-06
|
| 676 |
+
[2026-04-25 18:32:41] Epoch 1 | Step 6230 | Loss: 1.0541 | LR: 1.00e-06
|
| 677 |
+
[2026-04-25 18:32:44] Epoch 1 | Step 6240 | Loss: 1.0541 | LR: 1.00e-06
|
| 678 |
+
[2026-04-25 18:32:46] Epoch 1 | Step 6250 | Loss: 1.0538 | LR: 1.00e-06
|
| 679 |
+
[2026-04-25 18:32:49] Epoch 1 | Step 6260 | Loss: 1.0538 | LR: 1.00e-06
|
| 680 |
+
[2026-04-25 18:32:51] Epoch 1 | Step 6270 | Loss: 1.0537 | LR: 1.00e-06
|
| 681 |
+
[2026-04-25 18:32:54] Epoch 1 | Step 6280 | Loss: 1.0535 | LR: 1.00e-06
|
| 682 |
+
[2026-04-25 18:32:56] Epoch 1 | Step 6290 | Loss: 1.0534 | LR: 1.00e-06
|
| 683 |
+
[2026-04-25 18:32:59] Epoch 1 | Step 6300 | Loss: 1.0534 | LR: 1.00e-06
|
| 684 |
+
[2026-04-25 18:33:01] Epoch 1 | Step 6310 | Loss: 1.0535 | LR: 1.00e-06
|
| 685 |
+
[2026-04-25 18:33:04] Epoch 1 | Step 6320 | Loss: 1.0534 | LR: 1.00e-06
|
| 686 |
+
[2026-04-25 18:33:07] Epoch 1 | Step 6330 | Loss: 1.0536 | LR: 1.00e-06
|
| 687 |
+
[2026-04-25 18:33:09] Epoch 1 | Step 6340 | Loss: 1.0536 | LR: 1.00e-06
|
| 688 |
+
[2026-04-25 18:33:11] Epoch 1 | Step 6350 | Loss: 1.0536 | LR: 1.00e-06
|
| 689 |
+
[2026-04-25 18:33:14] Epoch 1 | Step 6360 | Loss: 1.0536 | LR: 1.00e-06
|
| 690 |
+
[2026-04-25 18:33:16] Epoch 1 | Step 6370 | Loss: 1.0536 | LR: 1.00e-06
|
| 691 |
+
[2026-04-25 18:33:19] Epoch 1 | Step 6380 | Loss: 1.0536 | LR: 1.00e-06
|
| 692 |
+
[2026-04-25 18:33:21] Epoch 1 | Step 6390 | Loss: 1.0534 | LR: 1.00e-06
|
| 693 |
+
[2026-04-25 18:33:24] Epoch 1 | Step 6400 | Loss: 1.0534 | LR: 1.00e-06
|
| 694 |
+
[2026-04-25 18:33:26] Epoch 1 | Step 6410 | Loss: 1.0533 | LR: 1.00e-06
|
| 695 |
+
[2026-04-25 18:33:28] Epoch 1 | Step 6420 | Loss: 1.0531 | LR: 1.00e-06
|
| 696 |
+
[2026-04-25 18:33:31] Epoch 1 | Step 6430 | Loss: 1.0530 | LR: 1.00e-06
|
| 697 |
+
[2026-04-25 18:33:33] Epoch 1 | Step 6440 | Loss: 1.0530 | LR: 1.00e-06
|
| 698 |
+
[2026-04-25 18:33:35] Epoch 1 | Step 6450 | Loss: 1.0529 | LR: 1.00e-06
|
| 699 |
+
[2026-04-25 18:33:38] Epoch 1 | Step 6460 | Loss: 1.0525 | LR: 1.00e-06
|
| 700 |
+
[2026-04-25 18:33:41] Epoch 1 | Step 6470 | Loss: 1.0525 | LR: 1.00e-06
|
| 701 |
+
[2026-04-25 18:33:43] Epoch 1 | Step 6480 | Loss: 1.0526 | LR: 1.00e-06
|
| 702 |
+
[2026-04-25 18:33:45] Epoch 1 | Step 6490 | Loss: 1.0527 | LR: 1.00e-06
|
| 703 |
+
[2026-04-25 18:33:48] Epoch 1 | Step 6500 | Loss: 1.0524 | LR: 1.00e-06
|
| 704 |
+
[2026-04-25 18:33:50] Epoch 1 | Step 6510 | Loss: 1.0523 | LR: 1.00e-06
|
| 705 |
+
[2026-04-25 18:33:53] Epoch 1 | Step 6520 | Loss: 1.0521 | LR: 1.00e-06
|
| 706 |
+
[2026-04-25 18:33:55] Epoch 1 | Step 6530 | Loss: 1.0518 | LR: 1.00e-06
|
| 707 |
+
[2026-04-25 18:33:58] Epoch 1 | Step 6540 | Loss: 1.0518 | LR: 1.00e-06
|
| 708 |
+
[2026-04-25 18:34:00] Epoch 1 | Step 6550 | Loss: 1.0516 | LR: 1.00e-06
|
| 709 |
+
[2026-04-25 18:34:02] Epoch 1 | Step 6560 | Loss: 1.0515 | LR: 1.00e-06
|
| 710 |
+
[2026-04-25 18:34:05] Epoch 1 | Step 6570 | Loss: 1.0515 | LR: 1.00e-06
|
| 711 |
+
[2026-04-25 18:34:07] Epoch 1 | Step 6580 | Loss: 1.0515 | LR: 1.00e-06
|
| 712 |
+
[2026-04-25 18:34:10] Epoch 1 | Step 6590 | Loss: 1.0514 | LR: 1.00e-06
|
| 713 |
+
[2026-04-25 18:34:13] Epoch 1 | Step 6600 | Loss: 1.0513 | LR: 1.00e-06
|
| 714 |
+
[2026-04-25 18:34:15] Epoch 1 | Step 6610 | Loss: 1.0512 | LR: 1.00e-06
|
| 715 |
+
[2026-04-25 18:34:18] Epoch 1 | Step 6620 | Loss: 1.0511 | LR: 1.00e-06
|
| 716 |
+
[2026-04-25 18:34:20] Epoch 1 | Step 6630 | Loss: 1.0510 | LR: 1.00e-06
|
| 717 |
+
[2026-04-25 18:34:23] Epoch 1 | Step 6640 | Loss: 1.0510 | LR: 1.00e-06
|
| 718 |
+
[2026-04-25 18:34:25] Epoch 1 | Step 6650 | Loss: 1.0511 | LR: 1.00e-06
|
| 719 |
+
[2026-04-25 18:34:27] Epoch 1 | Step 6660 | Loss: 1.0508 | LR: 1.00e-06
|
| 720 |
+
[2026-04-25 18:34:30] Epoch 1 | Step 6670 | Loss: 1.0508 | LR: 1.00e-06
|
| 721 |
+
[2026-04-25 18:34:33] Epoch 1 | Step 6680 | Loss: 1.0508 | LR: 1.00e-06
|
| 722 |
+
[2026-04-25 18:34:35] Epoch 1 | Step 6690 | Loss: 1.0507 | LR: 1.00e-06
|
| 723 |
+
[2026-04-25 18:34:38] Epoch 1 | Step 6700 | Loss: 1.0507 | LR: 1.00e-06
|
| 724 |
+
[2026-04-25 18:34:40] Epoch 1 | Step 6710 | Loss: 1.0507 | LR: 1.00e-06
|
| 725 |
+
[2026-04-25 18:34:42] Epoch 1 | Step 6720 | Loss: 1.0506 | LR: 1.00e-06
|
| 726 |
+
[2026-04-25 18:34:45] Epoch 1 | Step 6730 | Loss: 1.0508 | LR: 1.00e-06
|
| 727 |
+
[2026-04-25 18:34:48] Epoch 1 | Step 6740 | Loss: 1.0506 | LR: 1.00e-06
|
| 728 |
+
[2026-04-25 18:34:50] Epoch 1 | Step 6750 | Loss: 1.0504 | LR: 1.00e-06
|
| 729 |
+
[2026-04-25 18:34:52] Epoch 1 | Step 6760 | Loss: 1.0505 | LR: 1.00e-06
|
| 730 |
+
[2026-04-25 18:34:55] Epoch 1 | Step 6770 | Loss: 1.0504 | LR: 1.00e-06
|
| 731 |
+
[2026-04-25 18:34:57] Epoch 1 | Step 6780 | Loss: 1.0503 | LR: 1.00e-06
|
| 732 |
+
[2026-04-25 18:35:00] Epoch 1 | Step 6790 | Loss: 1.0504 | LR: 1.00e-06
|
| 733 |
+
[2026-04-25 18:35:02] Epoch 1 | Step 6800 | Loss: 1.0506 | LR: 1.00e-06
|
| 734 |
+
[2026-04-25 18:35:05] Epoch 1 | Step 6810 | Loss: 1.0506 | LR: 1.00e-06
|
| 735 |
+
[2026-04-25 18:35:07] Epoch 1 | Step 6820 | Loss: 1.0507 | LR: 1.00e-06
|
| 736 |
+
[2026-04-25 18:35:10] Epoch 1 | Step 6830 | Loss: 1.0508 | LR: 1.00e-06
|
| 737 |
+
[2026-04-25 18:35:12] Epoch 1 | Step 6840 | Loss: 1.0509 | LR: 1.00e-06
|
| 738 |
+
[2026-04-25 18:35:15] Epoch 1 | Step 6850 | Loss: 1.0509 | LR: 1.00e-06
|
| 739 |
+
[2026-04-25 18:35:17] Epoch 1 | Step 6860 | Loss: 1.0508 | LR: 1.00e-06
|
| 740 |
+
[2026-04-25 18:35:20] Epoch 1 | Step 6870 | Loss: 1.0507 | LR: 1.00e-06
|
| 741 |
+
[2026-04-25 18:35:22] Epoch 1 | Step 6880 | Loss: 1.0507 | LR: 1.00e-06
|
| 742 |
+
[2026-04-25 18:35:25] Epoch 1 | Step 6890 | Loss: 1.0508 | LR: 1.00e-06
|
| 743 |
+
[2026-04-25 18:35:27] Epoch 1 | Step 6900 | Loss: 1.0508 | LR: 1.00e-06
|
| 744 |
+
[2026-04-25 18:35:30] Epoch 1 | Step 6910 | Loss: 1.0504 | LR: 1.00e-06
|
| 745 |
+
[2026-04-25 18:35:32] Epoch 1 | Step 6920 | Loss: 1.0504 | LR: 1.00e-06
|
| 746 |
+
[2026-04-25 18:35:35] Epoch 1 | Step 6930 | Loss: 1.0504 | LR: 1.00e-06
|
| 747 |
+
[2026-04-25 18:35:38] Epoch 1 | Step 6940 | Loss: 1.0503 | LR: 1.00e-06
|
| 748 |
+
[2026-04-25 18:35:40] Epoch 1 | Step 6950 | Loss: 1.0502 | LR: 1.00e-06
|
| 749 |
+
[2026-04-25 18:35:43] Epoch 1 | Step 6960 | Loss: 1.0502 | LR: 1.00e-06
|
| 750 |
+
[2026-04-25 18:35:45] Epoch 1 | Step 6970 | Loss: 1.0501 | LR: 1.00e-06
|
| 751 |
+
[2026-04-25 18:35:47] Epoch 1 | Step 6980 | Loss: 1.0501 | LR: 1.00e-06
|
| 752 |
+
[2026-04-25 18:35:50] Epoch 1 | Step 6990 | Loss: 1.0498 | LR: 1.00e-06
|
| 753 |
+
[2026-04-25 18:35:52] Epoch 1 | Step 7000 | Loss: 1.0497 | LR: 1.00e-06
|
| 754 |
+
[2026-04-25 18:35:55] Epoch 1 | Step 7010 | Loss: 1.0496 | LR: 1.00e-06
|
| 755 |
+
[2026-04-25 18:35:57] Epoch 1 | Step 7020 | Loss: 1.0497 | LR: 1.00e-06
|
| 756 |
+
[2026-04-25 18:35:59] Epoch 1 | Step 7030 | Loss: 1.0496 | LR: 1.00e-06
|
| 757 |
+
[2026-04-25 18:36:02] Epoch 1 | Step 7040 | Loss: 1.0496 | LR: 1.00e-06
|
| 758 |
+
[2026-04-25 18:36:04] Epoch 1 | Step 7050 | Loss: 1.0495 | LR: 1.00e-06
|
| 759 |
+
[2026-04-25 18:36:07] Epoch 1 | Step 7060 | Loss: 1.0494 | LR: 1.00e-06
|
| 760 |
+
[2026-04-25 18:36:09] Epoch 1 | Step 7070 | Loss: 1.0495 | LR: 1.00e-06
|
| 761 |
+
[2026-04-25 18:36:12] Epoch 1 | Step 7080 | Loss: 1.0494 | LR: 1.00e-06
|
| 762 |
+
[2026-04-25 18:36:14] Epoch 1 | Step 7090 | Loss: 1.0494 | LR: 1.00e-06
|
| 763 |
+
[2026-04-25 18:36:17] Epoch 1 | Step 7100 | Loss: 1.0492 | LR: 1.00e-06
|
| 764 |
+
[2026-04-25 18:36:20] Epoch 1 | Step 7110 | Loss: 1.0491 | LR: 1.00e-06
|
| 765 |
+
[2026-04-25 18:36:22] Epoch 1 | Step 7120 | Loss: 1.0492 | LR: 1.00e-06
|
| 766 |
+
[2026-04-25 18:36:25] Epoch 1 | Step 7130 | Loss: 1.0490 | LR: 1.00e-06
|
| 767 |
+
[2026-04-25 18:36:27] Epoch 1 | Step 7140 | Loss: 1.0489 | LR: 1.00e-06
|
| 768 |
+
[2026-04-25 18:36:29] Epoch 1 | Step 7150 | Loss: 1.0490 | LR: 1.00e-06
|
| 769 |
+
[2026-04-25 18:36:32] Epoch 1 | Step 7160 | Loss: 1.0488 | LR: 1.00e-06
|
| 770 |
+
[2026-04-25 18:36:35] Epoch 1 | Step 7170 | Loss: 1.0488 | LR: 1.00e-06
|
| 771 |
+
[2026-04-25 18:36:37] Epoch 1 | Step 7180 | Loss: 1.0488 | LR: 1.00e-06
|
| 772 |
+
[2026-04-25 18:36:39] Epoch 1 | Step 7190 | Loss: 1.0489 | LR: 1.00e-06
|
| 773 |
+
[2026-04-25 18:36:42] Epoch 1 | Step 7200 | Loss: 1.0487 | LR: 1.00e-06
|
| 774 |
+
[2026-04-25 18:36:45] Epoch 1 | Step 7210 | Loss: 1.0486 | LR: 1.00e-06
|
| 775 |
+
[2026-04-25 18:36:47] Epoch 1 | Step 7220 | Loss: 1.0487 | LR: 1.00e-06
|
| 776 |
+
[2026-04-25 18:36:50] Epoch 1 | Step 7230 | Loss: 1.0487 | LR: 1.00e-06
|
| 777 |
+
[2026-04-25 18:36:52] Epoch 1 | Step 7240 | Loss: 1.0486 | LR: 1.00e-06
|
| 778 |
+
[2026-04-25 18:36:55] Epoch 1 | Step 7250 | Loss: 1.0485 | LR: 1.00e-06
|
| 779 |
+
[2026-04-25 18:36:58] Epoch 1 | Step 7260 | Loss: 1.0485 | LR: 1.00e-06
|
| 780 |
+
[2026-04-25 18:37:00] Epoch 1 | Step 7270 | Loss: 1.0486 | LR: 1.00e-06
|
| 781 |
+
[2026-04-25 18:37:03] Epoch 1 | Step 7280 | Loss: 1.0486 | LR: 1.00e-06
|
| 782 |
+
[2026-04-25 18:37:05] Epoch 1 | Step 7290 | Loss: 1.0484 | LR: 1.00e-06
|
| 783 |
+
[2026-04-25 18:37:08] Epoch 1 | Step 7300 | Loss: 1.0483 | LR: 1.00e-06
|
| 784 |
+
[2026-04-25 18:37:10] Epoch 1 | Step 7310 | Loss: 1.0481 | LR: 1.00e-06
|
| 785 |
+
[2026-04-25 18:37:13] Epoch 1 | Step 7320 | Loss: 1.0479 | LR: 1.00e-06
|
| 786 |
+
[2026-04-25 18:37:15] Epoch 1 | Step 7330 | Loss: 1.0479 | LR: 1.00e-06
|
| 787 |
+
[2026-04-25 18:37:18] Epoch 1 | Step 7340 | Loss: 1.0480 | LR: 1.00e-06
|
| 788 |
+
[2026-04-25 18:37:21] Epoch 1 | Step 7350 | Loss: 1.0481 | LR: 1.00e-06
|
| 789 |
+
[2026-04-25 18:37:23] Epoch 1 | Step 7360 | Loss: 1.0480 | LR: 1.00e-06
|
| 790 |
+
[2026-04-25 18:37:26] Epoch 1 | Step 7370 | Loss: 1.0478 | LR: 1.00e-06
|
| 791 |
+
[2026-04-25 18:37:28] Epoch 1 | Step 7380 | Loss: 1.0477 | LR: 1.00e-06
|
| 792 |
+
[2026-04-25 18:37:31] Epoch 1 | Step 7390 | Loss: 1.0475 | LR: 1.00e-06
|
| 793 |
+
[2026-04-25 18:37:33] Epoch 1 | Step 7400 | Loss: 1.0474 | LR: 1.00e-06
|
| 794 |
+
[2026-04-25 18:37:36] Epoch 1 | Step 7410 | Loss: 1.0475 | LR: 1.00e-06
|
| 795 |
+
[2026-04-25 18:37:38] Epoch 1 | Step 7420 | Loss: 1.0475 | LR: 1.00e-06
|
| 796 |
+
[2026-04-25 18:37:41] Epoch 1 | Step 7430 | Loss: 1.0474 | LR: 1.00e-06
|
| 797 |
+
[2026-04-25 18:37:43] Epoch 1 | Step 7440 | Loss: 1.0474 | LR: 1.00e-06
|
| 798 |
+
[2026-04-25 18:37:46] Epoch 1 | Step 7450 | Loss: 1.0473 | LR: 1.00e-06
|
| 799 |
+
[2026-04-25 18:37:48] Epoch 1 | Step 7460 | Loss: 1.0472 | LR: 1.00e-06
|
| 800 |
+
[2026-04-25 18:37:51] Epoch 1 | Step 7470 | Loss: 1.0472 | LR: 1.00e-06
|
| 801 |
+
[2026-04-25 18:37:53] Epoch 1 | Step 7480 | Loss: 1.0472 | LR: 1.00e-06
|
| 802 |
+
[2026-04-25 18:37:56] Epoch 1 | Step 7490 | Loss: 1.0473 | LR: 1.00e-06
|
| 803 |
+
[2026-04-25 18:37:58] Epoch 1 | Step 7500 | Loss: 1.0473 | LR: 1.00e-06
|
| 804 |
+
[2026-04-25 18:38:01] Epoch 1 | Step 7510 | Loss: 1.0474 | LR: 1.00e-06
|
| 805 |
+
[2026-04-25 18:38:03] Epoch 1 | Step 7520 | Loss: 1.0473 | LR: 1.00e-06
|
| 806 |
+
[2026-04-25 18:38:06] Epoch 1 | Step 7530 | Loss: 1.0472 | LR: 1.00e-06
|
| 807 |
+
[2026-04-25 18:38:08] Epoch 1 | Step 7540 | Loss: 1.0471 | LR: 1.00e-06
|
| 808 |
+
[2026-04-25 18:38:11] Epoch 1 | Step 7550 | Loss: 1.0472 | LR: 1.00e-06
|
| 809 |
+
[2026-04-25 18:38:13] Epoch 1 | Step 7560 | Loss: 1.0472 | LR: 1.00e-06
|
| 810 |
+
[2026-04-25 18:38:16] Epoch 1 | Step 7570 | Loss: 1.0471 | LR: 1.00e-06
|
| 811 |
+
[2026-04-25 18:38:18] Epoch 1 | Step 7580 | Loss: 1.0471 | LR: 1.00e-06
|
| 812 |
+
[2026-04-25 18:38:20] Epoch 1 | Step 7590 | Loss: 1.0470 | LR: 1.00e-06
|
| 813 |
+
[2026-04-25 18:38:23] Epoch 1 | Step 7600 | Loss: 1.0468 | LR: 1.00e-06
|
| 814 |
+
[2026-04-25 18:38:26] Epoch 1 | Step 7610 | Loss: 1.0468 | LR: 1.00e-06
|
| 815 |
+
[2026-04-25 18:38:28] Epoch 1 | Step 7620 | Loss: 1.0466 | LR: 1.00e-06
|
| 816 |
+
[2026-04-25 18:38:31] Epoch 1 | Step 7630 | Loss: 1.0465 | LR: 1.00e-06
|
| 817 |
+
[2026-04-25 18:38:33] Epoch 1 | Step 7640 | Loss: 1.0464 | LR: 1.00e-06
|
| 818 |
+
[2026-04-25 18:38:36] Epoch 1 | Step 7650 | Loss: 1.0463 | LR: 1.00e-06
|
| 819 |
+
[2026-04-25 18:38:39] Epoch 1 | Step 7660 | Loss: 1.0462 | LR: 1.00e-06
|
| 820 |
+
[2026-04-25 18:38:41] Epoch 1 | Step 7670 | Loss: 1.0460 | LR: 1.00e-06
|
| 821 |
+
[2026-04-25 18:38:44] Epoch 1 | Step 7680 | Loss: 1.0460 | LR: 1.00e-06
|
| 822 |
+
[2026-04-25 18:38:46] Epoch 1 | Step 7690 | Loss: 1.0461 | LR: 1.00e-06
|
| 823 |
+
[2026-04-25 18:38:49] Epoch 1 | Step 7700 | Loss: 1.0460 | LR: 1.00e-06
|
| 824 |
+
[2026-04-25 18:38:52] Epoch 1 | Step 7710 | Loss: 1.0458 | LR: 1.00e-06
|
| 825 |
+
[2026-04-25 18:38:54] Epoch 1 | Step 7720 | Loss: 1.0459 | LR: 1.00e-06
|
| 826 |
+
[2026-04-25 18:38:57] Epoch 1 | Step 7730 | Loss: 1.0460 | LR: 1.00e-06
|
| 827 |
+
[2026-04-25 18:38:59] Epoch 1 | Step 7740 | Loss: 1.0461 | LR: 1.00e-06
|
| 828 |
+
[2026-04-25 18:39:02] Epoch 1 | Step 7750 | Loss: 1.0461 | LR: 1.00e-06
|
| 829 |
+
[2026-04-25 18:39:05] Epoch 1 | Step 7760 | Loss: 1.0459 | LR: 1.00e-06
|
| 830 |
+
[2026-04-25 18:39:07] Epoch 1 | Step 7770 | Loss: 1.0458 | LR: 1.00e-06
|
| 831 |
+
[2026-04-25 18:39:09] Epoch 1 | Step 7780 | Loss: 1.0457 | LR: 1.00e-06
|
| 832 |
+
[2026-04-25 18:39:12] Epoch 1 | Step 7790 | Loss: 1.0456 | LR: 1.00e-06
|
| 833 |
+
[2026-04-25 18:39:14] Epoch 1 | Step 7800 | Loss: 1.0455 | LR: 1.00e-06
|
| 834 |
+
[2026-04-25 18:39:17] Epoch 1 | Step 7810 | Loss: 1.0456 | LR: 1.00e-06
|
| 835 |
+
[2026-04-25 18:39:20] Epoch 1 | Step 7820 | Loss: 1.0457 | LR: 1.00e-06
|
| 836 |
+
[2026-04-25 18:39:22] Epoch 1 | Step 7830 | Loss: 1.0456 | LR: 1.00e-06
|
| 837 |
+
[2026-04-25 18:39:25] Epoch 1 | Step 7840 | Loss: 1.0455 | LR: 1.00e-06
|
| 838 |
+
[2026-04-25 18:39:27] Epoch 1 | Step 7850 | Loss: 1.0452 | LR: 1.00e-06
|
| 839 |
+
[2026-04-25 18:39:30] Epoch 1 | Step 7860 | Loss: 1.0453 | LR: 1.00e-06
|
| 840 |
+
[2026-04-25 18:39:32] Epoch 1 | Step 7870 | Loss: 1.0451 | LR: 1.00e-06
|
| 841 |
+
[2026-04-25 18:39:35] Epoch 1 | Step 7880 | Loss: 1.0452 | LR: 1.00e-06
|
| 842 |
+
[2026-04-25 18:39:38] Epoch 1 | Step 7890 | Loss: 1.0452 | LR: 1.00e-06
|
| 843 |
+
[2026-04-25 18:39:41] Epoch 1 | Step 7900 | Loss: 1.0452 | LR: 1.00e-06
|
| 844 |
+
[2026-04-25 18:39:44] Epoch 1 | Step 7910 | Loss: 1.0452 | LR: 1.00e-06
|
| 845 |
+
[2026-04-25 18:39:46] Epoch 1 | Step 7920 | Loss: 1.0452 | LR: 1.00e-06
|
| 846 |
+
[2026-04-25 18:39:49] Epoch 1 | Step 7930 | Loss: 1.0453 | LR: 1.00e-06
|
| 847 |
+
[2026-04-25 18:39:51] Epoch 1 | Step 7940 | Loss: 1.0453 | LR: 1.00e-06
|
| 848 |
+
[2026-04-25 18:39:54] Epoch 1 | Step 7950 | Loss: 1.0455 | LR: 1.00e-06
|
| 849 |
+
[2026-04-25 18:39:56] Epoch 1 | Step 7960 | Loss: 1.0455 | LR: 1.00e-06
|
| 850 |
+
[2026-04-25 18:39:58] Epoch 1 | Step 7970 | Loss: 1.0455 | LR: 1.00e-06
|
| 851 |
+
[2026-04-25 18:40:01] Epoch 1 | Step 7980 | Loss: 1.0454 | LR: 1.00e-06
|
| 852 |
+
[2026-04-25 18:40:03] Epoch 1 | Step 7990 | Loss: 1.0454 | LR: 1.00e-06
|
| 853 |
+
[2026-04-25 18:40:06] Epoch 1 | Step 8000 | Loss: 1.0453 | LR: 1.00e-06
|
| 854 |
+
[2026-04-25 18:40:06] Validation | Batch 10/84 | Loss: 0.9831
|
| 855 |
+
[2026-04-25 18:40:07] Validation | Batch 20/84 | Loss: 0.9812
|
| 856 |
+
[2026-04-25 18:40:07] Validation | Batch 30/84 | Loss: 1.0587
|
| 857 |
+
[2026-04-25 18:40:08] Validation | Batch 40/84 | Loss: 1.0625
|
| 858 |
+
[2026-04-25 18:40:08] Validation | Batch 50/84 | Loss: 1.0617
|
| 859 |
+
[2026-04-25 18:40:09] Validation | Batch 60/84 | Loss: 1.0346
|
| 860 |
+
[2026-04-25 18:40:09] Validation | Batch 70/84 | Loss: 1.0155
|
| 861 |
+
[2026-04-25 18:40:10] Validation | Batch 80/84 | Loss: 1.0222
|
| 862 |
+
[2026-04-25 18:40:10] Validation | Batch 84/84 | Loss: 1.0157
|
| 863 |
+
[2026-04-25 18:40:10] Validation | Loss: 1.0157 | PPL: 2.84 | Time: 3.75s
|
| 864 |
+
[2026-04-25 18:40:13] New best model saved! Val loss: 1.0157
|
| 865 |
+
[2026-04-25 18:40:15] Epoch 1 | Step 8010 | Loss: 1.0452 | LR: 1.00e-06
|
| 866 |
+
[2026-04-25 18:40:18] Epoch 1 | Step 8020 | Loss: 1.0450 | LR: 1.00e-06
|
| 867 |
+
[2026-04-25 18:40:21] Epoch 1 | Step 8030 | Loss: 1.0450 | LR: 1.00e-06
|
| 868 |
+
[2026-04-25 18:40:24] Epoch 1 | Step 8040 | Loss: 1.0451 | LR: 1.00e-06
|
| 869 |
+
[2026-04-25 18:40:26] Epoch 1 | Step 8050 | Loss: 1.0449 | LR: 1.00e-06
|
| 870 |
+
[2026-04-25 18:40:29] Epoch 1 | Step 8060 | Loss: 1.0449 | LR: 1.00e-06
|
| 871 |
+
[2026-04-25 18:40:31] Epoch 1 | Step 8070 | Loss: 1.0448 | LR: 1.00e-06
|
| 872 |
+
[2026-04-25 18:40:34] Epoch 1 | Step 8080 | Loss: 1.0448 | LR: 1.00e-06
|
| 873 |
+
[2026-04-25 18:40:37] Epoch 1 | Step 8090 | Loss: 1.0446 | LR: 1.00e-06
|
| 874 |
+
[2026-04-25 18:40:39] Epoch 1 | Step 8100 | Loss: 1.0445 | LR: 1.00e-06
|
| 875 |
+
[2026-04-25 18:40:42] Epoch 1 | Step 8110 | Loss: 1.0446 | LR: 1.00e-06
|
| 876 |
+
[2026-04-25 18:40:44] Epoch 1 | Step 8120 | Loss: 1.0446 | LR: 1.00e-06
|
| 877 |
+
[2026-04-25 18:40:47] Epoch 1 | Step 8130 | Loss: 1.0445 | LR: 1.00e-06
|
| 878 |
+
[2026-04-25 18:40:49] Epoch 1 | Step 8140 | Loss: 1.0446 | LR: 1.00e-06
|
| 879 |
+
[2026-04-25 18:40:52] Epoch 1 | Step 8150 | Loss: 1.0446 | LR: 1.00e-06
|
| 880 |
+
[2026-04-25 18:40:54] Epoch 1 | Step 8160 | Loss: 1.0444 | LR: 1.00e-06
|
| 881 |
+
[2026-04-25 18:40:56] Epoch 1 | Step 8170 | Loss: 1.0444 | LR: 1.00e-06
|
| 882 |
+
[2026-04-25 18:40:59] Epoch 1 | Step 8180 | Loss: 1.0444 | LR: 1.00e-06
|
| 883 |
+
[2026-04-25 18:41:01] Epoch 1 | Step 8190 | Loss: 1.0443 | LR: 1.00e-06
|
| 884 |
+
[2026-04-25 18:41:04] Epoch 1 | Step 8200 | Loss: 1.0443 | LR: 1.00e-06
|
| 885 |
+
[2026-04-25 18:41:06] Epoch 1 | Step 8210 | Loss: 1.0443 | LR: 1.00e-06
|
| 886 |
+
[2026-04-25 18:41:09] Epoch 1 | Step 8220 | Loss: 1.0443 | LR: 1.00e-06
|
| 887 |
+
[2026-04-25 18:41:12] Epoch 1 | Step 8230 | Loss: 1.0443 | LR: 1.00e-06
|
| 888 |
+
[2026-04-25 18:41:14] Epoch 1 | Step 8240 | Loss: 1.0444 | LR: 1.00e-06
|
| 889 |
+
[2026-04-25 18:41:16] Epoch 1 | Step 8250 | Loss: 1.0443 | LR: 1.00e-06
|
| 890 |
+
[2026-04-25 18:41:19] Epoch 1 | Step 8260 | Loss: 1.0444 | LR: 1.00e-06
|
| 891 |
+
[2026-04-25 18:41:21] Epoch 1 | Step 8270 | Loss: 1.0445 | LR: 1.00e-06
|
| 892 |
+
[2026-04-25 18:41:24] Epoch 1 | Step 8280 | Loss: 1.0446 | LR: 1.00e-06
|
| 893 |
+
[2026-04-25 18:41:27] Epoch 1 | Step 8290 | Loss: 1.0446 | LR: 1.00e-06
|
| 894 |
+
[2026-04-25 18:41:29] Epoch 1 | Step 8300 | Loss: 1.0445 | LR: 1.00e-06
|
| 895 |
+
[2026-04-25 18:41:31] Epoch 1 | Step 8310 | Loss: 1.0445 | LR: 1.00e-06
|
| 896 |
+
[2026-04-25 18:41:34] Epoch 1 | Step 8320 | Loss: 1.0445 | LR: 1.00e-06
|
| 897 |
+
[2026-04-25 18:41:36] Epoch 1 | Step 8330 | Loss: 1.0445 | LR: 1.00e-06
|
| 898 |
+
[2026-04-25 18:41:39] Epoch 1 | Step 8340 | Loss: 1.0445 | LR: 1.00e-06
|
| 899 |
+
[2026-04-25 18:41:41] Epoch 1 | Step 8350 | Loss: 1.0444 | LR: 1.00e-06
|
| 900 |
+
[2026-04-25 18:41:43] Epoch 1 | Step 8360 | Loss: 1.0443 | LR: 1.00e-06
|
| 901 |
+
[2026-04-25 18:41:46] Epoch 1 | Step 8370 | Loss: 1.0443 | LR: 1.00e-06
|
| 902 |
+
[2026-04-25 18:41:48] Epoch 1 | Step 8380 | Loss: 1.0443 | LR: 1.00e-06
|
| 903 |
+
[2026-04-25 18:41:51] Epoch 1 | Step 8390 | Loss: 1.0443 | LR: 1.00e-06
|
| 904 |
+
[2026-04-25 18:41:53] Epoch 1 | Step 8400 | Loss: 1.0443 | LR: 1.00e-06
|
| 905 |
+
[2026-04-25 18:41:56] Epoch 1 | Step 8410 | Loss: 1.0443 | LR: 1.00e-06
|
| 906 |
+
[2026-04-25 18:41:58] Epoch 1 | Step 8420 | Loss: 1.0444 | LR: 1.00e-06
|
| 907 |
+
[2026-04-25 18:42:01] Epoch 1 | Step 8430 | Loss: 1.0443 | LR: 1.00e-06
|
| 908 |
+
[2026-04-25 18:42:03] Epoch 1 | Step 8440 | Loss: 1.0443 | LR: 1.00e-06
|
| 909 |
+
[2026-04-25 18:42:06] Epoch 1 | Step 8450 | Loss: 1.0442 | LR: 1.00e-06
|
| 910 |
+
[2026-04-25 18:42:08] Epoch 1 | Step 8460 | Loss: 1.0442 | LR: 1.00e-06
|
| 911 |
+
[2026-04-25 18:42:11] Epoch 1 | Step 8470 | Loss: 1.0442 | LR: 1.00e-06
|
| 912 |
+
[2026-04-25 18:42:13] Epoch 1 | Step 8480 | Loss: 1.0442 | LR: 1.00e-06
|
| 913 |
+
[2026-04-25 18:42:15] Epoch 1 | Step 8490 | Loss: 1.0440 | LR: 1.00e-06
|
| 914 |
+
[2026-04-25 18:42:18] Epoch 1 | Step 8500 | Loss: 1.0441 | LR: 1.00e-06
|
| 915 |
+
[2026-04-25 18:42:20] Epoch 1 | Step 8510 | Loss: 1.0440 | LR: 1.00e-06
|
| 916 |
+
[2026-04-25 18:42:23] Epoch 1 | Step 8520 | Loss: 1.0440 | LR: 1.00e-06
|
| 917 |
+
[2026-04-25 18:42:26] Epoch 1 | Step 8530 | Loss: 1.0440 | LR: 1.00e-06
|
| 918 |
+
[2026-04-25 18:42:28] Epoch 1 | Step 8540 | Loss: 1.0441 | LR: 1.00e-06
|
| 919 |
+
[2026-04-25 18:42:31] Epoch 1 | Step 8550 | Loss: 1.0442 | LR: 1.00e-06
|
| 920 |
+
[2026-04-25 18:42:33] Epoch 1 | Step 8560 | Loss: 1.0441 | LR: 1.00e-06
|
| 921 |
+
[2026-04-25 18:42:35] Epoch 1 | Step 8570 | Loss: 1.0442 | LR: 1.00e-06
|
| 922 |
+
[2026-04-25 18:42:38] Epoch 1 | Step 8580 | Loss: 1.0440 | LR: 1.00e-06
|
| 923 |
+
[2026-04-25 18:42:41] Epoch 1 | Step 8590 | Loss: 1.0439 | LR: 1.00e-06
|
| 924 |
+
[2026-04-25 18:42:43] Epoch 1 | Step 8600 | Loss: 1.0438 | LR: 1.00e-06
|
| 925 |
+
[2026-04-25 18:42:46] Epoch 1 | Step 8610 | Loss: 1.0439 | LR: 1.00e-06
|
| 926 |
+
[2026-04-25 18:42:48] Epoch 1 | Step 8620 | Loss: 1.0438 | LR: 1.00e-06
|
| 927 |
+
[2026-04-25 18:42:51] Epoch 1 | Step 8630 | Loss: 1.0436 | LR: 1.00e-06
|
| 928 |
+
[2026-04-25 18:42:53] Epoch 1 | Step 8640 | Loss: 1.0438 | LR: 1.00e-06
|
| 929 |
+
[2026-04-25 18:42:55] Epoch 1 | Step 8650 | Loss: 1.0438 | LR: 1.00e-06
|
| 930 |
+
[2026-04-25 18:42:58] Epoch 1 | Step 8660 | Loss: 1.0437 | LR: 1.00e-06
|
| 931 |
+
[2026-04-25 18:43:01] Epoch 1 | Step 8670 | Loss: 1.0437 | LR: 1.00e-06
|
| 932 |
+
[2026-04-25 18:43:03] Epoch 1 | Step 8680 | Loss: 1.0438 | LR: 1.00e-06
|
| 933 |
+
[2026-04-25 18:43:05] Epoch 1 | Step 8690 | Loss: 1.0436 | LR: 1.00e-06
|
| 934 |
+
[2026-04-25 18:43:08] Epoch 1 | Step 8700 | Loss: 1.0436 | LR: 1.00e-06
|
| 935 |
+
[2026-04-25 18:43:11] Epoch 1 | Step 8710 | Loss: 1.0433 | LR: 1.00e-06
|
| 936 |
+
[2026-04-25 18:43:13] Epoch 1 | Step 8720 | Loss: 1.0432 | LR: 1.00e-06
|
| 937 |
+
[2026-04-25 18:43:16] Epoch 1 | Step 8730 | Loss: 1.0433 | LR: 1.00e-06
|
| 938 |
+
[2026-04-25 18:43:18] Epoch 1 | Step 8740 | Loss: 1.0433 | LR: 1.00e-06
|
| 939 |
+
[2026-04-25 18:43:21] Epoch 1 | Step 8750 | Loss: 1.0433 | LR: 1.00e-06
|
| 940 |
+
[2026-04-25 18:43:23] Epoch 1 | Step 8760 | Loss: 1.0432 | LR: 1.00e-06
|
| 941 |
+
[2026-04-25 18:43:26] Epoch 1 | Step 8770 | Loss: 1.0431 | LR: 1.00e-06
|
| 942 |
+
[2026-04-25 18:43:29] Epoch 1 | Step 8780 | Loss: 1.0431 | LR: 1.00e-06
|
| 943 |
+
[2026-04-25 18:43:31] Epoch 1 | Step 8790 | Loss: 1.0431 | LR: 1.00e-06
|
| 944 |
+
[2026-04-25 18:43:34] Epoch 1 | Step 8800 | Loss: 1.0429 | LR: 1.00e-06
|
| 945 |
+
[2026-04-25 18:43:36] Epoch 1 | Step 8810 | Loss: 1.0429 | LR: 1.00e-06
|
| 946 |
+
[2026-04-25 18:43:39] Epoch 1 | Step 8820 | Loss: 1.0428 | LR: 1.00e-06
|
| 947 |
+
[2026-04-25 18:43:41] Epoch 1 | Step 8830 | Loss: 1.0428 | LR: 1.00e-06
|
| 948 |
+
[2026-04-25 18:43:44] Epoch 1 | Step 8840 | Loss: 1.0428 | LR: 1.00e-06
|
| 949 |
+
[2026-04-25 18:43:46] Epoch 1 | Step 8850 | Loss: 1.0428 | LR: 1.00e-06
|
| 950 |
+
[2026-04-25 18:43:49] Epoch 1 | Step 8860 | Loss: 1.0427 | LR: 1.00e-06
|
| 951 |
+
[2026-04-25 18:43:51] Epoch 1 | Step 8870 | Loss: 1.0428 | LR: 1.00e-06
|
| 952 |
+
[2026-04-25 18:43:54] Epoch 1 | Step 8880 | Loss: 1.0427 | LR: 1.00e-06
|
| 953 |
+
[2026-04-25 18:43:56] Epoch 1 | Step 8890 | Loss: 1.0426 | LR: 1.00e-06
|
| 954 |
+
[2026-04-25 18:43:59] Epoch 1 | Step 8900 | Loss: 1.0424 | LR: 1.00e-06
|
| 955 |
+
[2026-04-25 18:44:01] Epoch 1 | Step 8910 | Loss: 1.0425 | LR: 1.00e-06
|
| 956 |
+
[2026-04-25 18:44:04] Epoch 1 | Step 8920 | Loss: 1.0423 | LR: 1.00e-06
|
| 957 |
+
[2026-04-25 18:44:06] Epoch 1 | Step 8930 | Loss: 1.0422 | LR: 1.00e-06
|
| 958 |
+
[2026-04-25 18:44:09] Epoch 1 | Step 8940 | Loss: 1.0422 | LR: 1.00e-06
|
| 959 |
+
[2026-04-25 18:44:11] Epoch 1 | Step 8950 | Loss: 1.0423 | LR: 1.00e-06
|
| 960 |
+
[2026-04-25 18:44:14] Epoch 1 | Step 8960 | Loss: 1.0422 | LR: 1.00e-06
|
| 961 |
+
[2026-04-25 18:44:16] Epoch 1 | Step 8970 | Loss: 1.0422 | LR: 1.00e-06
|
| 962 |
+
[2026-04-25 18:44:19] Epoch 1 | Step 8980 | Loss: 1.0421 | LR: 1.00e-06
|
| 963 |
+
[2026-04-25 18:44:21] Epoch 1 | Step 8990 | Loss: 1.0420 | LR: 1.00e-06
|
| 964 |
+
[2026-04-25 18:44:24] Epoch 1 | Step 9000 | Loss: 1.0419 | LR: 1.00e-06
|
| 965 |
+
[2026-04-25 18:44:26] Epoch 1 | Step 9010 | Loss: 1.0420 | LR: 1.00e-06
|
| 966 |
+
[2026-04-25 18:44:29] Epoch 1 | Step 9020 | Loss: 1.0421 | LR: 1.00e-06
|
| 967 |
+
[2026-04-25 18:44:31] Epoch 1 | Step 9030 | Loss: 1.0421 | LR: 1.00e-06
|
| 968 |
+
[2026-04-25 18:44:34] Epoch 1 | Step 9040 | Loss: 1.0420 | LR: 1.00e-06
|
| 969 |
+
[2026-04-25 18:44:36] Epoch 1 | Step 9050 | Loss: 1.0419 | LR: 1.00e-06
|
| 970 |
+
[2026-04-25 18:44:39] Epoch 1 | Step 9060 | Loss: 1.0420 | LR: 1.00e-06
|
| 971 |
+
[2026-04-25 18:44:41] Epoch 1 | Step 9070 | Loss: 1.0419 | LR: 1.00e-06
|
| 972 |
+
[2026-04-25 18:44:44] Epoch 1 | Step 9080 | Loss: 1.0420 | LR: 1.00e-06
|
| 973 |
+
[2026-04-25 18:44:46] Epoch 1 | Step 9090 | Loss: 1.0419 | LR: 1.00e-06
|
| 974 |
+
[2026-04-25 18:44:48] Epoch 1 | Step 9100 | Loss: 1.0419 | LR: 1.00e-06
|
| 975 |
+
[2026-04-25 18:44:51] Epoch 1 | Step 9110 | Loss: 1.0419 | LR: 1.00e-06
|
| 976 |
+
[2026-04-25 18:44:53] Epoch 1 | Step 9120 | Loss: 1.0420 | LR: 1.00e-06
|
| 977 |
+
[2026-04-25 18:44:56] Epoch 1 | Step 9130 | Loss: 1.0419 | LR: 1.00e-06
|
| 978 |
+
[2026-04-25 18:44:58] Epoch 1 | Step 9140 | Loss: 1.0419 | LR: 1.00e-06
|
| 979 |
+
[2026-04-25 18:45:00] Epoch 1 | Step 9150 | Loss: 1.0420 | LR: 1.00e-06
|
| 980 |
+
[2026-04-25 18:45:03] Epoch 1 | Step 9160 | Loss: 1.0419 | LR: 1.00e-06
|
| 981 |
+
[2026-04-25 18:45:06] Epoch 1 | Step 9170 | Loss: 1.0417 | LR: 1.00e-06
|
| 982 |
+
[2026-04-25 18:45:08] Epoch 1 | Step 9180 | Loss: 1.0416 | LR: 1.00e-06
|
| 983 |
+
[2026-04-25 18:45:11] Epoch 1 | Step 9190 | Loss: 1.0414 | LR: 1.00e-06
|
| 984 |
+
[2026-04-25 18:45:13] Epoch 1 | Step 9200 | Loss: 1.0414 | LR: 1.00e-06
|
| 985 |
+
[2026-04-25 18:45:16] Epoch 1 | Step 9210 | Loss: 1.0415 | LR: 1.00e-06
|
| 986 |
+
[2026-04-25 18:45:18] Epoch 1 | Step 9220 | Loss: 1.0414 | LR: 1.00e-06
|
| 987 |
+
[2026-04-25 18:45:21] Epoch 1 | Step 9230 | Loss: 1.0413 | LR: 1.00e-06
|
| 988 |
+
[2026-04-25 18:45:23] Epoch 1 | Step 9240 | Loss: 1.0411 | LR: 1.00e-06
|
| 989 |
+
[2026-04-25 18:45:26] Epoch 1 | Step 9250 | Loss: 1.0410 | LR: 1.00e-06
|
| 990 |
+
[2026-04-25 18:45:28] Epoch 1 | Step 9260 | Loss: 1.0409 | LR: 1.00e-06
|
| 991 |
+
[2026-04-25 18:45:31] Epoch 1 | Step 9270 | Loss: 1.0408 | LR: 1.00e-06
|
| 992 |
+
[2026-04-25 18:45:33] Epoch 1 | Step 9280 | Loss: 1.0408 | LR: 1.00e-06
|
| 993 |
+
[2026-04-25 18:45:36] Epoch 1 | Step 9290 | Loss: 1.0408 | LR: 1.00e-06
|
| 994 |
+
[2026-04-25 18:45:38] Epoch 1 | Step 9300 | Loss: 1.0408 | LR: 1.00e-06
|
| 995 |
+
[2026-04-25 18:45:41] Epoch 1 | Step 9310 | Loss: 1.0407 | LR: 1.00e-06
|
| 996 |
+
[2026-04-25 18:45:43] Epoch 1 | Step 9320 | Loss: 1.0407 | LR: 1.00e-06
|
| 997 |
+
[2026-04-25 18:45:46] Epoch 1 | Step 9330 | Loss: 1.0407 | LR: 1.00e-06
|
| 998 |
+
[2026-04-25 18:45:48] Epoch 1 | Step 9340 | Loss: 1.0406 | LR: 1.00e-06
|
| 999 |
+
[2026-04-25 18:45:51] Epoch 1 | Step 9350 | Loss: 1.0405 | LR: 1.00e-06
|
| 1000 |
+
[2026-04-25 18:45:53] Epoch 1 | Step 9360 | Loss: 1.0405 | LR: 1.00e-06
|
| 1001 |
+
[2026-04-25 18:45:56] Epoch 1 | Step 9370 | Loss: 1.0404 | LR: 1.00e-06
|
| 1002 |
+
[2026-04-25 18:45:58] Epoch 1 | Step 9380 | Loss: 1.0404 | LR: 1.00e-06
|
| 1003 |
+
[2026-04-25 18:46:01] Epoch 1 | Step 9390 | Loss: 1.0402 | LR: 1.00e-06
|
| 1004 |
+
[2026-04-25 18:46:03] Epoch 1 | Step 9400 | Loss: 1.0403 | LR: 1.00e-06
|
| 1005 |
+
[2026-04-25 18:46:06] Epoch 1 | Step 9410 | Loss: 1.0403 | LR: 1.00e-06
|
| 1006 |
+
[2026-04-25 18:46:09] Epoch 1 | Step 9420 | Loss: 1.0404 | LR: 1.00e-06
|
| 1007 |
+
[2026-04-25 18:46:11] Epoch 1 | Step 9430 | Loss: 1.0404 | LR: 1.00e-06
|
| 1008 |
+
[2026-04-25 18:46:13] Epoch 1 | Step 9440 | Loss: 1.0404 | LR: 1.00e-06
|
| 1009 |
+
[2026-04-25 18:46:16] Epoch 1 | Step 9450 | Loss: 1.0405 | LR: 1.00e-06
|
| 1010 |
+
[2026-04-25 18:46:19] Epoch 1 | Step 9460 | Loss: 1.0404 | LR: 1.00e-06
|
| 1011 |
+
[2026-04-25 18:46:21] Epoch 1 | Step 9470 | Loss: 1.0403 | LR: 1.00e-06
|
| 1012 |
+
[2026-04-25 18:46:24] Epoch 1 | Step 9480 | Loss: 1.0401 | LR: 1.00e-06
|
| 1013 |
+
[2026-04-25 18:46:26] Epoch 1 | Step 9490 | Loss: 1.0401 | LR: 1.00e-06
|
| 1014 |
+
[2026-04-25 18:46:28] Epoch 1 | Step 9500 | Loss: 1.0401 | LR: 1.00e-06
|
| 1015 |
+
[2026-04-25 18:46:31] Epoch 1 | Step 9510 | Loss: 1.0401 | LR: 1.00e-06
|
| 1016 |
+
[2026-04-25 18:46:33] Epoch 1 | Step 9520 | Loss: 1.0401 | LR: 1.00e-06
|
| 1017 |
+
[2026-04-25 18:46:36] Epoch 1 | Step 9530 | Loss: 1.0401 | LR: 1.00e-06
|
| 1018 |
+
[2026-04-25 18:46:39] Epoch 1 | Step 9540 | Loss: 1.0399 | LR: 1.00e-06
|
| 1019 |
+
[2026-04-25 18:46:41] Epoch 1 | Step 9550 | Loss: 1.0400 | LR: 1.00e-06
|
| 1020 |
+
[2026-04-25 18:46:44] Epoch 1 | Step 9560 | Loss: 1.0400 | LR: 1.00e-06
|
| 1021 |
+
[2026-04-25 18:46:46] Epoch 1 | Step 9570 | Loss: 1.0401 | LR: 1.00e-06
|
| 1022 |
+
[2026-04-25 18:46:48] Epoch 1 | Step 9580 | Loss: 1.0402 | LR: 1.00e-06
|
| 1023 |
+
[2026-04-25 18:46:51] Epoch 1 | Step 9590 | Loss: 1.0401 | LR: 1.00e-06
|
| 1024 |
+
[2026-04-25 18:46:54] Epoch 1 | Step 9600 | Loss: 1.0400 | LR: 1.00e-06
|
| 1025 |
+
[2026-04-25 18:46:57] Epoch 1 | Step 9610 | Loss: 1.0400 | LR: 1.00e-06
|
| 1026 |
+
[2026-04-25 18:46:59] Epoch 1 | Step 9620 | Loss: 1.0400 | LR: 1.00e-06
|
| 1027 |
+
[2026-04-25 18:47:02] Epoch 1 | Step 9630 | Loss: 1.0402 | LR: 1.00e-06
|
| 1028 |
+
[2026-04-25 18:47:05] Epoch 1 | Step 9640 | Loss: 1.0401 | LR: 1.00e-06
|
| 1029 |
+
[2026-04-25 18:47:08] Epoch 1 | Step 9650 | Loss: 1.0401 | LR: 1.00e-06
|
| 1030 |
+
[2026-04-25 18:47:10] Epoch 1 | Step 9660 | Loss: 1.0401 | LR: 1.00e-06
|
| 1031 |
+
[2026-04-25 18:47:13] Epoch 1 | Step 9670 | Loss: 1.0401 | LR: 1.00e-06
|
| 1032 |
+
[2026-04-25 18:47:15] Epoch 1 | Step 9680 | Loss: 1.0401 | LR: 1.00e-06
|
| 1033 |
+
[2026-04-25 18:47:17] Epoch 1 | Step 9690 | Loss: 1.0400 | LR: 1.00e-06
|
| 1034 |
+
[2026-04-25 18:47:20] Epoch 1 | Step 9700 | Loss: 1.0401 | LR: 1.00e-06
|
| 1035 |
+
[2026-04-25 18:47:23] Epoch 1 | Step 9710 | Loss: 1.0400 | LR: 1.00e-06
|
| 1036 |
+
[2026-04-25 18:47:25] Epoch 1 | Step 9720 | Loss: 1.0400 | LR: 1.00e-06
|
| 1037 |
+
[2026-04-25 18:47:28] Epoch 1 | Step 9730 | Loss: 1.0400 | LR: 1.00e-06
|
| 1038 |
+
[2026-04-25 18:47:30] Epoch 1 | Step 9740 | Loss: 1.0399 | LR: 1.00e-06
|
| 1039 |
+
[2026-04-25 18:47:32] Epoch 1 | Step 9750 | Loss: 1.0399 | LR: 1.00e-06
|
| 1040 |
+
[2026-04-25 18:47:35] Epoch 1 | Step 9760 | Loss: 1.0399 | LR: 1.00e-06
|
| 1041 |
+
[2026-04-25 18:47:37] Epoch 1 | Step 9770 | Loss: 1.0399 | LR: 1.00e-06
|
| 1042 |
+
[2026-04-25 18:47:40] Epoch 1 | Step 9780 | Loss: 1.0398 | LR: 1.00e-06
|
| 1043 |
+
[2026-04-25 18:47:42] Epoch 1 | Step 9790 | Loss: 1.0398 | LR: 1.00e-06
|
| 1044 |
+
[2026-04-25 18:47:45] Epoch 1 | Step 9800 | Loss: 1.0398 | LR: 1.00e-06
|
| 1045 |
+
[2026-04-25 18:47:48] Epoch 1 | Step 9810 | Loss: 1.0397 | LR: 1.00e-06
|
| 1046 |
+
[2026-04-25 18:47:50] Epoch 1 | Step 9820 | Loss: 1.0396 | LR: 1.00e-06
|
| 1047 |
+
[2026-04-25 18:47:53] Epoch 1 | Step 9830 | Loss: 1.0396 | LR: 1.00e-06
|
| 1048 |
+
[2026-04-25 18:47:56] Epoch 1 | Step 9840 | Loss: 1.0398 | LR: 1.00e-06
|
| 1049 |
+
[2026-04-25 18:47:58] Epoch 1 | Step 9850 | Loss: 1.0397 | LR: 1.00e-06
|
| 1050 |
+
[2026-04-25 18:48:01] Epoch 1 | Step 9860 | Loss: 1.0396 | LR: 1.00e-06
|
| 1051 |
+
[2026-04-25 18:48:03] Epoch 1 | Step 9870 | Loss: 1.0397 | LR: 1.00e-06
|
| 1052 |
+
[2026-04-25 18:48:06] Epoch 1 | Step 9880 | Loss: 1.0397 | LR: 1.00e-06
|
| 1053 |
+
[2026-04-25 18:48:08] Epoch 1 completed in 2512.48s | Loss: 1.0397
|
| 1054 |
+
[2026-04-25 18:48:08]
|
| 1055 |
+
Training completed!
|
| 1056 |
+
[2026-04-25 18:48:10] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/model_final.pt
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/requirements.txt
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
setuptools==78.1.1
|
| 2 |
+
wheel==0.45.1
|
| 3 |
+
pip==25.2
|
| 4 |
+
webencodings==0.5.1
|
| 5 |
+
triton==3.2.0
|
| 6 |
+
pytz==2025.2
|
| 7 |
+
pydub==0.25.1
|
| 8 |
+
pure_eval==0.2.3
|
| 9 |
+
ptyprocess==0.7.0
|
| 10 |
+
nvidia-ml-py==13.590.48
|
| 11 |
+
nvidia-cusparselt-cu12==0.6.2
|
| 12 |
+
mpmath==1.3.0
|
| 13 |
+
ipython-genutils==0.2.0
|
| 14 |
+
fastjsonschema==2.21.2
|
| 15 |
+
brotli==1.2.0
|
| 16 |
+
antlr4-python3-runtime==4.9.3
|
| 17 |
+
xxhash==3.6.0
|
| 18 |
+
widgetsnbextension==4.0.14
|
| 19 |
+
websocket-client==1.9.0
|
| 20 |
+
webcolors==24.11.1
|
| 21 |
+
wcwidth==0.2.14
|
| 22 |
+
urllib3==2.5.0
|
| 23 |
+
uri-template==1.3.0
|
| 24 |
+
tzdata==2025.2
|
| 25 |
+
typing_extensions==4.15.0
|
| 26 |
+
types-python-dateutil==2.9.0.20251008
|
| 27 |
+
traitlets==5.14.3
|
| 28 |
+
tqdm==4.67.1
|
| 29 |
+
tornado==6.5.2
|
| 30 |
+
tomlkit==0.13.3
|
| 31 |
+
tinycss2==1.4.0
|
| 32 |
+
tabulate==0.9.0
|
| 33 |
+
sympy==1.13.1
|
| 34 |
+
soupsieve==2.8
|
| 35 |
+
sniffio==1.3.1
|
| 36 |
+
smmap==5.0.2
|
| 37 |
+
six==1.17.0
|
| 38 |
+
shellingham==1.5.4
|
| 39 |
+
Send2Trash==1.8.3
|
| 40 |
+
semantic-version==2.10.0
|
| 41 |
+
safetensors==0.6.2
|
| 42 |
+
rpds-py==0.27.1
|
| 43 |
+
rfc3986-validator==0.1.1
|
| 44 |
+
regex==2025.9.18
|
| 45 |
+
pyzmq==27.1.0
|
| 46 |
+
PyYAML==6.0.3
|
| 47 |
+
python-multipart==0.0.22
|
| 48 |
+
python-json-logger==4.0.0
|
| 49 |
+
python-dotenv==1.2.1
|
| 50 |
+
pyparsing==3.2.5
|
| 51 |
+
PyJWT==2.8.0
|
| 52 |
+
Pygments==2.19.2
|
| 53 |
+
pycparser==2.23
|
| 54 |
+
pyarrow==22.0.0
|
| 55 |
+
psutil==7.1.0
|
| 56 |
+
protobuf==6.33.4
|
| 57 |
+
propcache==0.4.1
|
| 58 |
+
prometheus_client==0.23.1
|
| 59 |
+
portalocker==3.2.0
|
| 60 |
+
platformdirs==4.5.0
|
| 61 |
+
pillow==11.3.0
|
| 62 |
+
pexpect==4.9.0
|
| 63 |
+
pathspec==1.0.4
|
| 64 |
+
parso==0.8.5
|
| 65 |
+
pandocfilters==1.5.1
|
| 66 |
+
packaging==25.0
|
| 67 |
+
orjson==3.11.6
|
| 68 |
+
opt_einsum==3.4.0
|
| 69 |
+
nvidia-nvtx-cu12==12.4.127
|
| 70 |
+
nvidia-nvjitlink-cu12==12.4.127
|
| 71 |
+
nvidia-nccl-cu12==2.21.5
|
| 72 |
+
nvidia-curand-cu12==10.3.5.147
|
| 73 |
+
nvidia-cufile-cu12==1.13.1.3
|
| 74 |
+
nvidia-cufft-cu12==11.2.1.3
|
| 75 |
+
nvidia-cuda-runtime-cu12==12.4.127
|
| 76 |
+
nvidia-cuda-nvrtc-cu12==12.4.127
|
| 77 |
+
nvidia-cuda-cupti-cu12==12.4.127
|
| 78 |
+
nvidia-cublas-cu12==12.4.5.8
|
| 79 |
+
numpy==2.3.3
|
| 80 |
+
ninja==1.13.0
|
| 81 |
+
networkx==3.5
|
| 82 |
+
nest-asyncio==1.6.0
|
| 83 |
+
narwhals==2.15.0
|
| 84 |
+
mypy_extensions==1.1.0
|
| 85 |
+
multidict==6.7.0
|
| 86 |
+
mistune==3.1.4
|
| 87 |
+
mdurl==0.1.2
|
| 88 |
+
MarkupSafe==3.0.3
|
| 89 |
+
lxml==6.0.2
|
| 90 |
+
librt==0.8.0
|
| 91 |
+
lark==1.3.0
|
| 92 |
+
kiwisolver==1.4.9
|
| 93 |
+
jupyterlab_widgets==3.0.15
|
| 94 |
+
jupyterlab_pygments==0.3.0
|
| 95 |
+
jsonpointer==3.0.0
|
| 96 |
+
json5==0.12.1
|
| 97 |
+
itsdangerous==2.2.0
|
| 98 |
+
idna==3.10
|
| 99 |
+
hf-xet==1.1.10
|
| 100 |
+
h11==0.16.0
|
| 101 |
+
groovy==0.1.2
|
| 102 |
+
fsspec==2025.9.0
|
| 103 |
+
frozenlist==1.8.0
|
| 104 |
+
fqdn==1.5.1
|
| 105 |
+
fonttools==4.60.1
|
| 106 |
+
filelock==3.19.1
|
| 107 |
+
ffmpy==1.0.0
|
| 108 |
+
executing==2.2.1
|
| 109 |
+
einops==0.8.1
|
| 110 |
+
dill==0.4.0
|
| 111 |
+
defusedxml==0.7.1
|
| 112 |
+
decorator==5.2.1
|
| 113 |
+
debugpy==1.8.17
|
| 114 |
+
dacite==1.9.2
|
| 115 |
+
cycler==0.12.1
|
| 116 |
+
comm==0.2.3
|
| 117 |
+
colorama==0.4.6
|
| 118 |
+
click==8.3.1
|
| 119 |
+
charset-normalizer==3.4.3
|
| 120 |
+
certifi==2025.10.5
|
| 121 |
+
bleach==6.2.0
|
| 122 |
+
babel==2.17.0
|
| 123 |
+
attrs==25.4.0
|
| 124 |
+
async-lru==2.0.5
|
| 125 |
+
asttokens==3.0.0
|
| 126 |
+
annotated-types==0.7.0
|
| 127 |
+
annotated-doc==0.0.4
|
| 128 |
+
aiohappyeyeballs==2.6.1
|
| 129 |
+
aiofiles==24.1.0
|
| 130 |
+
yarl==1.22.0
|
| 131 |
+
uvicorn==0.40.0
|
| 132 |
+
typing-inspection==0.4.2
|
| 133 |
+
terminado==0.18.1
|
| 134 |
+
stack-data==0.6.3
|
| 135 |
+
sentry-sdk==2.50.0
|
| 136 |
+
scipy==1.17.0
|
| 137 |
+
sacrebleu==2.6.0
|
| 138 |
+
rfc3987-syntax==1.1.0
|
| 139 |
+
rfc3339-validator==0.1.4
|
| 140 |
+
requests==2.32.5
|
| 141 |
+
reportlab==4.4.9
|
| 142 |
+
referencing==0.36.2
|
| 143 |
+
python-dateutil==2.9.0.post0
|
| 144 |
+
pydantic_core==2.41.5
|
| 145 |
+
prompt_toolkit==3.0.52
|
| 146 |
+
plotly==6.5.2
|
| 147 |
+
pathlib2==2.3.7.post1
|
| 148 |
+
orderedmultidict==1.0.2
|
| 149 |
+
optree==0.17.0
|
| 150 |
+
omegaconf==2.3.0
|
| 151 |
+
nvidia-cusparse-cu12==12.3.1.170
|
| 152 |
+
nvidia-cudnn-cu12==9.1.0.70
|
| 153 |
+
mypy==1.19.1
|
| 154 |
+
multiprocess==0.70.16
|
| 155 |
+
matplotlib-inline==0.1.7
|
| 156 |
+
markdown-it-py==4.0.0
|
| 157 |
+
jupyter_core==5.8.1
|
| 158 |
+
Jinja2==3.1.6
|
| 159 |
+
jedi==0.19.2
|
| 160 |
+
ipython_pygments_lexers==1.1.1
|
| 161 |
+
httpcore==1.0.9
|
| 162 |
+
gitdb==4.0.12
|
| 163 |
+
ftfy==6.3.1
|
| 164 |
+
contourpy==1.3.3
|
| 165 |
+
cffi==2.0.0
|
| 166 |
+
beautifulsoup4==4.14.2
|
| 167 |
+
anyio==4.11.0
|
| 168 |
+
aiosignal==1.4.0
|
| 169 |
+
starlette==0.50.0
|
| 170 |
+
rich==14.2.0
|
| 171 |
+
pydantic==2.12.5
|
| 172 |
+
pandas==2.3.3
|
| 173 |
+
nvidia-cusolver-cu12==11.6.1.9
|
| 174 |
+
matplotlib==3.10.7
|
| 175 |
+
jupyter_server_terminals==0.5.3
|
| 176 |
+
jupyter_client==8.6.3
|
| 177 |
+
jsonschema-specifications==2025.9.1
|
| 178 |
+
ipython==9.6.0
|
| 179 |
+
hydra-core==1.3.2
|
| 180 |
+
huggingface-hub==0.35.3
|
| 181 |
+
httpx==0.28.1
|
| 182 |
+
GitPython==3.1.46
|
| 183 |
+
furl==2.1.4
|
| 184 |
+
cryptography==46.0.4
|
| 185 |
+
arrow==1.3.0
|
| 186 |
+
argon2-cffi-bindings==25.1.0
|
| 187 |
+
aiohttp==3.13.1
|
| 188 |
+
wandb==0.24.0
|
| 189 |
+
typer==0.21.1
|
| 190 |
+
torch==2.6.0
|
| 191 |
+
tokenizers==0.22.1
|
| 192 |
+
seaborn==0.13.2
|
| 193 |
+
safehttpx==0.1.7
|
| 194 |
+
jsonschema==4.25.1
|
| 195 |
+
joypy==0.2.6
|
| 196 |
+
isoduration==20.11.0
|
| 197 |
+
ipywidgets==8.1.7
|
| 198 |
+
ipykernel==6.30.1
|
| 199 |
+
gradio_client==2.0.3
|
| 200 |
+
fastapi==0.128.0
|
| 201 |
+
Authlib==1.6.6
|
| 202 |
+
argon2-cffi==25.1.0
|
| 203 |
+
transformers==4.57.6
|
| 204 |
+
nbformat==5.10.4
|
| 205 |
+
mlstm_kernels==2.0.2
|
| 206 |
+
jupyter-console==6.6.3
|
| 207 |
+
gradio==6.5.1
|
| 208 |
+
datasets==4.3.0
|
| 209 |
+
clearml==1.16.4
|
| 210 |
+
accelerate==1.10.1
|
| 211 |
+
xlstm==2.0.4
|
| 212 |
+
nbclient==0.10.2
|
| 213 |
+
jupyter-events==0.12.0
|
| 214 |
+
trackio==0.15.0
|
| 215 |
+
nbconvert==7.16.6
|
| 216 |
+
jupyter_server==2.17.0
|
| 217 |
+
notebook_shim==0.2.4
|
| 218 |
+
jupyterlab_server==2.27.3
|
| 219 |
+
jupyter-lsp==2.3.0
|
| 220 |
+
nbclassic==1.3.3
|
| 221 |
+
jupyterlab==4.4.9
|
| 222 |
+
notebook==7.4.7
|
| 223 |
+
jupyter_contrib_core==0.4.2
|
| 224 |
+
jupyter==1.1.1
|
| 225 |
+
jupyter_nbextensions_configurator==0.6.4
|
| 226 |
+
causal-conv1d==1.5.0.post8
|
| 227 |
+
flash_attn==2.7.4.post1
|
| 228 |
+
mamba-ssm==2.2.4
|
| 229 |
+
hnet==0.0.1
|
| 230 |
+
autocommand==2.2.2
|
| 231 |
+
backports.tarfile==1.2.0
|
| 232 |
+
importlib_metadata==8.0.0
|
| 233 |
+
inflect==7.3.1
|
| 234 |
+
jaraco.collections==5.1.0
|
| 235 |
+
jaraco.context==5.3.0
|
| 236 |
+
jaraco.functools==4.0.1
|
| 237 |
+
jaraco.text==3.12.1
|
| 238 |
+
more-itertools==10.3.0
|
| 239 |
+
packaging==24.2
|
| 240 |
+
platformdirs==4.2.2
|
| 241 |
+
tomli==2.0.1
|
| 242 |
+
typeguard==4.3.0
|
| 243 |
+
typing_extensions==4.12.2
|
| 244 |
+
wheel==0.45.1
|
| 245 |
+
zipp==3.19.2
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/files/wandb-metadata.json
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"os": "Linux-5.4.0-176-generic-x86_64-with-glibc2.35",
|
| 3 |
+
"python": "CPython 3.12.0",
|
| 4 |
+
"startedAt": "2026-04-25T18:06:09.709909Z",
|
| 5 |
+
"args": [
|
| 6 |
+
"tracking=wandb",
|
| 7 |
+
"tracking.project=code-completion_lr-sweep",
|
| 8 |
+
"tracking.run_name=pythia_1b_lr_1e-5",
|
| 9 |
+
"training.lr=1e-5",
|
| 10 |
+
"paths.output_dir=/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5",
|
| 11 |
+
"model=pythia_1b",
|
| 12 |
+
"data.path=/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full"
|
| 13 |
+
],
|
| 14 |
+
"program": "/workspace/byte-llms-code/code_completion_exp/train_pythia/train.py",
|
| 15 |
+
"codePath": "code_completion_exp/train_pythia/train.py",
|
| 16 |
+
"codePathLocal": "train.py",
|
| 17 |
+
"git": {
|
| 18 |
+
"remote": "https://github.com/naryst/byte-llms-code.git",
|
| 19 |
+
"commit": "f111e13281aa0dc58e24302edab5b0d5c2024586"
|
| 20 |
+
},
|
| 21 |
+
"email": "nikita@local.ru",
|
| 22 |
+
"root": "/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5",
|
| 23 |
+
"host": "7504e518d24a",
|
| 24 |
+
"executable": "/venv/bytellm/bin/python",
|
| 25 |
+
"cpu_count": 64,
|
| 26 |
+
"cpu_count_logical": 128,
|
| 27 |
+
"gpu": "NVIDIA H100 80GB HBM3",
|
| 28 |
+
"gpu_count": 4,
|
| 29 |
+
"disk": {
|
| 30 |
+
"/": {
|
| 31 |
+
"total": "265214230528",
|
| 32 |
+
"used": "91343659008"
|
| 33 |
+
}
|
| 34 |
+
},
|
| 35 |
+
"memory": {
|
| 36 |
+
"total": "1081679683584"
|
| 37 |
+
},
|
| 38 |
+
"gpu_nvidia": [
|
| 39 |
+
{
|
| 40 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 41 |
+
"memoryTotal": "85520809984",
|
| 42 |
+
"cudaCores": 16896,
|
| 43 |
+
"architecture": "Hopper",
|
| 44 |
+
"uuid": "GPU-b60cdcab-2033-2009-41de-be646c953a20"
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 48 |
+
"memoryTotal": "85520809984",
|
| 49 |
+
"cudaCores": 16896,
|
| 50 |
+
"architecture": "Hopper",
|
| 51 |
+
"uuid": "GPU-9982b420-4520-4238-c378-ec5a46015474"
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 55 |
+
"memoryTotal": "85520809984",
|
| 56 |
+
"cudaCores": 16896,
|
| 57 |
+
"architecture": "Hopper",
|
| 58 |
+
"uuid": "GPU-e26ebaac-aaa6-3eed-17ab-a3dce303a76f"
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"name": "NVIDIA H100 80GB HBM3",
|
| 62 |
+
"memoryTotal": "85520809984",
|
| 63 |
+
"cudaCores": 16896,
|
| 64 |
+
"architecture": "Hopper",
|
| 65 |
+
"uuid": "GPU-9dfc6dba-0be6-4a10-1027-336cc0e65134"
|
| 66 |
+
}
|
| 67 |
+
],
|
| 68 |
+
"cudaVersion": "12.2",
|
| 69 |
+
"writerId": "dmklf1i0rmj0ula04lyo02ubhqbt3jfa"
|
| 70 |
+
}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-core.log
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:09.787519787Z","level":"INFO","msg":"main: starting server","port-filename":"/tmp/tmpf5x00zj7/port-65409.txt","pid":65409,"log-level":0,"disable-analytics":false,"shutdown-on-parent-exit":false,"enable-dcgm-profiling":false}
|
| 2 |
+
{"time":"2026-04-25T18:06:09.787923233Z","level":"INFO","msg":"server: will exit if parent process dies","ppid":65409}
|
| 3 |
+
{"time":"2026-04-25T18:06:09.787908874Z","level":"INFO","msg":"server: accepting connections","addr":{"Name":"/tmp/wandb-65409-65484-4228298840/socket","Net":"unix"}}
|
| 4 |
+
{"time":"2026-04-25T18:06:09.975538523Z","level":"INFO","msg":"connection: ManageConnectionData: new connection created","id":"1(@)"}
|
| 5 |
+
{"time":"2026-04-25T18:06:09.998738408Z","level":"INFO","msg":"handleInformInit: received","streamId":"3z5g26qd","id":"1(@)"}
|
| 6 |
+
{"time":"2026-04-25T18:06:10.475016454Z","level":"INFO","msg":"handleInformInit: stream started","streamId":"3z5g26qd","id":"1(@)"}
|
| 7 |
+
{"time":"2026-04-25T18:48:11.627669464Z","level":"INFO","msg":"handleInformFinish: finish message received","streamId":"3z5g26qd","id":"1(@)"}
|
| 8 |
+
{"time":"2026-04-25T18:48:11.628064511Z","level":"INFO","msg":"handleInformFinish: stream closed","streamId":"3z5g26qd","id":"1(@)"}
|
| 9 |
+
{"time":"2026-04-25T18:48:11.643176343Z","level":"INFO","msg":"handleInformTeardown: server teardown initiated","id":"1(@)"}
|
| 10 |
+
{"time":"2026-04-25T18:48:11.643213451Z","level":"INFO","msg":"handleInformTeardown: server shutdown complete","id":"1(@)"}
|
| 11 |
+
{"time":"2026-04-25T18:48:11.643222547Z","level":"INFO","msg":"server is shutting down"}
|
| 12 |
+
{"time":"2026-04-25T18:48:11.643222003Z","level":"INFO","msg":"connection: closing","id":"1(@)"}
|
| 13 |
+
{"time":"2026-04-25T18:48:11.643253636Z","level":"INFO","msg":"connection: closed successfully","id":"1(@)"}
|
| 14 |
+
{"time":"2026-04-25T18:48:11.643257408Z","level":"INFO","msg":"connection: ManageConnectionData: connection closed","id":"1(@)"}
|
| 15 |
+
{"time":"2026-04-25T18:48:11.643321227Z","level":"INFO","msg":"server: listener closed","addr":{"Name":"/tmp/wandb-65409-65484-4228298840/socket","Net":"unix"}}
|
| 16 |
+
{"time":"2026-04-25T18:48:11.643353706Z","level":"INFO","msg":"server is closed"}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-internal.log
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"time":"2026-04-25T18:06:09.99885055Z","level":"INFO","msg":"stream: starting","core version":"0.24.0"}
|
| 2 |
+
{"time":"2026-04-25T18:06:10.474895883Z","level":"INFO","msg":"stream: created new stream","id":"3z5g26qd"}
|
| 3 |
+
{"time":"2026-04-25T18:06:10.474937729Z","level":"INFO","msg":"handler: started","stream_id":"3z5g26qd"}
|
| 4 |
+
{"time":"2026-04-25T18:06:10.475009823Z","level":"INFO","msg":"stream: started","id":"3z5g26qd"}
|
| 5 |
+
{"time":"2026-04-25T18:06:10.475020366Z","level":"INFO","msg":"writer: started","stream_id":"3z5g26qd"}
|
| 6 |
+
{"time":"2026-04-25T18:06:10.475023072Z","level":"INFO","msg":"sender: started","stream_id":"3z5g26qd"}
|
| 7 |
+
{"time":"2026-04-25T18:06:10.687143958Z","level":"ERROR","msg":"git repo not found","error":"repository does not exist"}
|
| 8 |
+
{"time":"2026-04-25T18:48:11.476724146Z","level":"INFO","msg":"fileTransfer: Close: file transfer manager closed"}
|
| 9 |
+
{"time":"2026-04-25T18:48:11.625156726Z","level":"INFO","msg":"handler: operation stats","stats":{}}
|
| 10 |
+
{"time":"2026-04-25T18:48:11.627697754Z","level":"INFO","msg":"stream: closing","id":"3z5g26qd"}
|
| 11 |
+
{"time":"2026-04-25T18:48:11.627706958Z","level":"INFO","msg":"handler: closed","stream_id":"3z5g26qd"}
|
| 12 |
+
{"time":"2026-04-25T18:48:11.62778746Z","level":"INFO","msg":"sender: closed","stream_id":"3z5g26qd"}
|
| 13 |
+
{"time":"2026-04-25T18:48:11.627798934Z","level":"INFO","msg":"stream: closed","id":"3z5g26qd"}
|
lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug.log
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Current SDK version is 0.24.0
|
| 2 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Configure stats pid to 65409
|
| 3 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_setup.py:_flush():81] Loading settings from environment variables
|
| 4 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:setup_run_log_directory():717] Logging user logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug.log
|
| 5 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:setup_run_log_directory():718] Logging internal logs to /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5/wandb/run-20260425_180609-3z5g26qd/logs/debug-internal.log
|
| 6 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():844] calling init triggers
|
| 7 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():849] wandb.init called with sweep_config: {}
|
| 8 |
+
config: {'model': {'name': 'EleutherAI/pythia-1b', 'checkpoint_path': None, 'from_scratch': False}, 'training': {'epochs': 1, 'batch_size': 4, 'eval_batch_size': 12, 'gradient_accumulation_steps': 4, 'lr': 1e-05, 'weight_decay': 0.1, 'betas': [0.9, 0.95], 'eps': 1e-08, 'lr_scheduler': 'wsd', 'warmup_ratio': 0.1, 'decay_ratio': 0.2, 'warmup_steps': 100, 'min_lr_ratio': 0.1, 'max_grad_norm': 1.0, 'use_amp': True, 'resume': False, 'resume_checkpoint': None}, 'data': {'path': '/workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full', 'max_context_len': 4096, 'max_target_len': 256, 'num_workers': 4, 'pin_memory': True, 'max_train_samples': None, 'max_val_samples': 2000}, 'logging': {'log_interval': 10, 'save_interval': 0, 'eval_interval': 2000, 'save_every_epoch': False}, 'tracking': {'enabled': True, 'backend': 'wandb', 'project': 'code-completion_lr-sweep', 'run_name': 'pythia_1b_lr_1e-5', 'entity': None, 'base_url': 'https://wandb.platun0v.ru', 'local_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'paths': {'output_dir': '/workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_1e-5'}, 'seed': 42, 'device': 'cuda', '_wandb': {'code_path': 'code/code_completion_exp/train_pythia/train.py'}}
|
| 9 |
+
2026-04-25 18:06:09,711 INFO MainThread:65409 [wandb_init.py:init():892] starting backend
|
| 10 |
+
2026-04-25 18:06:09,975 INFO MainThread:65409 [wandb_init.py:init():895] sending inform_init request
|
| 11 |
+
2026-04-25 18:06:09,997 INFO MainThread:65409 [wandb_init.py:init():903] backend started and connected
|
| 12 |
+
2026-04-25 18:06:10,000 INFO MainThread:65409 [wandb_init.py:init():973] updated telemetry
|
| 13 |
+
2026-04-25 18:06:10,016 INFO MainThread:65409 [wandb_init.py:init():997] communicating run to backend with 90.0 second timeout
|
| 14 |
+
2026-04-25 18:06:10,686 INFO MainThread:65409 [wandb_init.py:init():1044] starting run threads in backend
|
| 15 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_console_start():2529] atexit reg
|
| 16 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2377] redirect: wrap_raw
|
| 17 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2446] Wrapping output streams.
|
| 18 |
+
2026-04-25 18:06:10,846 INFO MainThread:65409 [wandb_run.py:_redirect():2469] Redirects installed.
|
| 19 |
+
2026-04-25 18:06:10,849 INFO MainThread:65409 [wandb_init.py:init():1084] run started, returning control to user process
|
| 20 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_finish():2295] finishing run nikita/code-completion_lr-sweep/3z5g26qd
|
| 21 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_atexit_cleanup():2494] got exitcode: 0
|
| 22 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_restore():2476] restore
|
| 23 |
+
2026-04-25 18:48:10,640 INFO MainThread:65409 [wandb_run.py:_restore():2482] restore done
|
| 24 |
+
2026-04-25 18:48:11,627 INFO MainThread:65409 [wandb_run.py:_footer_sync_info():3870] logging synced files
|
lr_sweep/pythia_1b_lr_2e-5/train.log
ADDED
|
@@ -0,0 +1,1259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 17:54:38,152][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 2 |
+
[2026-04-25 17:54:38] CUDA_VISIBLE_DEVICES: 2,3
|
| 3 |
+
[2026-04-25 17:54:38] Number of processes: 2
|
| 4 |
+
[2026-04-25 17:54:38] Process index: 0
|
| 5 |
+
[2026-04-25 17:54:38] Mixed precision: bf16
|
| 6 |
+
[2026-04-25 17:54:38] ============================================================
|
| 7 |
+
[2026-04-25 17:54:38] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 8 |
+
[2026-04-25 17:54:38] ============================================================
|
| 9 |
+
[2026-04-25 17:54:38] Config:
|
| 10 |
+
model:
|
| 11 |
+
name: EleutherAI/pythia-1b
|
| 12 |
+
checkpoint_path: null
|
| 13 |
+
from_scratch: false
|
| 14 |
+
training:
|
| 15 |
+
epochs: 1
|
| 16 |
+
batch_size: 4
|
| 17 |
+
eval_batch_size: 12
|
| 18 |
+
gradient_accumulation_steps: 4
|
| 19 |
+
lr: 2.0e-05
|
| 20 |
+
weight_decay: 0.1
|
| 21 |
+
betas:
|
| 22 |
+
- 0.9
|
| 23 |
+
- 0.95
|
| 24 |
+
eps: 1.0e-08
|
| 25 |
+
lr_scheduler: wsd
|
| 26 |
+
warmup_ratio: 0.1
|
| 27 |
+
decay_ratio: 0.2
|
| 28 |
+
warmup_steps: 100
|
| 29 |
+
min_lr_ratio: 0.1
|
| 30 |
+
max_grad_norm: 1.0
|
| 31 |
+
use_amp: true
|
| 32 |
+
resume: false
|
| 33 |
+
resume_checkpoint: null
|
| 34 |
+
data:
|
| 35 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 36 |
+
max_context_len: 4096
|
| 37 |
+
max_target_len: 256
|
| 38 |
+
num_workers: 4
|
| 39 |
+
pin_memory: true
|
| 40 |
+
max_train_samples: 20000
|
| 41 |
+
max_val_samples: 2000
|
| 42 |
+
logging:
|
| 43 |
+
log_interval: 10
|
| 44 |
+
save_interval: 3000
|
| 45 |
+
eval_interval: 1000
|
| 46 |
+
save_every_epoch: true
|
| 47 |
+
tracking:
|
| 48 |
+
enabled: true
|
| 49 |
+
backend: wandb
|
| 50 |
+
project: code-completion_lr-sweep
|
| 51 |
+
run_name: pythia_1b_lr_2e-5
|
| 52 |
+
entity: null
|
| 53 |
+
base_url: https://wandb.platun0v.ru
|
| 54 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5
|
| 55 |
+
paths:
|
| 56 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5
|
| 57 |
+
seed: 42
|
| 58 |
+
device: cuda
|
| 59 |
+
|
| 60 |
+
[2026-04-25 17:54:40] Initializing tokenizer...
|
| 61 |
+
[2026-04-25 17:54:40] Loading model...
|
| 62 |
+
[2026-04-25 17:54:43] Loaded pretrained: EleutherAI/pythia-1b
|
| 63 |
+
[2026-04-25 17:54:43] Total params: 1,011,781,632
|
| 64 |
+
[2026-04-25 17:54:43] Trainable params: 1,011,781,632
|
| 65 |
+
[2026-04-25 17:54:43] Creating dataloaders...
|
| 66 |
+
[2026-04-25 17:54:43] Train dataset size: 20000
|
| 67 |
+
[2026-04-25 17:54:43] Train batches per epoch (before DDP split): 5000
|
| 68 |
+
[2026-04-25 17:54:43] Validation dataset size: 2000
|
| 69 |
+
[2026-04-25 17:54:43] Validation batches: 167
|
| 70 |
+
[2026-04-25 17:54:43] Creating optimizer...
|
| 71 |
+
[2026-04-25 17:54:43] Total steps: 625, Steps per epoch: 2500
|
| 72 |
+
[2026-04-25 17:54:43] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 73 |
+
[2026-04-25 17:54:45] Train batches per epoch (after DDP split): 2500
|
| 74 |
+
[2026-04-25 17:54:45] Starting training...
|
| 75 |
+
[2026-04-25 17:54:45]
|
| 76 |
+
============================================================
|
| 77 |
+
[2026-04-25 17:54:45] EPOCH 1/1
|
| 78 |
+
[2026-04-25 17:54:45] ============================================================
|
| 79 |
+
[2026-04-25 17:54:48] Epoch 1 | Step 10 | Loss: 2.2990 | LR: 7.81e-06
|
| 80 |
+
[2026-04-25 17:54:51] Epoch 1 | Step 20 | Loss: 1.8267 | LR: 1.36e-05
|
| 81 |
+
[2026-04-25 17:54:53] Epoch 1 | Step 30 | Loss: 1.6114 | LR: 1.94e-05
|
| 82 |
+
[2026-04-25 17:54:56] Epoch 1 | Step 40 | Loss: 1.5023 | LR: 2.00e-05
|
| 83 |
+
[2026-04-25 17:54:58] Epoch 1 | Step 50 | Loss: 1.4291 | LR: 2.00e-05
|
| 84 |
+
[2026-04-25 17:55:01] Epoch 1 | Step 60 | Loss: 1.3708 | LR: 2.00e-05
|
| 85 |
+
[2026-04-25 17:55:03] Epoch 1 | Step 70 | Loss: 1.3249 | LR: 2.00e-05
|
| 86 |
+
[2026-04-25 17:55:06] Epoch 1 | Step 80 | Loss: 1.2878 | LR: 2.00e-05
|
| 87 |
+
[2026-04-25 17:55:09] Epoch 1 | Step 90 | Loss: 1.2478 | LR: 2.00e-05
|
| 88 |
+
[2026-04-25 17:55:11] Epoch 1 | Step 100 | Loss: 1.2470 | LR: 2.00e-05
|
| 89 |
+
[2026-04-25 17:55:14] Epoch 1 | Step 110 | Loss: 1.2379 | LR: 2.00e-05
|
| 90 |
+
[2026-04-25 17:55:16] Epoch 1 | Step 120 | Loss: 1.2292 | LR: 2.00e-05
|
| 91 |
+
[2026-04-25 17:55:19] Epoch 1 | Step 130 | Loss: 1.2023 | LR: 2.00e-05
|
| 92 |
+
[2026-04-25 17:55:22] Epoch 1 | Step 140 | Loss: 1.1906 | LR: 2.00e-05
|
| 93 |
+
[2026-04-25 17:55:24] Epoch 1 | Step 150 | Loss: 1.1838 | LR: 2.00e-05
|
| 94 |
+
[2026-04-25 17:55:27] Epoch 1 | Step 160 | Loss: 1.1693 | LR: 2.00e-05
|
| 95 |
+
[2026-04-25 17:55:29] Epoch 1 | Step 170 | Loss: 1.1607 | LR: 2.00e-05
|
| 96 |
+
[2026-04-25 17:55:32] Epoch 1 | Step 180 | Loss: 1.1611 | LR: 2.00e-05
|
| 97 |
+
[2026-04-25 17:55:34] Epoch 1 | Step 190 | Loss: 1.1570 | LR: 2.00e-05
|
| 98 |
+
[2026-04-25 17:55:37] Epoch 1 | Step 200 | Loss: 1.1495 | LR: 2.00e-05
|
| 99 |
+
[2026-04-25 17:55:40] Epoch 1 | Step 210 | Loss: 1.1443 | LR: 2.00e-05
|
| 100 |
+
[2026-04-25 17:55:42] Epoch 1 | Step 220 | Loss: 1.1430 | LR: 2.00e-05
|
| 101 |
+
[2026-04-25 17:55:45] Epoch 1 | Step 230 | Loss: 1.1414 | LR: 2.00e-05
|
| 102 |
+
[2026-04-25 17:55:47] Epoch 1 | Step 240 | Loss: 1.1347 | LR: 2.00e-05
|
| 103 |
+
[2026-04-25 17:55:50] Epoch 1 | Step 250 | Loss: 1.1281 | LR: 2.00e-05
|
| 104 |
+
[2026-04-25 17:55:53] Epoch 1 | Step 260 | Loss: 1.1317 | LR: 1.89e-05
|
| 105 |
+
[2026-04-25 17:55:55] Epoch 1 | Step 270 | Loss: 1.1303 | LR: 1.58e-05
|
| 106 |
+
[2026-04-25 17:55:58] Epoch 1 | Step 280 | Loss: 1.1275 | LR: 1.16e-05
|
| 107 |
+
[2026-04-25 17:56:00] Epoch 1 | Step 290 | Loss: 1.1248 | LR: 7.17e-06
|
| 108 |
+
[2026-04-25 17:56:03] Epoch 1 | Step 300 | Loss: 1.1198 | LR: 3.72e-06
|
| 109 |
+
[2026-04-25 17:56:06] Epoch 1 | Step 310 | Loss: 1.1188 | LR: 2.07e-06
|
| 110 |
+
[2026-04-25 17:56:08] Epoch 1 | Step 320 | Loss: 1.1224 | LR: 2.00e-06
|
| 111 |
+
[2026-04-25 17:56:11] Epoch 1 | Step 330 | Loss: 1.1196 | LR: 2.00e-06
|
| 112 |
+
[2026-04-25 17:56:13] Epoch 1 | Step 340 | Loss: 1.1193 | LR: 2.00e-06
|
| 113 |
+
[2026-04-25 17:56:15] Epoch 1 | Step 350 | Loss: 1.1143 | LR: 2.00e-06
|
| 114 |
+
[2026-04-25 17:56:18] Epoch 1 | Step 360 | Loss: 1.1109 | LR: 2.00e-06
|
| 115 |
+
[2026-04-25 17:56:21] Epoch 1 | Step 370 | Loss: 1.1074 | LR: 2.00e-06
|
| 116 |
+
[2026-04-25 17:56:23] Epoch 1 | Step 380 | Loss: 1.1095 | LR: 2.00e-06
|
| 117 |
+
[2026-04-25 17:56:26] Epoch 1 | Step 390 | Loss: 1.1066 | LR: 2.00e-06
|
| 118 |
+
[2026-04-25 17:56:28] Epoch 1 | Step 400 | Loss: 1.1040 | LR: 2.00e-06
|
| 119 |
+
[2026-04-25 17:56:31] Epoch 1 | Step 410 | Loss: 1.1061 | LR: 2.00e-06
|
| 120 |
+
[2026-04-25 17:56:34] Epoch 1 | Step 420 | Loss: 1.1029 | LR: 2.00e-06
|
| 121 |
+
[2026-04-25 17:56:36] Epoch 1 | Step 430 | Loss: 1.1003 | LR: 2.00e-06
|
| 122 |
+
[2026-04-25 17:56:39] Epoch 1 | Step 440 | Loss: 1.0993 | LR: 2.00e-06
|
| 123 |
+
[2026-04-25 17:56:41] Epoch 1 | Step 450 | Loss: 1.0988 | LR: 2.00e-06
|
| 124 |
+
[2026-04-25 17:56:44] Epoch 1 | Step 460 | Loss: 1.1001 | LR: 2.00e-06
|
| 125 |
+
[2026-04-25 17:56:46] Epoch 1 | Step 470 | Loss: 1.1021 | LR: 2.00e-06
|
| 126 |
+
[2026-04-25 17:56:49] Epoch 1 | Step 480 | Loss: 1.1025 | LR: 2.00e-06
|
| 127 |
+
[2026-04-25 17:56:51] Epoch 1 | Step 490 | Loss: 1.1045 | LR: 2.00e-06
|
| 128 |
+
[2026-04-25 17:56:54] Epoch 1 | Step 500 | Loss: 1.1021 | LR: 2.00e-06
|
| 129 |
+
[2026-04-25 17:56:57] Epoch 1 | Step 510 | Loss: 1.0999 | LR: 2.00e-06
|
| 130 |
+
[2026-04-25 17:56:59] Epoch 1 | Step 520 | Loss: 1.0994 | LR: 2.00e-06
|
| 131 |
+
[2026-04-25 17:57:02] Epoch 1 | Step 530 | Loss: 1.0979 | LR: 2.00e-06
|
| 132 |
+
[2026-04-25 17:57:04] Epoch 1 | Step 540 | Loss: 1.0996 | LR: 2.00e-06
|
| 133 |
+
[2026-04-25 17:57:07] Epoch 1 | Step 550 | Loss: 1.0987 | LR: 2.00e-06
|
| 134 |
+
[2026-04-25 17:57:10] Epoch 1 | Step 560 | Loss: 1.0978 | LR: 2.00e-06
|
| 135 |
+
[2026-04-25 17:57:12] Epoch 1 | Step 570 | Loss: 1.0967 | LR: 2.00e-06
|
| 136 |
+
[2026-04-25 17:57:15] Epoch 1 | Step 580 | Loss: 1.0964 | LR: 2.00e-06
|
| 137 |
+
[2026-04-25 17:57:17] Epoch 1 | Step 590 | Loss: 1.0982 | LR: 2.00e-06
|
| 138 |
+
[2026-04-25 17:57:20] Epoch 1 | Step 600 | Loss: 1.0986 | LR: 2.00e-06
|
| 139 |
+
[2026-04-25 17:57:22] Epoch 1 | Step 610 | Loss: 1.0997 | LR: 2.00e-06
|
| 140 |
+
[2026-04-25 17:57:25] Epoch 1 | Step 620 | Loss: 1.0978 | LR: 2.00e-06
|
| 141 |
+
[2026-04-25 17:57:26] Epoch 1 completed in 161.35s | Loss: 1.0968
|
| 142 |
+
[2026-04-25 17:57:33] Checkpoint saved: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5/checkpoints/checkpoint_step_625.pt
|
| 143 |
+
[2026-04-25 17:57:39]
|
| 144 |
+
Training completed!
|
| 145 |
+
[2026-04-25 17:57:41] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5/model_final.pt
|
| 146 |
+
[2026-04-25 18:48:21,802][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 147 |
+
[2026-04-25 18:48:21] CUDA_VISIBLE_DEVICES: 2,3
|
| 148 |
+
[2026-04-25 18:48:21] Number of processes: 2
|
| 149 |
+
[2026-04-25 18:48:21] Process index: 0
|
| 150 |
+
[2026-04-25 18:48:21] Mixed precision: bf16
|
| 151 |
+
[2026-04-25 18:48:21] ============================================================
|
| 152 |
+
[2026-04-25 18:48:21] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 153 |
+
[2026-04-25 18:48:21] ============================================================
|
| 154 |
+
[2026-04-25 18:48:21] Config:
|
| 155 |
+
model:
|
| 156 |
+
name: EleutherAI/pythia-1b
|
| 157 |
+
checkpoint_path: null
|
| 158 |
+
from_scratch: false
|
| 159 |
+
training:
|
| 160 |
+
epochs: 1
|
| 161 |
+
batch_size: 4
|
| 162 |
+
eval_batch_size: 12
|
| 163 |
+
gradient_accumulation_steps: 4
|
| 164 |
+
lr: 2.0e-05
|
| 165 |
+
weight_decay: 0.1
|
| 166 |
+
betas:
|
| 167 |
+
- 0.9
|
| 168 |
+
- 0.95
|
| 169 |
+
eps: 1.0e-08
|
| 170 |
+
lr_scheduler: wsd
|
| 171 |
+
warmup_ratio: 0.1
|
| 172 |
+
decay_ratio: 0.2
|
| 173 |
+
warmup_steps: 100
|
| 174 |
+
min_lr_ratio: 0.1
|
| 175 |
+
max_grad_norm: 1.0
|
| 176 |
+
use_amp: true
|
| 177 |
+
resume: false
|
| 178 |
+
resume_checkpoint: null
|
| 179 |
+
data:
|
| 180 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 181 |
+
max_context_len: 4096
|
| 182 |
+
max_target_len: 256
|
| 183 |
+
num_workers: 4
|
| 184 |
+
pin_memory: true
|
| 185 |
+
max_train_samples: null
|
| 186 |
+
max_val_samples: 2000
|
| 187 |
+
logging:
|
| 188 |
+
log_interval: 10
|
| 189 |
+
save_interval: 0
|
| 190 |
+
eval_interval: 2000
|
| 191 |
+
save_every_epoch: false
|
| 192 |
+
tracking:
|
| 193 |
+
enabled: true
|
| 194 |
+
backend: wandb
|
| 195 |
+
project: code-completion_lr-sweep
|
| 196 |
+
run_name: pythia_1b_lr_2e-5
|
| 197 |
+
entity: null
|
| 198 |
+
base_url: https://wandb.platun0v.ru
|
| 199 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5
|
| 200 |
+
paths:
|
| 201 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5
|
| 202 |
+
seed: 42
|
| 203 |
+
device: cuda
|
| 204 |
+
|
| 205 |
+
[2026-04-25 18:48:24] Initializing tokenizer...
|
| 206 |
+
[2026-04-25 18:48:24] Loading model...
|
| 207 |
+
[2026-04-25 18:48:27] Loaded pretrained: EleutherAI/pythia-1b
|
| 208 |
+
[2026-04-25 18:48:27] Total params: 1,011,781,632
|
| 209 |
+
[2026-04-25 18:48:27] Trainable params: 1,011,781,632
|
| 210 |
+
[2026-04-25 18:48:27] Creating dataloaders...
|
| 211 |
+
[2026-04-25 18:48:27] Train dataset size: 316397
|
| 212 |
+
[2026-04-25 18:48:27] Train batches per epoch (before DDP split): 79100
|
| 213 |
+
[2026-04-25 18:48:27] Validation dataset size: 2000
|
| 214 |
+
[2026-04-25 18:48:27] Validation batches: 167
|
| 215 |
+
[2026-04-25 18:48:27] Creating optimizer...
|
| 216 |
+
[2026-04-25 18:48:27] Total steps: 9887, Steps per epoch: 39550
|
| 217 |
+
[2026-04-25 18:48:27] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 218 |
+
[2026-04-25 18:48:28] Train batches per epoch (after DDP split): 39550
|
| 219 |
+
[2026-04-25 18:48:28] Starting training...
|
| 220 |
+
[2026-04-25 18:48:28]
|
| 221 |
+
============================================================
|
| 222 |
+
[2026-04-25 18:48:28] EPOCH 1/1
|
| 223 |
+
[2026-04-25 18:48:28] ============================================================
|
| 224 |
+
[2026-04-25 18:48:32] Epoch 1 | Step 10 | Loss: 2.5532 | LR: 2.36e-06
|
| 225 |
+
[2026-04-25 18:48:34] Epoch 1 | Step 20 | Loss: 2.4795 | LR: 2.73e-06
|
| 226 |
+
[2026-04-25 18:48:36] Epoch 1 | Step 30 | Loss: 2.3311 | LR: 3.09e-06
|
| 227 |
+
[2026-04-25 18:48:39] Epoch 1 | Step 40 | Loss: 2.2161 | LR: 3.46e-06
|
| 228 |
+
[2026-04-25 18:48:42] Epoch 1 | Step 50 | Loss: 2.0812 | LR: 3.82e-06
|
| 229 |
+
[2026-04-25 18:48:44] Epoch 1 | Step 60 | Loss: 1.9707 | LR: 4.19e-06
|
| 230 |
+
[2026-04-25 18:48:47] Epoch 1 | Step 70 | Loss: 1.8690 | LR: 4.55e-06
|
| 231 |
+
[2026-04-25 18:48:50] Epoch 1 | Step 80 | Loss: 1.8062 | LR: 4.91e-06
|
| 232 |
+
[2026-04-25 18:48:52] Epoch 1 | Step 90 | Loss: 1.7392 | LR: 5.28e-06
|
| 233 |
+
[2026-04-25 18:48:54] Epoch 1 | Step 100 | Loss: 1.6842 | LR: 5.64e-06
|
| 234 |
+
[2026-04-25 18:48:57] Epoch 1 | Step 110 | Loss: 1.6506 | LR: 6.01e-06
|
| 235 |
+
[2026-04-25 18:48:59] Epoch 1 | Step 120 | Loss: 1.6147 | LR: 6.37e-06
|
| 236 |
+
[2026-04-25 18:49:02] Epoch 1 | Step 130 | Loss: 1.5910 | LR: 6.74e-06
|
| 237 |
+
[2026-04-25 18:49:04] Epoch 1 | Step 140 | Loss: 1.5694 | LR: 7.10e-06
|
| 238 |
+
[2026-04-25 18:49:07] Epoch 1 | Step 150 | Loss: 1.5360 | LR: 7.47e-06
|
| 239 |
+
[2026-04-25 18:49:09] Epoch 1 | Step 160 | Loss: 1.5069 | LR: 7.83e-06
|
| 240 |
+
[2026-04-25 18:49:12] Epoch 1 | Step 170 | Loss: 1.4848 | LR: 8.19e-06
|
| 241 |
+
[2026-04-25 18:49:14] Epoch 1 | Step 180 | Loss: 1.4571 | LR: 8.56e-06
|
| 242 |
+
[2026-04-25 18:49:17] Epoch 1 | Step 190 | Loss: 1.4424 | LR: 8.92e-06
|
| 243 |
+
[2026-04-25 18:49:19] Epoch 1 | Step 200 | Loss: 1.4271 | LR: 9.29e-06
|
| 244 |
+
[2026-04-25 18:49:22] Epoch 1 | Step 210 | Loss: 1.4197 | LR: 9.65e-06
|
| 245 |
+
[2026-04-25 18:49:24] Epoch 1 | Step 220 | Loss: 1.4071 | LR: 1.00e-05
|
| 246 |
+
[2026-04-25 18:49:26] Epoch 1 | Step 230 | Loss: 1.3887 | LR: 1.04e-05
|
| 247 |
+
[2026-04-25 18:49:29] Epoch 1 | Step 240 | Loss: 1.3747 | LR: 1.07e-05
|
| 248 |
+
[2026-04-25 18:49:31] Epoch 1 | Step 250 | Loss: 1.3629 | LR: 1.11e-05
|
| 249 |
+
[2026-04-25 18:49:34] Epoch 1 | Step 260 | Loss: 1.3584 | LR: 1.15e-05
|
| 250 |
+
[2026-04-25 18:49:37] Epoch 1 | Step 270 | Loss: 1.3467 | LR: 1.18e-05
|
| 251 |
+
[2026-04-25 18:49:39] Epoch 1 | Step 280 | Loss: 1.3339 | LR: 1.22e-05
|
| 252 |
+
[2026-04-25 18:49:41] Epoch 1 | Step 290 | Loss: 1.3252 | LR: 1.26e-05
|
| 253 |
+
[2026-04-25 18:49:44] Epoch 1 | Step 300 | Loss: 1.3174 | LR: 1.29e-05
|
| 254 |
+
[2026-04-25 18:49:47] Epoch 1 | Step 310 | Loss: 1.3085 | LR: 1.33e-05
|
| 255 |
+
[2026-04-25 18:49:49] Epoch 1 | Step 320 | Loss: 1.2988 | LR: 1.37e-05
|
| 256 |
+
[2026-04-25 18:49:52] Epoch 1 | Step 330 | Loss: 1.2891 | LR: 1.40e-05
|
| 257 |
+
[2026-04-25 18:49:54] Epoch 1 | Step 340 | Loss: 1.2812 | LR: 1.44e-05
|
| 258 |
+
[2026-04-25 18:49:57] Epoch 1 | Step 350 | Loss: 1.2758 | LR: 1.48e-05
|
| 259 |
+
[2026-04-25 18:49:59] Epoch 1 | Step 360 | Loss: 1.2672 | LR: 1.51e-05
|
| 260 |
+
[2026-04-25 18:50:02] Epoch 1 | Step 370 | Loss: 1.2578 | LR: 1.55e-05
|
| 261 |
+
[2026-04-25 18:50:04] Epoch 1 | Step 380 | Loss: 1.2505 | LR: 1.58e-05
|
| 262 |
+
[2026-04-25 18:50:07] Epoch 1 | Step 390 | Loss: 1.2448 | LR: 1.62e-05
|
| 263 |
+
[2026-04-25 18:50:10] Epoch 1 | Step 400 | Loss: 1.2397 | LR: 1.66e-05
|
| 264 |
+
[2026-04-25 18:50:12] Epoch 1 | Step 410 | Loss: 1.2351 | LR: 1.69e-05
|
| 265 |
+
[2026-04-25 18:50:15] Epoch 1 | Step 420 | Loss: 1.2302 | LR: 1.73e-05
|
| 266 |
+
[2026-04-25 18:50:17] Epoch 1 | Step 430 | Loss: 1.2292 | LR: 1.77e-05
|
| 267 |
+
[2026-04-25 18:50:20] Epoch 1 | Step 440 | Loss: 1.2219 | LR: 1.80e-05
|
| 268 |
+
[2026-04-25 18:50:23] Epoch 1 | Step 450 | Loss: 1.2174 | LR: 1.84e-05
|
| 269 |
+
[2026-04-25 18:50:25] Epoch 1 | Step 460 | Loss: 1.2144 | LR: 1.88e-05
|
| 270 |
+
[2026-04-25 18:50:28] Epoch 1 | Step 470 | Loss: 1.2094 | LR: 1.91e-05
|
| 271 |
+
[2026-04-25 18:50:30] Epoch 1 | Step 480 | Loss: 1.2067 | LR: 1.95e-05
|
| 272 |
+
[2026-04-25 18:50:33] Epoch 1 | Step 490 | Loss: 1.2017 | LR: 1.99e-05
|
| 273 |
+
[2026-04-25 18:50:35] Epoch 1 | Step 500 | Loss: 1.1974 | LR: 2.00e-05
|
| 274 |
+
[2026-04-25 18:50:37] Epoch 1 | Step 510 | Loss: 1.1927 | LR: 2.00e-05
|
| 275 |
+
[2026-04-25 18:50:40] Epoch 1 | Step 520 | Loss: 1.1904 | LR: 2.00e-05
|
| 276 |
+
[2026-04-25 18:50:42] Epoch 1 | Step 530 | Loss: 1.1863 | LR: 2.00e-05
|
| 277 |
+
[2026-04-25 18:50:45] Epoch 1 | Step 540 | Loss: 1.1815 | LR: 2.00e-05
|
| 278 |
+
[2026-04-25 18:50:48] Epoch 1 | Step 550 | Loss: 1.1778 | LR: 2.00e-05
|
| 279 |
+
[2026-04-25 18:50:50] Epoch 1 | Step 560 | Loss: 1.1744 | LR: 2.00e-05
|
| 280 |
+
[2026-04-25 18:50:52] Epoch 1 | Step 570 | Loss: 1.1728 | LR: 2.00e-05
|
| 281 |
+
[2026-04-25 18:50:55] Epoch 1 | Step 580 | Loss: 1.1712 | LR: 2.00e-05
|
| 282 |
+
[2026-04-25 18:50:57] Epoch 1 | Step 590 | Loss: 1.1700 | LR: 2.00e-05
|
| 283 |
+
[2026-04-25 18:51:00] Epoch 1 | Step 600 | Loss: 1.1683 | LR: 2.00e-05
|
| 284 |
+
[2026-04-25 18:51:03] Epoch 1 | Step 610 | Loss: 1.1683 | LR: 2.00e-05
|
| 285 |
+
[2026-04-25 18:51:05] Epoch 1 | Step 620 | Loss: 1.1674 | LR: 2.00e-05
|
| 286 |
+
[2026-04-25 18:51:07] Epoch 1 | Step 630 | Loss: 1.1651 | LR: 2.00e-05
|
| 287 |
+
[2026-04-25 18:51:10] Epoch 1 | Step 640 | Loss: 1.1644 | LR: 2.00e-05
|
| 288 |
+
[2026-04-25 18:51:12] Epoch 1 | Step 650 | Loss: 1.1624 | LR: 2.00e-05
|
| 289 |
+
[2026-04-25 18:51:15] Epoch 1 | Step 660 | Loss: 1.1616 | LR: 2.00e-05
|
| 290 |
+
[2026-04-25 18:51:17] Epoch 1 | Step 670 | Loss: 1.1591 | LR: 2.00e-05
|
| 291 |
+
[2026-04-25 18:51:20] Epoch 1 | Step 680 | Loss: 1.1578 | LR: 2.00e-05
|
| 292 |
+
[2026-04-25 18:51:23] Epoch 1 | Step 690 | Loss: 1.1557 | LR: 2.00e-05
|
| 293 |
+
[2026-04-25 18:51:25] Epoch 1 | Step 700 | Loss: 1.1555 | LR: 2.00e-05
|
| 294 |
+
[2026-04-25 18:51:27] Epoch 1 | Step 710 | Loss: 1.1538 | LR: 2.00e-05
|
| 295 |
+
[2026-04-25 18:51:30] Epoch 1 | Step 720 | Loss: 1.1524 | LR: 2.00e-05
|
| 296 |
+
[2026-04-25 18:51:32] Epoch 1 | Step 730 | Loss: 1.1515 | LR: 2.00e-05
|
| 297 |
+
[2026-04-25 18:51:35] Epoch 1 | Step 740 | Loss: 1.1496 | LR: 2.00e-05
|
| 298 |
+
[2026-04-25 18:51:37] Epoch 1 | Step 750 | Loss: 1.1476 | LR: 2.00e-05
|
| 299 |
+
[2026-04-25 18:51:40] Epoch 1 | Step 760 | Loss: 1.1481 | LR: 2.00e-05
|
| 300 |
+
[2026-04-25 18:51:42] Epoch 1 | Step 770 | Loss: 1.1486 | LR: 2.00e-05
|
| 301 |
+
[2026-04-25 18:51:45] Epoch 1 | Step 780 | Loss: 1.1472 | LR: 2.00e-05
|
| 302 |
+
[2026-04-25 18:51:47] Epoch 1 | Step 790 | Loss: 1.1457 | LR: 2.00e-05
|
| 303 |
+
[2026-04-25 18:51:50] Epoch 1 | Step 800 | Loss: 1.1437 | LR: 2.00e-05
|
| 304 |
+
[2026-04-25 18:51:52] Epoch 1 | Step 810 | Loss: 1.1427 | LR: 2.00e-05
|
| 305 |
+
[2026-04-25 18:51:55] Epoch 1 | Step 820 | Loss: 1.1412 | LR: 2.00e-05
|
| 306 |
+
[2026-04-25 18:51:57] Epoch 1 | Step 830 | Loss: 1.1406 | LR: 2.00e-05
|
| 307 |
+
[2026-04-25 18:52:00] Epoch 1 | Step 840 | Loss: 1.1392 | LR: 2.00e-05
|
| 308 |
+
[2026-04-25 18:52:03] Epoch 1 | Step 850 | Loss: 1.1368 | LR: 2.00e-05
|
| 309 |
+
[2026-04-25 18:52:05] Epoch 1 | Step 860 | Loss: 1.1367 | LR: 2.00e-05
|
| 310 |
+
[2026-04-25 18:52:07] Epoch 1 | Step 870 | Loss: 1.1369 | LR: 2.00e-05
|
| 311 |
+
[2026-04-25 18:52:10] Epoch 1 | Step 880 | Loss: 1.1365 | LR: 2.00e-05
|
| 312 |
+
[2026-04-25 18:52:12] Epoch 1 | Step 890 | Loss: 1.1358 | LR: 2.00e-05
|
| 313 |
+
[2026-04-25 18:52:15] Epoch 1 | Step 900 | Loss: 1.1344 | LR: 2.00e-05
|
| 314 |
+
[2026-04-25 18:52:17] Epoch 1 | Step 910 | Loss: 1.1343 | LR: 2.00e-05
|
| 315 |
+
[2026-04-25 18:52:20] Epoch 1 | Step 920 | Loss: 1.1349 | LR: 2.00e-05
|
| 316 |
+
[2026-04-25 18:52:22] Epoch 1 | Step 930 | Loss: 1.1340 | LR: 2.00e-05
|
| 317 |
+
[2026-04-25 18:52:25] Epoch 1 | Step 940 | Loss: 1.1336 | LR: 2.00e-05
|
| 318 |
+
[2026-04-25 18:52:27] Epoch 1 | Step 950 | Loss: 1.1315 | LR: 2.00e-05
|
| 319 |
+
[2026-04-25 18:52:31] Epoch 1 | Step 960 | Loss: 1.1304 | LR: 2.00e-05
|
| 320 |
+
[2026-04-25 18:52:33] Epoch 1 | Step 970 | Loss: 1.1299 | LR: 2.00e-05
|
| 321 |
+
[2026-04-25 18:52:35] Epoch 1 | Step 980 | Loss: 1.1287 | LR: 2.00e-05
|
| 322 |
+
[2026-04-25 18:52:38] Epoch 1 | Step 990 | Loss: 1.1267 | LR: 2.00e-05
|
| 323 |
+
[2026-04-25 18:52:41] Epoch 1 | Step 1000 | Loss: 1.1258 | LR: 2.00e-05
|
| 324 |
+
[2026-04-25 18:52:43] Epoch 1 | Step 1010 | Loss: 1.1258 | LR: 2.00e-05
|
| 325 |
+
[2026-04-25 18:52:46] Epoch 1 | Step 1020 | Loss: 1.1252 | LR: 2.00e-05
|
| 326 |
+
[2026-04-25 18:52:48] Epoch 1 | Step 1030 | Loss: 1.1253 | LR: 2.00e-05
|
| 327 |
+
[2026-04-25 18:52:51] Epoch 1 | Step 1040 | Loss: 1.1235 | LR: 2.00e-05
|
| 328 |
+
[2026-04-25 18:52:53] Epoch 1 | Step 1050 | Loss: 1.1223 | LR: 2.00e-05
|
| 329 |
+
[2026-04-25 18:52:55] Epoch 1 | Step 1060 | Loss: 1.1201 | LR: 2.00e-05
|
| 330 |
+
[2026-04-25 18:52:58] Epoch 1 | Step 1070 | Loss: 1.1190 | LR: 2.00e-05
|
| 331 |
+
[2026-04-25 18:53:00] Epoch 1 | Step 1080 | Loss: 1.1196 | LR: 2.00e-05
|
| 332 |
+
[2026-04-25 18:53:03] Epoch 1 | Step 1090 | Loss: 1.1210 | LR: 2.00e-05
|
| 333 |
+
[2026-04-25 18:53:05] Epoch 1 | Step 1100 | Loss: 1.1201 | LR: 2.00e-05
|
| 334 |
+
[2026-04-25 18:53:08] Epoch 1 | Step 1110 | Loss: 1.1202 | LR: 2.00e-05
|
| 335 |
+
[2026-04-25 18:53:10] Epoch 1 | Step 1120 | Loss: 1.1203 | LR: 2.00e-05
|
| 336 |
+
[2026-04-25 18:53:13] Epoch 1 | Step 1130 | Loss: 1.1203 | LR: 2.00e-05
|
| 337 |
+
[2026-04-25 18:53:16] Epoch 1 | Step 1140 | Loss: 1.1196 | LR: 2.00e-05
|
| 338 |
+
[2026-04-25 18:53:18] Epoch 1 | Step 1150 | Loss: 1.1174 | LR: 2.00e-05
|
| 339 |
+
[2026-04-25 18:53:21] Epoch 1 | Step 1160 | Loss: 1.1177 | LR: 2.00e-05
|
| 340 |
+
[2026-04-25 18:53:23] Epoch 1 | Step 1170 | Loss: 1.1182 | LR: 2.00e-05
|
| 341 |
+
[2026-04-25 18:53:26] Epoch 1 | Step 1180 | Loss: 1.1176 | LR: 2.00e-05
|
| 342 |
+
[2026-04-25 18:53:29] Epoch 1 | Step 1190 | Loss: 1.1175 | LR: 2.00e-05
|
| 343 |
+
[2026-04-25 18:53:31] Epoch 1 | Step 1200 | Loss: 1.1163 | LR: 2.00e-05
|
| 344 |
+
[2026-04-25 18:53:34] Epoch 1 | Step 1210 | Loss: 1.1145 | LR: 2.00e-05
|
| 345 |
+
[2026-04-25 18:53:36] Epoch 1 | Step 1220 | Loss: 1.1127 | LR: 2.00e-05
|
| 346 |
+
[2026-04-25 18:53:39] Epoch 1 | Step 1230 | Loss: 1.1126 | LR: 2.00e-05
|
| 347 |
+
[2026-04-25 18:53:42] Epoch 1 | Step 1240 | Loss: 1.1121 | LR: 2.00e-05
|
| 348 |
+
[2026-04-25 18:53:44] Epoch 1 | Step 1250 | Loss: 1.1107 | LR: 2.00e-05
|
| 349 |
+
[2026-04-25 18:53:47] Epoch 1 | Step 1260 | Loss: 1.1101 | LR: 2.00e-05
|
| 350 |
+
[2026-04-25 18:53:49] Epoch 1 | Step 1270 | Loss: 1.1081 | LR: 2.00e-05
|
| 351 |
+
[2026-04-25 18:53:51] Epoch 1 | Step 1280 | Loss: 1.1084 | LR: 2.00e-05
|
| 352 |
+
[2026-04-25 18:53:54] Epoch 1 | Step 1290 | Loss: 1.1084 | LR: 2.00e-05
|
| 353 |
+
[2026-04-25 18:53:56] Epoch 1 | Step 1300 | Loss: 1.1073 | LR: 2.00e-05
|
| 354 |
+
[2026-04-25 18:53:59] Epoch 1 | Step 1310 | Loss: 1.1068 | LR: 2.00e-05
|
| 355 |
+
[2026-04-25 18:54:01] Epoch 1 | Step 1320 | Loss: 1.1067 | LR: 2.00e-05
|
| 356 |
+
[2026-04-25 18:54:04] Epoch 1 | Step 1330 | Loss: 1.1055 | LR: 2.00e-05
|
| 357 |
+
[2026-04-25 18:54:06] Epoch 1 | Step 1340 | Loss: 1.1052 | LR: 2.00e-05
|
| 358 |
+
[2026-04-25 18:54:09] Epoch 1 | Step 1350 | Loss: 1.1055 | LR: 2.00e-05
|
| 359 |
+
[2026-04-25 18:54:11] Epoch 1 | Step 1360 | Loss: 1.1049 | LR: 2.00e-05
|
| 360 |
+
[2026-04-25 18:54:14] Epoch 1 | Step 1370 | Loss: 1.1044 | LR: 2.00e-05
|
| 361 |
+
[2026-04-25 18:54:17] Epoch 1 | Step 1380 | Loss: 1.1049 | LR: 2.00e-05
|
| 362 |
+
[2026-04-25 18:54:19] Epoch 1 | Step 1390 | Loss: 1.1052 | LR: 2.00e-05
|
| 363 |
+
[2026-04-25 18:54:22] Epoch 1 | Step 1400 | Loss: 1.1049 | LR: 2.00e-05
|
| 364 |
+
[2026-04-25 18:54:24] Epoch 1 | Step 1410 | Loss: 1.1035 | LR: 2.00e-05
|
| 365 |
+
[2026-04-25 18:54:27] Epoch 1 | Step 1420 | Loss: 1.1031 | LR: 2.00e-05
|
| 366 |
+
[2026-04-25 18:54:29] Epoch 1 | Step 1430 | Loss: 1.1026 | LR: 2.00e-05
|
| 367 |
+
[2026-04-25 18:54:32] Epoch 1 | Step 1440 | Loss: 1.1017 | LR: 2.00e-05
|
| 368 |
+
[2026-04-25 18:54:34] Epoch 1 | Step 1450 | Loss: 1.1011 | LR: 2.00e-05
|
| 369 |
+
[2026-04-25 18:54:37] Epoch 1 | Step 1460 | Loss: 1.0998 | LR: 2.00e-05
|
| 370 |
+
[2026-04-25 18:54:40] Epoch 1 | Step 1470 | Loss: 1.0997 | LR: 2.00e-05
|
| 371 |
+
[2026-04-25 18:54:42] Epoch 1 | Step 1480 | Loss: 1.0997 | LR: 2.00e-05
|
| 372 |
+
[2026-04-25 18:54:45] Epoch 1 | Step 1490 | Loss: 1.0997 | LR: 2.00e-05
|
| 373 |
+
[2026-04-25 18:54:47] Epoch 1 | Step 1500 | Loss: 1.0988 | LR: 2.00e-05
|
| 374 |
+
[2026-04-25 18:54:50] Epoch 1 | Step 1510 | Loss: 1.0988 | LR: 2.00e-05
|
| 375 |
+
[2026-04-25 18:54:52] Epoch 1 | Step 1520 | Loss: 1.0988 | LR: 2.00e-05
|
| 376 |
+
[2026-04-25 18:54:54] Epoch 1 | Step 1530 | Loss: 1.0982 | LR: 2.00e-05
|
| 377 |
+
[2026-04-25 18:54:57] Epoch 1 | Step 1540 | Loss: 1.0984 | LR: 2.00e-05
|
| 378 |
+
[2026-04-25 18:54:59] Epoch 1 | Step 1550 | Loss: 1.0983 | LR: 2.00e-05
|
| 379 |
+
[2026-04-25 18:55:02] Epoch 1 | Step 1560 | Loss: 1.0975 | LR: 2.00e-05
|
| 380 |
+
[2026-04-25 18:55:05] Epoch 1 | Step 1570 | Loss: 1.0979 | LR: 2.00e-05
|
| 381 |
+
[2026-04-25 18:55:07] Epoch 1 | Step 1580 | Loss: 1.0972 | LR: 2.00e-05
|
| 382 |
+
[2026-04-25 18:55:09] Epoch 1 | Step 1590 | Loss: 1.0970 | LR: 2.00e-05
|
| 383 |
+
[2026-04-25 18:55:12] Epoch 1 | Step 1600 | Loss: 1.0969 | LR: 2.00e-05
|
| 384 |
+
[2026-04-25 18:55:14] Epoch 1 | Step 1610 | Loss: 1.0955 | LR: 2.00e-05
|
| 385 |
+
[2026-04-25 18:55:17] Epoch 1 | Step 1620 | Loss: 1.0946 | LR: 2.00e-05
|
| 386 |
+
[2026-04-25 18:55:19] Epoch 1 | Step 1630 | Loss: 1.0948 | LR: 2.00e-05
|
| 387 |
+
[2026-04-25 18:55:22] Epoch 1 | Step 1640 | Loss: 1.0944 | LR: 2.00e-05
|
| 388 |
+
[2026-04-25 18:55:24] Epoch 1 | Step 1650 | Loss: 1.0936 | LR: 2.00e-05
|
| 389 |
+
[2026-04-25 18:55:26] Epoch 1 | Step 1660 | Loss: 1.0928 | LR: 2.00e-05
|
| 390 |
+
[2026-04-25 18:55:29] Epoch 1 | Step 1670 | Loss: 1.0933 | LR: 2.00e-05
|
| 391 |
+
[2026-04-25 18:55:31] Epoch 1 | Step 1680 | Loss: 1.0932 | LR: 2.00e-05
|
| 392 |
+
[2026-04-25 18:55:34] Epoch 1 | Step 1690 | Loss: 1.0927 | LR: 2.00e-05
|
| 393 |
+
[2026-04-25 18:55:36] Epoch 1 | Step 1700 | Loss: 1.0914 | LR: 2.00e-05
|
| 394 |
+
[2026-04-25 18:55:39] Epoch 1 | Step 1710 | Loss: 1.0909 | LR: 2.00e-05
|
| 395 |
+
[2026-04-25 18:55:41] Epoch 1 | Step 1720 | Loss: 1.0902 | LR: 2.00e-05
|
| 396 |
+
[2026-04-25 18:55:44] Epoch 1 | Step 1730 | Loss: 1.0898 | LR: 2.00e-05
|
| 397 |
+
[2026-04-25 18:55:46] Epoch 1 | Step 1740 | Loss: 1.0898 | LR: 2.00e-05
|
| 398 |
+
[2026-04-25 18:55:49] Epoch 1 | Step 1750 | Loss: 1.0906 | LR: 2.00e-05
|
| 399 |
+
[2026-04-25 18:55:51] Epoch 1 | Step 1760 | Loss: 1.0898 | LR: 2.00e-05
|
| 400 |
+
[2026-04-25 18:55:54] Epoch 1 | Step 1770 | Loss: 1.0899 | LR: 2.00e-05
|
| 401 |
+
[2026-04-25 18:55:56] Epoch 1 | Step 1780 | Loss: 1.0895 | LR: 2.00e-05
|
| 402 |
+
[2026-04-25 18:55:59] Epoch 1 | Step 1790 | Loss: 1.0895 | LR: 2.00e-05
|
| 403 |
+
[2026-04-25 18:56:01] Epoch 1 | Step 1800 | Loss: 1.0887 | LR: 2.00e-05
|
| 404 |
+
[2026-04-25 18:56:04] Epoch 1 | Step 1810 | Loss: 1.0886 | LR: 2.00e-05
|
| 405 |
+
[2026-04-25 18:56:07] Epoch 1 | Step 1820 | Loss: 1.0890 | LR: 2.00e-05
|
| 406 |
+
[2026-04-25 18:56:09] Epoch 1 | Step 1830 | Loss: 1.0890 | LR: 2.00e-05
|
| 407 |
+
[2026-04-25 18:56:12] Epoch 1 | Step 1840 | Loss: 1.0890 | LR: 2.00e-05
|
| 408 |
+
[2026-04-25 18:56:14] Epoch 1 | Step 1850 | Loss: 1.0884 | LR: 2.00e-05
|
| 409 |
+
[2026-04-25 18:56:17] Epoch 1 | Step 1860 | Loss: 1.0884 | LR: 2.00e-05
|
| 410 |
+
[2026-04-25 18:56:19] Epoch 1 | Step 1870 | Loss: 1.0880 | LR: 2.00e-05
|
| 411 |
+
[2026-04-25 18:56:22] Epoch 1 | Step 1880 | Loss: 1.0874 | LR: 2.00e-05
|
| 412 |
+
[2026-04-25 18:56:25] Epoch 1 | Step 1890 | Loss: 1.0876 | LR: 2.00e-05
|
| 413 |
+
[2026-04-25 18:56:27] Epoch 1 | Step 1900 | Loss: 1.0872 | LR: 2.00e-05
|
| 414 |
+
[2026-04-25 18:56:30] Epoch 1 | Step 1910 | Loss: 1.0872 | LR: 2.00e-05
|
| 415 |
+
[2026-04-25 18:56:32] Epoch 1 | Step 1920 | Loss: 1.0874 | LR: 2.00e-05
|
| 416 |
+
[2026-04-25 18:56:35] Epoch 1 | Step 1930 | Loss: 1.0871 | LR: 2.00e-05
|
| 417 |
+
[2026-04-25 18:56:37] Epoch 1 | Step 1940 | Loss: 1.0865 | LR: 2.00e-05
|
| 418 |
+
[2026-04-25 18:56:40] Epoch 1 | Step 1950 | Loss: 1.0860 | LR: 2.00e-05
|
| 419 |
+
[2026-04-25 18:56:43] Epoch 1 | Step 1960 | Loss: 1.0859 | LR: 2.00e-05
|
| 420 |
+
[2026-04-25 18:56:45] Epoch 1 | Step 1970 | Loss: 1.0858 | LR: 2.00e-05
|
| 421 |
+
[2026-04-25 18:56:47] Epoch 1 | Step 1980 | Loss: 1.0861 | LR: 2.00e-05
|
| 422 |
+
[2026-04-25 18:56:50] Epoch 1 | Step 1990 | Loss: 1.0861 | LR: 2.00e-05
|
| 423 |
+
[2026-04-25 18:56:52] Epoch 1 | Step 2000 | Loss: 1.0859 | LR: 2.00e-05
|
| 424 |
+
[2026-04-25 18:56:52] Validation | Batch 10/84 | Loss: 1.0209
|
| 425 |
+
[2026-04-25 18:56:53] Validation | Batch 20/84 | Loss: 1.0159
|
| 426 |
+
[2026-04-25 18:56:53] Validation | Batch 30/84 | Loss: 1.0893
|
| 427 |
+
[2026-04-25 18:56:54] Validation | Batch 40/84 | Loss: 1.0904
|
| 428 |
+
[2026-04-25 18:56:54] Validation | Batch 50/84 | Loss: 1.0944
|
| 429 |
+
[2026-04-25 18:56:55] Validation | Batch 60/84 | Loss: 1.0662
|
| 430 |
+
[2026-04-25 18:56:55] Validation | Batch 70/84 | Loss: 1.0474
|
| 431 |
+
[2026-04-25 18:56:56] Validation | Batch 80/84 | Loss: 1.0540
|
| 432 |
+
[2026-04-25 18:56:56] Validation | Batch 84/84 | Loss: 1.0472
|
| 433 |
+
[2026-04-25 18:56:56] Validation | Loss: 1.0472 | PPL: 2.92 | Time: 3.82s
|
| 434 |
+
[2026-04-25 18:56:58] New best model saved! Val loss: 1.0472
|
| 435 |
+
[2026-04-25 18:57:01] Epoch 1 | Step 2010 | Loss: 1.0858 | LR: 2.00e-05
|
| 436 |
+
[2026-04-25 18:57:04] Epoch 1 | Step 2020 | Loss: 1.0856 | LR: 2.00e-05
|
| 437 |
+
[2026-04-25 18:57:06] Epoch 1 | Step 2030 | Loss: 1.0857 | LR: 2.00e-05
|
| 438 |
+
[2026-04-25 18:57:08] Epoch 1 | Step 2040 | Loss: 1.0855 | LR: 2.00e-05
|
| 439 |
+
[2026-04-25 18:57:11] Epoch 1 | Step 2050 | Loss: 1.0854 | LR: 2.00e-05
|
| 440 |
+
[2026-04-25 18:57:14] Epoch 1 | Step 2060 | Loss: 1.0849 | LR: 2.00e-05
|
| 441 |
+
[2026-04-25 18:57:16] Epoch 1 | Step 2070 | Loss: 1.0838 | LR: 2.00e-05
|
| 442 |
+
[2026-04-25 18:57:18] Epoch 1 | Step 2080 | Loss: 1.0832 | LR: 2.00e-05
|
| 443 |
+
[2026-04-25 18:57:21] Epoch 1 | Step 2090 | Loss: 1.0835 | LR: 2.00e-05
|
| 444 |
+
[2026-04-25 18:57:23] Epoch 1 | Step 2100 | Loss: 1.0835 | LR: 2.00e-05
|
| 445 |
+
[2026-04-25 18:57:26] Epoch 1 | Step 2110 | Loss: 1.0834 | LR: 2.00e-05
|
| 446 |
+
[2026-04-25 18:57:29] Epoch 1 | Step 2120 | Loss: 1.0830 | LR: 2.00e-05
|
| 447 |
+
[2026-04-25 18:57:31] Epoch 1 | Step 2130 | Loss: 1.0830 | LR: 2.00e-05
|
| 448 |
+
[2026-04-25 18:57:34] Epoch 1 | Step 2140 | Loss: 1.0826 | LR: 2.00e-05
|
| 449 |
+
[2026-04-25 18:57:36] Epoch 1 | Step 2150 | Loss: 1.0824 | LR: 2.00e-05
|
| 450 |
+
[2026-04-25 18:57:38] Epoch 1 | Step 2160 | Loss: 1.0827 | LR: 2.00e-05
|
| 451 |
+
[2026-04-25 18:57:41] Epoch 1 | Step 2170 | Loss: 1.0822 | LR: 2.00e-05
|
| 452 |
+
[2026-04-25 18:57:43] Epoch 1 | Step 2180 | Loss: 1.0817 | LR: 2.00e-05
|
| 453 |
+
[2026-04-25 18:57:46] Epoch 1 | Step 2190 | Loss: 1.0818 | LR: 2.00e-05
|
| 454 |
+
[2026-04-25 18:57:48] Epoch 1 | Step 2200 | Loss: 1.0816 | LR: 2.00e-05
|
| 455 |
+
[2026-04-25 18:57:51] Epoch 1 | Step 2210 | Loss: 1.0813 | LR: 2.00e-05
|
| 456 |
+
[2026-04-25 18:57:53] Epoch 1 | Step 2220 | Loss: 1.0818 | LR: 2.00e-05
|
| 457 |
+
[2026-04-25 18:57:56] Epoch 1 | Step 2230 | Loss: 1.0824 | LR: 2.00e-05
|
| 458 |
+
[2026-04-25 18:57:58] Epoch 1 | Step 2240 | Loss: 1.0829 | LR: 2.00e-05
|
| 459 |
+
[2026-04-25 18:58:01] Epoch 1 | Step 2250 | Loss: 1.0831 | LR: 2.00e-05
|
| 460 |
+
[2026-04-25 18:58:03] Epoch 1 | Step 2260 | Loss: 1.0829 | LR: 2.00e-05
|
| 461 |
+
[2026-04-25 18:58:06] Epoch 1 | Step 2270 | Loss: 1.0829 | LR: 2.00e-05
|
| 462 |
+
[2026-04-25 18:58:08] Epoch 1 | Step 2280 | Loss: 1.0829 | LR: 2.00e-05
|
| 463 |
+
[2026-04-25 18:58:11] Epoch 1 | Step 2290 | Loss: 1.0837 | LR: 2.00e-05
|
| 464 |
+
[2026-04-25 18:58:13] Epoch 1 | Step 2300 | Loss: 1.0836 | LR: 2.00e-05
|
| 465 |
+
[2026-04-25 18:58:15] Epoch 1 | Step 2310 | Loss: 1.0833 | LR: 2.00e-05
|
| 466 |
+
[2026-04-25 18:58:18] Epoch 1 | Step 2320 | Loss: 1.0834 | LR: 2.00e-05
|
| 467 |
+
[2026-04-25 18:58:20] Epoch 1 | Step 2330 | Loss: 1.0831 | LR: 2.00e-05
|
| 468 |
+
[2026-04-25 18:58:23] Epoch 1 | Step 2340 | Loss: 1.0827 | LR: 2.00e-05
|
| 469 |
+
[2026-04-25 18:58:25] Epoch 1 | Step 2350 | Loss: 1.0823 | LR: 2.00e-05
|
| 470 |
+
[2026-04-25 18:58:28] Epoch 1 | Step 2360 | Loss: 1.0824 | LR: 2.00e-05
|
| 471 |
+
[2026-04-25 18:58:30] Epoch 1 | Step 2370 | Loss: 1.0822 | LR: 2.00e-05
|
| 472 |
+
[2026-04-25 18:58:33] Epoch 1 | Step 2380 | Loss: 1.0817 | LR: 2.00e-05
|
| 473 |
+
[2026-04-25 18:58:35] Epoch 1 | Step 2390 | Loss: 1.0818 | LR: 2.00e-05
|
| 474 |
+
[2026-04-25 18:58:38] Epoch 1 | Step 2400 | Loss: 1.0812 | LR: 2.00e-05
|
| 475 |
+
[2026-04-25 18:58:40] Epoch 1 | Step 2410 | Loss: 1.0814 | LR: 2.00e-05
|
| 476 |
+
[2026-04-25 18:58:43] Epoch 1 | Step 2420 | Loss: 1.0813 | LR: 2.00e-05
|
| 477 |
+
[2026-04-25 18:58:45] Epoch 1 | Step 2430 | Loss: 1.0815 | LR: 2.00e-05
|
| 478 |
+
[2026-04-25 18:58:48] Epoch 1 | Step 2440 | Loss: 1.0810 | LR: 2.00e-05
|
| 479 |
+
[2026-04-25 18:58:50] Epoch 1 | Step 2450 | Loss: 1.0806 | LR: 2.00e-05
|
| 480 |
+
[2026-04-25 18:58:53] Epoch 1 | Step 2460 | Loss: 1.0804 | LR: 2.00e-05
|
| 481 |
+
[2026-04-25 18:58:56] Epoch 1 | Step 2470 | Loss: 1.0805 | LR: 2.00e-05
|
| 482 |
+
[2026-04-25 18:58:58] Epoch 1 | Step 2480 | Loss: 1.0805 | LR: 2.00e-05
|
| 483 |
+
[2026-04-25 18:59:01] Epoch 1 | Step 2490 | Loss: 1.0800 | LR: 2.00e-05
|
| 484 |
+
[2026-04-25 18:59:03] Epoch 1 | Step 2500 | Loss: 1.0796 | LR: 2.00e-05
|
| 485 |
+
[2026-04-25 18:59:06] Epoch 1 | Step 2510 | Loss: 1.0797 | LR: 2.00e-05
|
| 486 |
+
[2026-04-25 18:59:08] Epoch 1 | Step 2520 | Loss: 1.0789 | LR: 2.00e-05
|
| 487 |
+
[2026-04-25 18:59:10] Epoch 1 | Step 2530 | Loss: 1.0785 | LR: 2.00e-05
|
| 488 |
+
[2026-04-25 18:59:13] Epoch 1 | Step 2540 | Loss: 1.0783 | LR: 2.00e-05
|
| 489 |
+
[2026-04-25 18:59:16] Epoch 1 | Step 2550 | Loss: 1.0776 | LR: 2.00e-05
|
| 490 |
+
[2026-04-25 18:59:18] Epoch 1 | Step 2560 | Loss: 1.0775 | LR: 2.00e-05
|
| 491 |
+
[2026-04-25 18:59:21] Epoch 1 | Step 2570 | Loss: 1.0779 | LR: 2.00e-05
|
| 492 |
+
[2026-04-25 18:59:24] Epoch 1 | Step 2580 | Loss: 1.0781 | LR: 2.00e-05
|
| 493 |
+
[2026-04-25 18:59:26] Epoch 1 | Step 2590 | Loss: 1.0783 | LR: 2.00e-05
|
| 494 |
+
[2026-04-25 18:59:29] Epoch 1 | Step 2600 | Loss: 1.0782 | LR: 2.00e-05
|
| 495 |
+
[2026-04-25 18:59:31] Epoch 1 | Step 2610 | Loss: 1.0781 | LR: 2.00e-05
|
| 496 |
+
[2026-04-25 18:59:33] Epoch 1 | Step 2620 | Loss: 1.0777 | LR: 2.00e-05
|
| 497 |
+
[2026-04-25 18:59:36] Epoch 1 | Step 2630 | Loss: 1.0774 | LR: 2.00e-05
|
| 498 |
+
[2026-04-25 18:59:38] Epoch 1 | Step 2640 | Loss: 1.0774 | LR: 2.00e-05
|
| 499 |
+
[2026-04-25 18:59:41] Epoch 1 | Step 2650 | Loss: 1.0770 | LR: 2.00e-05
|
| 500 |
+
[2026-04-25 18:59:43] Epoch 1 | Step 2660 | Loss: 1.0771 | LR: 2.00e-05
|
| 501 |
+
[2026-04-25 18:59:46] Epoch 1 | Step 2670 | Loss: 1.0768 | LR: 2.00e-05
|
| 502 |
+
[2026-04-25 18:59:48] Epoch 1 | Step 2680 | Loss: 1.0765 | LR: 2.00e-05
|
| 503 |
+
[2026-04-25 18:59:51] Epoch 1 | Step 2690 | Loss: 1.0763 | LR: 2.00e-05
|
| 504 |
+
[2026-04-25 18:59:54] Epoch 1 | Step 2700 | Loss: 1.0759 | LR: 2.00e-05
|
| 505 |
+
[2026-04-25 18:59:56] Epoch 1 | Step 2710 | Loss: 1.0753 | LR: 2.00e-05
|
| 506 |
+
[2026-04-25 18:59:59] Epoch 1 | Step 2720 | Loss: 1.0754 | LR: 2.00e-05
|
| 507 |
+
[2026-04-25 19:00:01] Epoch 1 | Step 2730 | Loss: 1.0751 | LR: 2.00e-05
|
| 508 |
+
[2026-04-25 19:00:03] Epoch 1 | Step 2740 | Loss: 1.0755 | LR: 2.00e-05
|
| 509 |
+
[2026-04-25 19:00:06] Epoch 1 | Step 2750 | Loss: 1.0756 | LR: 2.00e-05
|
| 510 |
+
[2026-04-25 19:00:08] Epoch 1 | Step 2760 | Loss: 1.0752 | LR: 2.00e-05
|
| 511 |
+
[2026-04-25 19:00:11] Epoch 1 | Step 2770 | Loss: 1.0750 | LR: 2.00e-05
|
| 512 |
+
[2026-04-25 19:00:13] Epoch 1 | Step 2780 | Loss: 1.0752 | LR: 2.00e-05
|
| 513 |
+
[2026-04-25 19:00:16] Epoch 1 | Step 2790 | Loss: 1.0751 | LR: 2.00e-05
|
| 514 |
+
[2026-04-25 19:00:18] Epoch 1 | Step 2800 | Loss: 1.0748 | LR: 2.00e-05
|
| 515 |
+
[2026-04-25 19:00:21] Epoch 1 | Step 2810 | Loss: 1.0748 | LR: 2.00e-05
|
| 516 |
+
[2026-04-25 19:00:23] Epoch 1 | Step 2820 | Loss: 1.0746 | LR: 2.00e-05
|
| 517 |
+
[2026-04-25 19:00:26] Epoch 1 | Step 2830 | Loss: 1.0743 | LR: 2.00e-05
|
| 518 |
+
[2026-04-25 19:00:28] Epoch 1 | Step 2840 | Loss: 1.0749 | LR: 2.00e-05
|
| 519 |
+
[2026-04-25 19:00:30] Epoch 1 | Step 2850 | Loss: 1.0748 | LR: 2.00e-05
|
| 520 |
+
[2026-04-25 19:00:33] Epoch 1 | Step 2860 | Loss: 1.0745 | LR: 2.00e-05
|
| 521 |
+
[2026-04-25 19:00:35] Epoch 1 | Step 2870 | Loss: 1.0746 | LR: 2.00e-05
|
| 522 |
+
[2026-04-25 19:00:38] Epoch 1 | Step 2880 | Loss: 1.0742 | LR: 2.00e-05
|
| 523 |
+
[2026-04-25 19:00:41] Epoch 1 | Step 2890 | Loss: 1.0740 | LR: 2.00e-05
|
| 524 |
+
[2026-04-25 19:00:43] Epoch 1 | Step 2900 | Loss: 1.0735 | LR: 2.00e-05
|
| 525 |
+
[2026-04-25 19:00:46] Epoch 1 | Step 2910 | Loss: 1.0734 | LR: 2.00e-05
|
| 526 |
+
[2026-04-25 19:00:49] Epoch 1 | Step 2920 | Loss: 1.0736 | LR: 2.00e-05
|
| 527 |
+
[2026-04-25 19:00:51] Epoch 1 | Step 2930 | Loss: 1.0734 | LR: 2.00e-05
|
| 528 |
+
[2026-04-25 19:00:54] Epoch 1 | Step 2940 | Loss: 1.0729 | LR: 2.00e-05
|
| 529 |
+
[2026-04-25 19:00:56] Epoch 1 | Step 2950 | Loss: 1.0731 | LR: 2.00e-05
|
| 530 |
+
[2026-04-25 19:00:59] Epoch 1 | Step 2960 | Loss: 1.0732 | LR: 2.00e-05
|
| 531 |
+
[2026-04-25 19:01:02] Epoch 1 | Step 2970 | Loss: 1.0732 | LR: 2.00e-05
|
| 532 |
+
[2026-04-25 19:01:04] Epoch 1 | Step 2980 | Loss: 1.0729 | LR: 2.00e-05
|
| 533 |
+
[2026-04-25 19:01:07] Epoch 1 | Step 2990 | Loss: 1.0732 | LR: 2.00e-05
|
| 534 |
+
[2026-04-25 19:01:09] Epoch 1 | Step 3000 | Loss: 1.0730 | LR: 2.00e-05
|
| 535 |
+
[2026-04-25 19:01:12] Epoch 1 | Step 3010 | Loss: 1.0731 | LR: 2.00e-05
|
| 536 |
+
[2026-04-25 19:01:14] Epoch 1 | Step 3020 | Loss: 1.0728 | LR: 2.00e-05
|
| 537 |
+
[2026-04-25 19:01:17] Epoch 1 | Step 3030 | Loss: 1.0726 | LR: 2.00e-05
|
| 538 |
+
[2026-04-25 19:01:20] Epoch 1 | Step 3040 | Loss: 1.0720 | LR: 2.00e-05
|
| 539 |
+
[2026-04-25 19:01:22] Epoch 1 | Step 3050 | Loss: 1.0715 | LR: 2.00e-05
|
| 540 |
+
[2026-04-25 19:01:24] Epoch 1 | Step 3060 | Loss: 1.0714 | LR: 2.00e-05
|
| 541 |
+
[2026-04-25 19:01:27] Epoch 1 | Step 3070 | Loss: 1.0711 | LR: 2.00e-05
|
| 542 |
+
[2026-04-25 19:01:30] Epoch 1 | Step 3080 | Loss: 1.0712 | LR: 2.00e-05
|
| 543 |
+
[2026-04-25 19:01:32] Epoch 1 | Step 3090 | Loss: 1.0708 | LR: 2.00e-05
|
| 544 |
+
[2026-04-25 19:01:34] Epoch 1 | Step 3100 | Loss: 1.0706 | LR: 2.00e-05
|
| 545 |
+
[2026-04-25 19:01:37] Epoch 1 | Step 3110 | Loss: 1.0703 | LR: 2.00e-05
|
| 546 |
+
[2026-04-25 19:01:39] Epoch 1 | Step 3120 | Loss: 1.0707 | LR: 2.00e-05
|
| 547 |
+
[2026-04-25 19:01:42] Epoch 1 | Step 3130 | Loss: 1.0704 | LR: 2.00e-05
|
| 548 |
+
[2026-04-25 19:01:44] Epoch 1 | Step 3140 | Loss: 1.0704 | LR: 2.00e-05
|
| 549 |
+
[2026-04-25 19:01:47] Epoch 1 | Step 3150 | Loss: 1.0706 | LR: 2.00e-05
|
| 550 |
+
[2026-04-25 19:01:50] Epoch 1 | Step 3160 | Loss: 1.0706 | LR: 2.00e-05
|
| 551 |
+
[2026-04-25 19:01:52] Epoch 1 | Step 3170 | Loss: 1.0705 | LR: 2.00e-05
|
| 552 |
+
[2026-04-25 19:01:55] Epoch 1 | Step 3180 | Loss: 1.0705 | LR: 2.00e-05
|
| 553 |
+
[2026-04-25 19:01:57] Epoch 1 | Step 3190 | Loss: 1.0700 | LR: 2.00e-05
|
| 554 |
+
[2026-04-25 19:01:59] Epoch 1 | Step 3200 | Loss: 1.0698 | LR: 2.00e-05
|
| 555 |
+
[2026-04-25 19:02:02] Epoch 1 | Step 3210 | Loss: 1.0696 | LR: 2.00e-05
|
| 556 |
+
[2026-04-25 19:02:05] Epoch 1 | Step 3220 | Loss: 1.0691 | LR: 2.00e-05
|
| 557 |
+
[2026-04-25 19:02:07] Epoch 1 | Step 3230 | Loss: 1.0695 | LR: 2.00e-05
|
| 558 |
+
[2026-04-25 19:02:09] Epoch 1 | Step 3240 | Loss: 1.0693 | LR: 2.00e-05
|
| 559 |
+
[2026-04-25 19:02:12] Epoch 1 | Step 3250 | Loss: 1.0694 | LR: 2.00e-05
|
| 560 |
+
[2026-04-25 19:02:15] Epoch 1 | Step 3260 | Loss: 1.0692 | LR: 2.00e-05
|
| 561 |
+
[2026-04-25 19:02:17] Epoch 1 | Step 3270 | Loss: 1.0690 | LR: 2.00e-05
|
| 562 |
+
[2026-04-25 19:02:20] Epoch 1 | Step 3280 | Loss: 1.0686 | LR: 2.00e-05
|
| 563 |
+
[2026-04-25 19:02:22] Epoch 1 | Step 3290 | Loss: 1.0684 | LR: 2.00e-05
|
| 564 |
+
[2026-04-25 19:02:25] Epoch 1 | Step 3300 | Loss: 1.0684 | LR: 2.00e-05
|
| 565 |
+
[2026-04-25 19:02:27] Epoch 1 | Step 3310 | Loss: 1.0682 | LR: 2.00e-05
|
| 566 |
+
[2026-04-25 19:02:30] Epoch 1 | Step 3320 | Loss: 1.0682 | LR: 2.00e-05
|
| 567 |
+
[2026-04-25 19:02:33] Epoch 1 | Step 3330 | Loss: 1.0680 | LR: 2.00e-05
|
| 568 |
+
[2026-04-25 19:02:35] Epoch 1 | Step 3340 | Loss: 1.0681 | LR: 2.00e-05
|
| 569 |
+
[2026-04-25 19:02:38] Epoch 1 | Step 3350 | Loss: 1.0677 | LR: 2.00e-05
|
| 570 |
+
[2026-04-25 19:02:40] Epoch 1 | Step 3360 | Loss: 1.0675 | LR: 2.00e-05
|
| 571 |
+
[2026-04-25 19:02:43] Epoch 1 | Step 3370 | Loss: 1.0676 | LR: 2.00e-05
|
| 572 |
+
[2026-04-25 19:02:45] Epoch 1 | Step 3380 | Loss: 1.0671 | LR: 2.00e-05
|
| 573 |
+
[2026-04-25 19:02:48] Epoch 1 | Step 3390 | Loss: 1.0673 | LR: 2.00e-05
|
| 574 |
+
[2026-04-25 19:02:51] Epoch 1 | Step 3400 | Loss: 1.0677 | LR: 2.00e-05
|
| 575 |
+
[2026-04-25 19:02:53] Epoch 1 | Step 3410 | Loss: 1.0675 | LR: 2.00e-05
|
| 576 |
+
[2026-04-25 19:02:56] Epoch 1 | Step 3420 | Loss: 1.0671 | LR: 2.00e-05
|
| 577 |
+
[2026-04-25 19:02:59] Epoch 1 | Step 3430 | Loss: 1.0671 | LR: 2.00e-05
|
| 578 |
+
[2026-04-25 19:03:01] Epoch 1 | Step 3440 | Loss: 1.0672 | LR: 2.00e-05
|
| 579 |
+
[2026-04-25 19:03:04] Epoch 1 | Step 3450 | Loss: 1.0670 | LR: 2.00e-05
|
| 580 |
+
[2026-04-25 19:03:06] Epoch 1 | Step 3460 | Loss: 1.0669 | LR: 2.00e-05
|
| 581 |
+
[2026-04-25 19:03:09] Epoch 1 | Step 3470 | Loss: 1.0668 | LR: 2.00e-05
|
| 582 |
+
[2026-04-25 19:03:11] Epoch 1 | Step 3480 | Loss: 1.0667 | LR: 2.00e-05
|
| 583 |
+
[2026-04-25 19:03:13] Epoch 1 | Step 3490 | Loss: 1.0665 | LR: 2.00e-05
|
| 584 |
+
[2026-04-25 19:03:16] Epoch 1 | Step 3500 | Loss: 1.0662 | LR: 2.00e-05
|
| 585 |
+
[2026-04-25 19:03:19] Epoch 1 | Step 3510 | Loss: 1.0664 | LR: 2.00e-05
|
| 586 |
+
[2026-04-25 19:03:21] Epoch 1 | Step 3520 | Loss: 1.0660 | LR: 2.00e-05
|
| 587 |
+
[2026-04-25 19:03:24] Epoch 1 | Step 3530 | Loss: 1.0662 | LR: 2.00e-05
|
| 588 |
+
[2026-04-25 19:03:26] Epoch 1 | Step 3540 | Loss: 1.0659 | LR: 2.00e-05
|
| 589 |
+
[2026-04-25 19:03:29] Epoch 1 | Step 3550 | Loss: 1.0658 | LR: 2.00e-05
|
| 590 |
+
[2026-04-25 19:03:32] Epoch 1 | Step 3560 | Loss: 1.0657 | LR: 2.00e-05
|
| 591 |
+
[2026-04-25 19:03:34] Epoch 1 | Step 3570 | Loss: 1.0656 | LR: 2.00e-05
|
| 592 |
+
[2026-04-25 19:03:37] Epoch 1 | Step 3580 | Loss: 1.0655 | LR: 2.00e-05
|
| 593 |
+
[2026-04-25 19:03:39] Epoch 1 | Step 3590 | Loss: 1.0654 | LR: 2.00e-05
|
| 594 |
+
[2026-04-25 19:03:42] Epoch 1 | Step 3600 | Loss: 1.0650 | LR: 2.00e-05
|
| 595 |
+
[2026-04-25 19:03:44] Epoch 1 | Step 3610 | Loss: 1.0648 | LR: 2.00e-05
|
| 596 |
+
[2026-04-25 19:03:47] Epoch 1 | Step 3620 | Loss: 1.0646 | LR: 2.00e-05
|
| 597 |
+
[2026-04-25 19:03:49] Epoch 1 | Step 3630 | Loss: 1.0649 | LR: 2.00e-05
|
| 598 |
+
[2026-04-25 19:03:51] Epoch 1 | Step 3640 | Loss: 1.0651 | LR: 2.00e-05
|
| 599 |
+
[2026-04-25 19:03:54] Epoch 1 | Step 3650 | Loss: 1.0651 | LR: 2.00e-05
|
| 600 |
+
[2026-04-25 19:03:56] Epoch 1 | Step 3660 | Loss: 1.0650 | LR: 2.00e-05
|
| 601 |
+
[2026-04-25 19:03:59] Epoch 1 | Step 3670 | Loss: 1.0646 | LR: 2.00e-05
|
| 602 |
+
[2026-04-25 19:04:01] Epoch 1 | Step 3680 | Loss: 1.0646 | LR: 2.00e-05
|
| 603 |
+
[2026-04-25 19:04:04] Epoch 1 | Step 3690 | Loss: 1.0644 | LR: 2.00e-05
|
| 604 |
+
[2026-04-25 19:04:06] Epoch 1 | Step 3700 | Loss: 1.0642 | LR: 2.00e-05
|
| 605 |
+
[2026-04-25 19:04:09] Epoch 1 | Step 3710 | Loss: 1.0640 | LR: 2.00e-05
|
| 606 |
+
[2026-04-25 19:04:12] Epoch 1 | Step 3720 | Loss: 1.0639 | LR: 2.00e-05
|
| 607 |
+
[2026-04-25 19:04:14] Epoch 1 | Step 3730 | Loss: 1.0640 | LR: 2.00e-05
|
| 608 |
+
[2026-04-25 19:04:16] Epoch 1 | Step 3740 | Loss: 1.0641 | LR: 2.00e-05
|
| 609 |
+
[2026-04-25 19:04:19] Epoch 1 | Step 3750 | Loss: 1.0638 | LR: 2.00e-05
|
| 610 |
+
[2026-04-25 19:04:22] Epoch 1 | Step 3760 | Loss: 1.0639 | LR: 2.00e-05
|
| 611 |
+
[2026-04-25 19:04:24] Epoch 1 | Step 3770 | Loss: 1.0640 | LR: 2.00e-05
|
| 612 |
+
[2026-04-25 19:04:27] Epoch 1 | Step 3780 | Loss: 1.0639 | LR: 2.00e-05
|
| 613 |
+
[2026-04-25 19:04:29] Epoch 1 | Step 3790 | Loss: 1.0639 | LR: 2.00e-05
|
| 614 |
+
[2026-04-25 19:04:32] Epoch 1 | Step 3800 | Loss: 1.0641 | LR: 2.00e-05
|
| 615 |
+
[2026-04-25 19:04:34] Epoch 1 | Step 3810 | Loss: 1.0635 | LR: 2.00e-05
|
| 616 |
+
[2026-04-25 19:04:37] Epoch 1 | Step 3820 | Loss: 1.0633 | LR: 2.00e-05
|
| 617 |
+
[2026-04-25 19:04:39] Epoch 1 | Step 3830 | Loss: 1.0631 | LR: 2.00e-05
|
| 618 |
+
[2026-04-25 19:04:42] Epoch 1 | Step 3840 | Loss: 1.0632 | LR: 2.00e-05
|
| 619 |
+
[2026-04-25 19:04:44] Epoch 1 | Step 3850 | Loss: 1.0629 | LR: 2.00e-05
|
| 620 |
+
[2026-04-25 19:04:47] Epoch 1 | Step 3860 | Loss: 1.0627 | LR: 2.00e-05
|
| 621 |
+
[2026-04-25 19:04:49] Epoch 1 | Step 3870 | Loss: 1.0627 | LR: 2.00e-05
|
| 622 |
+
[2026-04-25 19:04:52] Epoch 1 | Step 3880 | Loss: 1.0621 | LR: 2.00e-05
|
| 623 |
+
[2026-04-25 19:04:55] Epoch 1 | Step 3890 | Loss: 1.0619 | LR: 2.00e-05
|
| 624 |
+
[2026-04-25 19:04:57] Epoch 1 | Step 3900 | Loss: 1.0619 | LR: 2.00e-05
|
| 625 |
+
[2026-04-25 19:05:00] Epoch 1 | Step 3910 | Loss: 1.0621 | LR: 2.00e-05
|
| 626 |
+
[2026-04-25 19:05:02] Epoch 1 | Step 3920 | Loss: 1.0622 | LR: 2.00e-05
|
| 627 |
+
[2026-04-25 19:05:04] Epoch 1 | Step 3930 | Loss: 1.0620 | LR: 2.00e-05
|
| 628 |
+
[2026-04-25 19:05:07] Epoch 1 | Step 3940 | Loss: 1.0620 | LR: 2.00e-05
|
| 629 |
+
[2026-04-25 19:05:10] Epoch 1 | Step 3950 | Loss: 1.0617 | LR: 2.00e-05
|
| 630 |
+
[2026-04-25 19:05:12] Epoch 1 | Step 3960 | Loss: 1.0617 | LR: 2.00e-05
|
| 631 |
+
[2026-04-25 19:05:15] Epoch 1 | Step 3970 | Loss: 1.0615 | LR: 2.00e-05
|
| 632 |
+
[2026-04-25 19:05:18] Epoch 1 | Step 3980 | Loss: 1.0614 | LR: 2.00e-05
|
| 633 |
+
[2026-04-25 19:05:20] Epoch 1 | Step 3990 | Loss: 1.0612 | LR: 1.99e-05
|
| 634 |
+
[2026-04-25 19:05:22] Epoch 1 | Step 4000 | Loss: 1.0611 | LR: 1.99e-05
|
| 635 |
+
[2026-04-25 19:05:23] Validation | Batch 10/84 | Loss: 0.9893
|
| 636 |
+
[2026-04-25 19:05:23] Validation | Batch 20/84 | Loss: 0.9886
|
| 637 |
+
[2026-04-25 19:05:24] Validation | Batch 30/84 | Loss: 1.0646
|
| 638 |
+
[2026-04-25 19:05:24] Validation | Batch 40/84 | Loss: 1.0736
|
| 639 |
+
[2026-04-25 19:05:25] Validation | Batch 50/84 | Loss: 1.0745
|
| 640 |
+
[2026-04-25 19:05:25] Validation | Batch 60/84 | Loss: 1.0514
|
| 641 |
+
[2026-04-25 19:05:26] Validation | Batch 70/84 | Loss: 1.0341
|
| 642 |
+
[2026-04-25 19:05:26] Validation | Batch 80/84 | Loss: 1.0417
|
| 643 |
+
[2026-04-25 19:05:26] Validation | Batch 84/84 | Loss: 1.0352
|
| 644 |
+
[2026-04-25 19:05:27] Validation | Loss: 1.0352 | PPL: 2.88 | Time: 3.75s
|
| 645 |
+
[2026-04-25 19:05:29] New best model saved! Val loss: 1.0352
|
| 646 |
+
[2026-04-25 19:05:31] Epoch 1 | Step 4010 | Loss: 1.0610 | LR: 1.99e-05
|
| 647 |
+
[2026-04-25 19:05:34] Epoch 1 | Step 4020 | Loss: 1.0611 | LR: 1.98e-05
|
| 648 |
+
[2026-04-25 19:05:36] Epoch 1 | Step 4030 | Loss: 1.0608 | LR: 1.97e-05
|
| 649 |
+
[2026-04-25 19:05:38] Epoch 1 | Step 4040 | Loss: 1.0604 | LR: 1.97e-05
|
| 650 |
+
[2026-04-25 19:05:41] Epoch 1 | Step 4050 | Loss: 1.0602 | LR: 1.96e-05
|
| 651 |
+
[2026-04-25 19:05:43] Epoch 1 | Step 4060 | Loss: 1.0596 | LR: 1.95e-05
|
| 652 |
+
[2026-04-25 19:05:46] Epoch 1 | Step 4070 | Loss: 1.0596 | LR: 1.94e-05
|
| 653 |
+
[2026-04-25 19:05:48] Epoch 1 | Step 4080 | Loss: 1.0596 | LR: 1.93e-05
|
| 654 |
+
[2026-04-25 19:05:51] Epoch 1 | Step 4090 | Loss: 1.0597 | LR: 1.92e-05
|
| 655 |
+
[2026-04-25 19:05:53] Epoch 1 | Step 4100 | Loss: 1.0598 | LR: 1.91e-05
|
| 656 |
+
[2026-04-25 19:05:56] Epoch 1 | Step 4110 | Loss: 1.0597 | LR: 1.89e-05
|
| 657 |
+
[2026-04-25 19:05:58] Epoch 1 | Step 4120 | Loss: 1.0599 | LR: 1.88e-05
|
| 658 |
+
[2026-04-25 19:06:01] Epoch 1 | Step 4130 | Loss: 1.0597 | LR: 1.86e-05
|
| 659 |
+
[2026-04-25 19:06:04] Epoch 1 | Step 4140 | Loss: 1.0598 | LR: 1.85e-05
|
| 660 |
+
[2026-04-25 19:06:06] Epoch 1 | Step 4150 | Loss: 1.0603 | LR: 1.83e-05
|
| 661 |
+
[2026-04-25 19:06:09] Epoch 1 | Step 4160 | Loss: 1.0605 | LR: 1.82e-05
|
| 662 |
+
[2026-04-25 19:06:11] Epoch 1 | Step 4170 | Loss: 1.0603 | LR: 1.80e-05
|
| 663 |
+
[2026-04-25 19:06:14] Epoch 1 | Step 4180 | Loss: 1.0603 | LR: 1.78e-05
|
| 664 |
+
[2026-04-25 19:06:17] Epoch 1 | Step 4190 | Loss: 1.0601 | LR: 1.76e-05
|
| 665 |
+
[2026-04-25 19:06:19] Epoch 1 | Step 4200 | Loss: 1.0604 | LR: 1.74e-05
|
| 666 |
+
[2026-04-25 19:06:22] Epoch 1 | Step 4210 | Loss: 1.0603 | LR: 1.72e-05
|
| 667 |
+
[2026-04-25 19:06:25] Epoch 1 | Step 4220 | Loss: 1.0607 | LR: 1.70e-05
|
| 668 |
+
[2026-04-25 19:06:27] Epoch 1 | Step 4230 | Loss: 1.0608 | LR: 1.68e-05
|
| 669 |
+
[2026-04-25 19:06:30] Epoch 1 | Step 4240 | Loss: 1.0609 | LR: 1.66e-05
|
| 670 |
+
[2026-04-25 19:06:33] Epoch 1 | Step 4250 | Loss: 1.0609 | LR: 1.63e-05
|
| 671 |
+
[2026-04-25 19:06:35] Epoch 1 | Step 4260 | Loss: 1.0605 | LR: 1.61e-05
|
| 672 |
+
[2026-04-25 19:06:37] Epoch 1 | Step 4270 | Loss: 1.0608 | LR: 1.59e-05
|
| 673 |
+
[2026-04-25 19:06:40] Epoch 1 | Step 4280 | Loss: 1.0606 | LR: 1.56e-05
|
| 674 |
+
[2026-04-25 19:06:42] Epoch 1 | Step 4290 | Loss: 1.0603 | LR: 1.54e-05
|
| 675 |
+
[2026-04-25 19:06:45] Epoch 1 | Step 4300 | Loss: 1.0604 | LR: 1.51e-05
|
| 676 |
+
[2026-04-25 19:06:47] Epoch 1 | Step 4310 | Loss: 1.0605 | LR: 1.49e-05
|
| 677 |
+
[2026-04-25 19:06:50] Epoch 1 | Step 4320 | Loss: 1.0606 | LR: 1.46e-05
|
| 678 |
+
[2026-04-25 19:06:53] Epoch 1 | Step 4330 | Loss: 1.0604 | LR: 1.43e-05
|
| 679 |
+
[2026-04-25 19:06:55] Epoch 1 | Step 4340 | Loss: 1.0603 | LR: 1.41e-05
|
| 680 |
+
[2026-04-25 19:06:57] Epoch 1 | Step 4350 | Loss: 1.0600 | LR: 1.38e-05
|
| 681 |
+
[2026-04-25 19:07:00] Epoch 1 | Step 4360 | Loss: 1.0599 | LR: 1.35e-05
|
| 682 |
+
[2026-04-25 19:07:03] Epoch 1 | Step 4370 | Loss: 1.0600 | LR: 1.32e-05
|
| 683 |
+
[2026-04-25 19:07:05] Epoch 1 | Step 4380 | Loss: 1.0598 | LR: 1.30e-05
|
| 684 |
+
[2026-04-25 19:07:07] Epoch 1 | Step 4390 | Loss: 1.0598 | LR: 1.27e-05
|
| 685 |
+
[2026-04-25 19:07:10] Epoch 1 | Step 4400 | Loss: 1.0596 | LR: 1.24e-05
|
| 686 |
+
[2026-04-25 19:07:12] Epoch 1 | Step 4410 | Loss: 1.0591 | LR: 1.21e-05
|
| 687 |
+
[2026-04-25 19:07:15] Epoch 1 | Step 4420 | Loss: 1.0594 | LR: 1.18e-05
|
| 688 |
+
[2026-04-25 19:07:17] Epoch 1 | Step 4430 | Loss: 1.0593 | LR: 1.16e-05
|
| 689 |
+
[2026-04-25 19:07:20] Epoch 1 | Step 4440 | Loss: 1.0596 | LR: 1.13e-05
|
| 690 |
+
[2026-04-25 19:07:23] Epoch 1 | Step 4450 | Loss: 1.0594 | LR: 1.10e-05
|
| 691 |
+
[2026-04-25 19:07:25] Epoch 1 | Step 4460 | Loss: 1.0597 | LR: 1.07e-05
|
| 692 |
+
[2026-04-25 19:07:28] Epoch 1 | Step 4470 | Loss: 1.0595 | LR: 1.04e-05
|
| 693 |
+
[2026-04-25 19:07:30] Epoch 1 | Step 4480 | Loss: 1.0593 | LR: 1.01e-05
|
| 694 |
+
[2026-04-25 19:07:33] Epoch 1 | Step 4490 | Loss: 1.0591 | LR: 9.84e-06
|
| 695 |
+
[2026-04-25 19:07:35] Epoch 1 | Step 4500 | Loss: 1.0592 | LR: 9.55e-06
|
| 696 |
+
[2026-04-25 19:07:38] Epoch 1 | Step 4510 | Loss: 1.0588 | LR: 9.27e-06
|
| 697 |
+
[2026-04-25 19:07:40] Epoch 1 | Step 4520 | Loss: 1.0585 | LR: 8.99e-06
|
| 698 |
+
[2026-04-25 19:07:43] Epoch 1 | Step 4530 | Loss: 1.0582 | LR: 8.72e-06
|
| 699 |
+
[2026-04-25 19:07:46] Epoch 1 | Step 4540 | Loss: 1.0581 | LR: 8.44e-06
|
| 700 |
+
[2026-04-25 19:07:49] Epoch 1 | Step 4550 | Loss: 1.0577 | LR: 8.17e-06
|
| 701 |
+
[2026-04-25 19:07:51] Epoch 1 | Step 4560 | Loss: 1.0577 | LR: 7.90e-06
|
| 702 |
+
[2026-04-25 19:07:54] Epoch 1 | Step 4570 | Loss: 1.0577 | LR: 7.63e-06
|
| 703 |
+
[2026-04-25 19:07:56] Epoch 1 | Step 4580 | Loss: 1.0575 | LR: 7.37e-06
|
| 704 |
+
[2026-04-25 19:07:59] Epoch 1 | Step 4590 | Loss: 1.0573 | LR: 7.11e-06
|
| 705 |
+
[2026-04-25 19:08:01] Epoch 1 | Step 4600 | Loss: 1.0571 | LR: 6.85e-06
|
| 706 |
+
[2026-04-25 19:08:03] Epoch 1 | Step 4610 | Loss: 1.0570 | LR: 6.60e-06
|
| 707 |
+
[2026-04-25 19:08:06] Epoch 1 | Step 4620 | Loss: 1.0570 | LR: 6.35e-06
|
| 708 |
+
[2026-04-25 19:08:08] Epoch 1 | Step 4630 | Loss: 1.0569 | LR: 6.11e-06
|
| 709 |
+
[2026-04-25 19:08:11] Epoch 1 | Step 4640 | Loss: 1.0568 | LR: 5.87e-06
|
| 710 |
+
[2026-04-25 19:08:13] Epoch 1 | Step 4650 | Loss: 1.0568 | LR: 5.64e-06
|
| 711 |
+
[2026-04-25 19:08:16] Epoch 1 | Step 4660 | Loss: 1.0566 | LR: 5.41e-06
|
| 712 |
+
[2026-04-25 19:08:18] Epoch 1 | Step 4670 | Loss: 1.0564 | LR: 5.19e-06
|
| 713 |
+
[2026-04-25 19:08:21] Epoch 1 | Step 4680 | Loss: 1.0565 | LR: 4.98e-06
|
| 714 |
+
[2026-04-25 19:08:24] Epoch 1 | Step 4690 | Loss: 1.0563 | LR: 4.77e-06
|
| 715 |
+
[2026-04-25 19:08:26] Epoch 1 | Step 4700 | Loss: 1.0565 | LR: 4.56e-06
|
| 716 |
+
[2026-04-25 19:08:29] Epoch 1 | Step 4710 | Loss: 1.0563 | LR: 4.37e-06
|
| 717 |
+
[2026-04-25 19:08:31] Epoch 1 | Step 4720 | Loss: 1.0562 | LR: 4.18e-06
|
| 718 |
+
[2026-04-25 19:08:33] Epoch 1 | Step 4730 | Loss: 1.0562 | LR: 3.99e-06
|
| 719 |
+
[2026-04-25 19:08:36] Epoch 1 | Step 4740 | Loss: 1.0560 | LR: 3.82e-06
|
| 720 |
+
[2026-04-25 19:08:38] Epoch 1 | Step 4750 | Loss: 1.0559 | LR: 3.65e-06
|
| 721 |
+
[2026-04-25 19:08:40] Epoch 1 | Step 4760 | Loss: 1.0557 | LR: 3.49e-06
|
| 722 |
+
[2026-04-25 19:08:43] Epoch 1 | Step 4770 | Loss: 1.0554 | LR: 3.33e-06
|
| 723 |
+
[2026-04-25 19:08:46] Epoch 1 | Step 4780 | Loss: 1.0554 | LR: 3.19e-06
|
| 724 |
+
[2026-04-25 19:08:48] Epoch 1 | Step 4790 | Loss: 1.0553 | LR: 3.05e-06
|
| 725 |
+
[2026-04-25 19:08:51] Epoch 1 | Step 4800 | Loss: 1.0551 | LR: 2.92e-06
|
| 726 |
+
[2026-04-25 19:08:53] Epoch 1 | Step 4810 | Loss: 1.0548 | LR: 2.80e-06
|
| 727 |
+
[2026-04-25 19:08:56] Epoch 1 | Step 4820 | Loss: 1.0545 | LR: 2.68e-06
|
| 728 |
+
[2026-04-25 19:08:58] Epoch 1 | Step 4830 | Loss: 1.0542 | LR: 2.58e-06
|
| 729 |
+
[2026-04-25 19:09:01] Epoch 1 | Step 4840 | Loss: 1.0541 | LR: 2.48e-06
|
| 730 |
+
[2026-04-25 19:09:04] Epoch 1 | Step 4850 | Loss: 1.0542 | LR: 2.39e-06
|
| 731 |
+
[2026-04-25 19:09:06] Epoch 1 | Step 4860 | Loss: 1.0544 | LR: 2.32e-06
|
| 732 |
+
[2026-04-25 19:09:09] Epoch 1 | Step 4870 | Loss: 1.0545 | LR: 2.24e-06
|
| 733 |
+
[2026-04-25 19:09:11] Epoch 1 | Step 4880 | Loss: 1.0544 | LR: 2.18e-06
|
| 734 |
+
[2026-04-25 19:09:14] Epoch 1 | Step 4890 | Loss: 1.0541 | LR: 2.13e-06
|
| 735 |
+
[2026-04-25 19:09:17] Epoch 1 | Step 4900 | Loss: 1.0541 | LR: 2.09e-06
|
| 736 |
+
[2026-04-25 19:09:19] Epoch 1 | Step 4910 | Loss: 1.0539 | LR: 2.05e-06
|
| 737 |
+
[2026-04-25 19:09:22] Epoch 1 | Step 4920 | Loss: 1.0539 | LR: 2.03e-06
|
| 738 |
+
[2026-04-25 19:09:24] Epoch 1 | Step 4930 | Loss: 1.0538 | LR: 2.01e-06
|
| 739 |
+
[2026-04-25 19:09:26] Epoch 1 | Step 4940 | Loss: 1.0537 | LR: 2.00e-06
|
| 740 |
+
[2026-04-25 19:09:29] Epoch 1 | Step 4950 | Loss: 1.0536 | LR: 2.00e-06
|
| 741 |
+
[2026-04-25 19:09:31] Epoch 1 | Step 4960 | Loss: 1.0536 | LR: 2.00e-06
|
| 742 |
+
[2026-04-25 19:09:34] Epoch 1 | Step 4970 | Loss: 1.0534 | LR: 2.00e-06
|
| 743 |
+
[2026-04-25 19:09:36] Epoch 1 | Step 4980 | Loss: 1.0533 | LR: 2.00e-06
|
| 744 |
+
[2026-04-25 19:09:39] Epoch 1 | Step 4990 | Loss: 1.0530 | LR: 2.00e-06
|
| 745 |
+
[2026-04-25 19:09:41] Epoch 1 | Step 5000 | Loss: 1.0532 | LR: 2.00e-06
|
| 746 |
+
[2026-04-25 19:09:44] Epoch 1 | Step 5010 | Loss: 1.0530 | LR: 2.00e-06
|
| 747 |
+
[2026-04-25 19:09:46] Epoch 1 | Step 5020 | Loss: 1.0527 | LR: 2.00e-06
|
| 748 |
+
[2026-04-25 19:09:48] Epoch 1 | Step 5030 | Loss: 1.0527 | LR: 2.00e-06
|
| 749 |
+
[2026-04-25 19:09:51] Epoch 1 | Step 5040 | Loss: 1.0525 | LR: 2.00e-06
|
| 750 |
+
[2026-04-25 19:09:53] Epoch 1 | Step 5050 | Loss: 1.0524 | LR: 2.00e-06
|
| 751 |
+
[2026-04-25 19:09:56] Epoch 1 | Step 5060 | Loss: 1.0523 | LR: 2.00e-06
|
| 752 |
+
[2026-04-25 19:09:59] Epoch 1 | Step 5070 | Loss: 1.0522 | LR: 2.00e-06
|
| 753 |
+
[2026-04-25 19:10:01] Epoch 1 | Step 5080 | Loss: 1.0523 | LR: 2.00e-06
|
| 754 |
+
[2026-04-25 19:10:04] Epoch 1 | Step 5090 | Loss: 1.0523 | LR: 2.00e-06
|
| 755 |
+
[2026-04-25 19:10:07] Epoch 1 | Step 5100 | Loss: 1.0521 | LR: 2.00e-06
|
| 756 |
+
[2026-04-25 19:10:09] Epoch 1 | Step 5110 | Loss: 1.0520 | LR: 2.00e-06
|
| 757 |
+
[2026-04-25 19:10:12] Epoch 1 | Step 5120 | Loss: 1.0521 | LR: 2.00e-06
|
| 758 |
+
[2026-04-25 19:10:14] Epoch 1 | Step 5130 | Loss: 1.0520 | LR: 2.00e-06
|
| 759 |
+
[2026-04-25 19:10:17] Epoch 1 | Step 5140 | Loss: 1.0518 | LR: 2.00e-06
|
| 760 |
+
[2026-04-25 19:10:19] Epoch 1 | Step 5150 | Loss: 1.0516 | LR: 2.00e-06
|
| 761 |
+
[2026-04-25 19:10:22] Epoch 1 | Step 5160 | Loss: 1.0511 | LR: 2.00e-06
|
| 762 |
+
[2026-04-25 19:10:24] Epoch 1 | Step 5170 | Loss: 1.0511 | LR: 2.00e-06
|
| 763 |
+
[2026-04-25 19:10:27] Epoch 1 | Step 5180 | Loss: 1.0510 | LR: 2.00e-06
|
| 764 |
+
[2026-04-25 19:10:29] Epoch 1 | Step 5190 | Loss: 1.0510 | LR: 2.00e-06
|
| 765 |
+
[2026-04-25 19:10:32] Epoch 1 | Step 5200 | Loss: 1.0509 | LR: 2.00e-06
|
| 766 |
+
[2026-04-25 19:10:35] Epoch 1 | Step 5210 | Loss: 1.0508 | LR: 2.00e-06
|
| 767 |
+
[2026-04-25 19:10:37] Epoch 1 | Step 5220 | Loss: 1.0507 | LR: 2.00e-06
|
| 768 |
+
[2026-04-25 19:10:40] Epoch 1 | Step 5230 | Loss: 1.0506 | LR: 2.00e-06
|
| 769 |
+
[2026-04-25 19:10:43] Epoch 1 | Step 5240 | Loss: 1.0506 | LR: 2.00e-06
|
| 770 |
+
[2026-04-25 19:10:45] Epoch 1 | Step 5250 | Loss: 1.0506 | LR: 2.00e-06
|
| 771 |
+
[2026-04-25 19:10:48] Epoch 1 | Step 5260 | Loss: 1.0505 | LR: 2.00e-06
|
| 772 |
+
[2026-04-25 19:10:50] Epoch 1 | Step 5270 | Loss: 1.0504 | LR: 2.00e-06
|
| 773 |
+
[2026-04-25 19:10:53] Epoch 1 | Step 5280 | Loss: 1.0501 | LR: 2.00e-06
|
| 774 |
+
[2026-04-25 19:10:55] Epoch 1 | Step 5290 | Loss: 1.0498 | LR: 2.00e-06
|
| 775 |
+
[2026-04-25 19:10:58] Epoch 1 | Step 5300 | Loss: 1.0497 | LR: 2.00e-06
|
| 776 |
+
[2026-04-25 19:11:00] Epoch 1 | Step 5310 | Loss: 1.0498 | LR: 2.00e-06
|
| 777 |
+
[2026-04-25 19:11:03] Epoch 1 | Step 5320 | Loss: 1.0496 | LR: 2.00e-06
|
| 778 |
+
[2026-04-25 19:11:05] Epoch 1 | Step 5330 | Loss: 1.0496 | LR: 2.00e-06
|
| 779 |
+
[2026-04-25 19:11:08] Epoch 1 | Step 5340 | Loss: 1.0494 | LR: 2.00e-06
|
| 780 |
+
[2026-04-25 19:11:10] Epoch 1 | Step 5350 | Loss: 1.0493 | LR: 2.00e-06
|
| 781 |
+
[2026-04-25 19:11:13] Epoch 1 | Step 5360 | Loss: 1.0493 | LR: 2.00e-06
|
| 782 |
+
[2026-04-25 19:11:15] Epoch 1 | Step 5370 | Loss: 1.0493 | LR: 2.00e-06
|
| 783 |
+
[2026-04-25 19:11:17] Epoch 1 | Step 5380 | Loss: 1.0491 | LR: 2.00e-06
|
| 784 |
+
[2026-04-25 19:11:20] Epoch 1 | Step 5390 | Loss: 1.0489 | LR: 2.00e-06
|
| 785 |
+
[2026-04-25 19:11:23] Epoch 1 | Step 5400 | Loss: 1.0486 | LR: 2.00e-06
|
| 786 |
+
[2026-04-25 19:11:25] Epoch 1 | Step 5410 | Loss: 1.0486 | LR: 2.00e-06
|
| 787 |
+
[2026-04-25 19:11:28] Epoch 1 | Step 5420 | Loss: 1.0484 | LR: 2.00e-06
|
| 788 |
+
[2026-04-25 19:11:30] Epoch 1 | Step 5430 | Loss: 1.0483 | LR: 2.00e-06
|
| 789 |
+
[2026-04-25 19:11:32] Epoch 1 | Step 5440 | Loss: 1.0484 | LR: 2.00e-06
|
| 790 |
+
[2026-04-25 19:11:35] Epoch 1 | Step 5450 | Loss: 1.0485 | LR: 2.00e-06
|
| 791 |
+
[2026-04-25 19:11:38] Epoch 1 | Step 5460 | Loss: 1.0483 | LR: 2.00e-06
|
| 792 |
+
[2026-04-25 19:11:40] Epoch 1 | Step 5470 | Loss: 1.0481 | LR: 2.00e-06
|
| 793 |
+
[2026-04-25 19:11:43] Epoch 1 | Step 5480 | Loss: 1.0481 | LR: 2.00e-06
|
| 794 |
+
[2026-04-25 19:11:45] Epoch 1 | Step 5490 | Loss: 1.0482 | LR: 2.00e-06
|
| 795 |
+
[2026-04-25 19:11:48] Epoch 1 | Step 5500 | Loss: 1.0481 | LR: 2.00e-06
|
| 796 |
+
[2026-04-25 19:11:50] Epoch 1 | Step 5510 | Loss: 1.0482 | LR: 2.00e-06
|
| 797 |
+
[2026-04-25 19:11:53] Epoch 1 | Step 5520 | Loss: 1.0481 | LR: 2.00e-06
|
| 798 |
+
[2026-04-25 19:11:55] Epoch 1 | Step 5530 | Loss: 1.0480 | LR: 2.00e-06
|
| 799 |
+
[2026-04-25 19:11:58] Epoch 1 | Step 5540 | Loss: 1.0476 | LR: 2.00e-06
|
| 800 |
+
[2026-04-25 19:12:00] Epoch 1 | Step 5550 | Loss: 1.0476 | LR: 2.00e-06
|
| 801 |
+
[2026-04-25 19:12:03] Epoch 1 | Step 5560 | Loss: 1.0475 | LR: 2.00e-06
|
| 802 |
+
[2026-04-25 19:12:05] Epoch 1 | Step 5570 | Loss: 1.0478 | LR: 2.00e-06
|
| 803 |
+
[2026-04-25 19:12:08] Epoch 1 | Step 5580 | Loss: 1.0476 | LR: 2.00e-06
|
| 804 |
+
[2026-04-25 19:12:10] Epoch 1 | Step 5590 | Loss: 1.0473 | LR: 2.00e-06
|
| 805 |
+
[2026-04-25 19:12:13] Epoch 1 | Step 5600 | Loss: 1.0475 | LR: 2.00e-06
|
| 806 |
+
[2026-04-25 19:12:16] Epoch 1 | Step 5610 | Loss: 1.0475 | LR: 2.00e-06
|
| 807 |
+
[2026-04-25 19:12:18] Epoch 1 | Step 5620 | Loss: 1.0474 | LR: 2.00e-06
|
| 808 |
+
[2026-04-25 19:12:20] Epoch 1 | Step 5630 | Loss: 1.0474 | LR: 2.00e-06
|
| 809 |
+
[2026-04-25 19:12:23] Epoch 1 | Step 5640 | Loss: 1.0474 | LR: 2.00e-06
|
| 810 |
+
[2026-04-25 19:12:26] Epoch 1 | Step 5650 | Loss: 1.0473 | LR: 2.00e-06
|
| 811 |
+
[2026-04-25 19:12:28] Epoch 1 | Step 5660 | Loss: 1.0471 | LR: 2.00e-06
|
| 812 |
+
[2026-04-25 19:12:30] Epoch 1 | Step 5670 | Loss: 1.0470 | LR: 2.00e-06
|
| 813 |
+
[2026-04-25 19:12:33] Epoch 1 | Step 5680 | Loss: 1.0467 | LR: 2.00e-06
|
| 814 |
+
[2026-04-25 19:12:35] Epoch 1 | Step 5690 | Loss: 1.0467 | LR: 2.00e-06
|
| 815 |
+
[2026-04-25 19:12:38] Epoch 1 | Step 5700 | Loss: 1.0466 | LR: 2.00e-06
|
| 816 |
+
[2026-04-25 19:12:41] Epoch 1 | Step 5710 | Loss: 1.0467 | LR: 2.00e-06
|
| 817 |
+
[2026-04-25 19:12:43] Epoch 1 | Step 5720 | Loss: 1.0467 | LR: 2.00e-06
|
| 818 |
+
[2026-04-25 19:12:46] Epoch 1 | Step 5730 | Loss: 1.0466 | LR: 2.00e-06
|
| 819 |
+
[2026-04-25 19:12:48] Epoch 1 | Step 5740 | Loss: 1.0467 | LR: 2.00e-06
|
| 820 |
+
[2026-04-25 19:12:51] Epoch 1 | Step 5750 | Loss: 1.0466 | LR: 2.00e-06
|
| 821 |
+
[2026-04-25 19:12:54] Epoch 1 | Step 5760 | Loss: 1.0465 | LR: 2.00e-06
|
| 822 |
+
[2026-04-25 19:12:56] Epoch 1 | Step 5770 | Loss: 1.0466 | LR: 2.00e-06
|
| 823 |
+
[2026-04-25 19:12:59] Epoch 1 | Step 5780 | Loss: 1.0464 | LR: 2.00e-06
|
| 824 |
+
[2026-04-25 19:13:01] Epoch 1 | Step 5790 | Loss: 1.0465 | LR: 2.00e-06
|
| 825 |
+
[2026-04-25 19:13:04] Epoch 1 | Step 5800 | Loss: 1.0467 | LR: 2.00e-06
|
| 826 |
+
[2026-04-25 19:13:06] Epoch 1 | Step 5810 | Loss: 1.0466 | LR: 2.00e-06
|
| 827 |
+
[2026-04-25 19:13:09] Epoch 1 | Step 5820 | Loss: 1.0464 | LR: 2.00e-06
|
| 828 |
+
[2026-04-25 19:13:11] Epoch 1 | Step 5830 | Loss: 1.0463 | LR: 2.00e-06
|
| 829 |
+
[2026-04-25 19:13:14] Epoch 1 | Step 5840 | Loss: 1.0463 | LR: 2.00e-06
|
| 830 |
+
[2026-04-25 19:13:16] Epoch 1 | Step 5850 | Loss: 1.0464 | LR: 2.00e-06
|
| 831 |
+
[2026-04-25 19:13:18] Epoch 1 | Step 5860 | Loss: 1.0463 | LR: 2.00e-06
|
| 832 |
+
[2026-04-25 19:13:21] Epoch 1 | Step 5870 | Loss: 1.0463 | LR: 2.00e-06
|
| 833 |
+
[2026-04-25 19:13:24] Epoch 1 | Step 5880 | Loss: 1.0463 | LR: 2.00e-06
|
| 834 |
+
[2026-04-25 19:13:26] Epoch 1 | Step 5890 | Loss: 1.0463 | LR: 2.00e-06
|
| 835 |
+
[2026-04-25 19:13:29] Epoch 1 | Step 5900 | Loss: 1.0462 | LR: 2.00e-06
|
| 836 |
+
[2026-04-25 19:13:32] Epoch 1 | Step 5910 | Loss: 1.0462 | LR: 2.00e-06
|
| 837 |
+
[2026-04-25 19:13:34] Epoch 1 | Step 5920 | Loss: 1.0459 | LR: 2.00e-06
|
| 838 |
+
[2026-04-25 19:13:37] Epoch 1 | Step 5930 | Loss: 1.0460 | LR: 2.00e-06
|
| 839 |
+
[2026-04-25 19:13:40] Epoch 1 | Step 5940 | Loss: 1.0458 | LR: 2.00e-06
|
| 840 |
+
[2026-04-25 19:13:42] Epoch 1 | Step 5950 | Loss: 1.0459 | LR: 2.00e-06
|
| 841 |
+
[2026-04-25 19:13:45] Epoch 1 | Step 5960 | Loss: 1.0459 | LR: 2.00e-06
|
| 842 |
+
[2026-04-25 19:13:47] Epoch 1 | Step 5970 | Loss: 1.0460 | LR: 2.00e-06
|
| 843 |
+
[2026-04-25 19:13:50] Epoch 1 | Step 5980 | Loss: 1.0459 | LR: 2.00e-06
|
| 844 |
+
[2026-04-25 19:13:53] Epoch 1 | Step 5990 | Loss: 1.0461 | LR: 2.00e-06
|
| 845 |
+
[2026-04-25 19:13:55] Epoch 1 | Step 6000 | Loss: 1.0459 | LR: 2.00e-06
|
| 846 |
+
[2026-04-25 19:13:56] Validation | Batch 10/84 | Loss: 0.9715
|
| 847 |
+
[2026-04-25 19:13:56] Validation | Batch 20/84 | Loss: 0.9678
|
| 848 |
+
[2026-04-25 19:13:57] Validation | Batch 30/84 | Loss: 1.0471
|
| 849 |
+
[2026-04-25 19:13:57] Validation | Batch 40/84 | Loss: 1.0534
|
| 850 |
+
[2026-04-25 19:13:57] Validation | Batch 50/84 | Loss: 1.0527
|
| 851 |
+
[2026-04-25 19:13:58] Validation | Batch 60/84 | Loss: 1.0254
|
| 852 |
+
[2026-04-25 19:13:58] Validation | Batch 70/84 | Loss: 1.0086
|
| 853 |
+
[2026-04-25 19:13:59] Validation | Batch 80/84 | Loss: 1.0154
|
| 854 |
+
[2026-04-25 19:13:59] Validation | Batch 84/84 | Loss: 1.0084
|
| 855 |
+
[2026-04-25 19:13:59] Validation | Loss: 1.0084 | PPL: 2.80 | Time: 3.77s
|
| 856 |
+
[2026-04-25 19:14:02] New best model saved! Val loss: 1.0084
|
| 857 |
+
[2026-04-25 19:14:04] Epoch 1 | Step 6010 | Loss: 1.0459 | LR: 2.00e-06
|
| 858 |
+
[2026-04-25 19:14:07] Epoch 1 | Step 6020 | Loss: 1.0457 | LR: 2.00e-06
|
| 859 |
+
[2026-04-25 19:14:09] Epoch 1 | Step 6030 | Loss: 1.0459 | LR: 2.00e-06
|
| 860 |
+
[2026-04-25 19:14:12] Epoch 1 | Step 6040 | Loss: 1.0459 | LR: 2.00e-06
|
| 861 |
+
[2026-04-25 19:14:15] Epoch 1 | Step 6050 | Loss: 1.0459 | LR: 2.00e-06
|
| 862 |
+
[2026-04-25 19:14:17] Epoch 1 | Step 6060 | Loss: 1.0459 | LR: 2.00e-06
|
| 863 |
+
[2026-04-25 19:14:20] Epoch 1 | Step 6070 | Loss: 1.0457 | LR: 2.00e-06
|
| 864 |
+
[2026-04-25 19:14:22] Epoch 1 | Step 6080 | Loss: 1.0458 | LR: 2.00e-06
|
| 865 |
+
[2026-04-25 19:14:25] Epoch 1 | Step 6090 | Loss: 1.0458 | LR: 2.00e-06
|
| 866 |
+
[2026-04-25 19:14:27] Epoch 1 | Step 6100 | Loss: 1.0459 | LR: 2.00e-06
|
| 867 |
+
[2026-04-25 19:14:30] Epoch 1 | Step 6110 | Loss: 1.0459 | LR: 2.00e-06
|
| 868 |
+
[2026-04-25 19:14:33] Epoch 1 | Step 6120 | Loss: 1.0458 | LR: 2.00e-06
|
| 869 |
+
[2026-04-25 19:14:35] Epoch 1 | Step 6130 | Loss: 1.0457 | LR: 2.00e-06
|
| 870 |
+
[2026-04-25 19:14:38] Epoch 1 | Step 6140 | Loss: 1.0453 | LR: 2.00e-06
|
| 871 |
+
[2026-04-25 19:14:40] Epoch 1 | Step 6150 | Loss: 1.0452 | LR: 2.00e-06
|
| 872 |
+
[2026-04-25 19:14:43] Epoch 1 | Step 6160 | Loss: 1.0451 | LR: 2.00e-06
|
| 873 |
+
[2026-04-25 19:14:45] Epoch 1 | Step 6170 | Loss: 1.0453 | LR: 2.00e-06
|
| 874 |
+
[2026-04-25 19:14:48] Epoch 1 | Step 6180 | Loss: 1.0450 | LR: 2.00e-06
|
| 875 |
+
[2026-04-25 19:14:50] Epoch 1 | Step 6190 | Loss: 1.0448 | LR: 2.00e-06
|
| 876 |
+
[2026-04-25 19:14:53] Epoch 1 | Step 6200 | Loss: 1.0446 | LR: 2.00e-06
|
| 877 |
+
[2026-04-25 19:14:55] Epoch 1 | Step 6210 | Loss: 1.0447 | LR: 2.00e-06
|
| 878 |
+
[2026-04-25 19:14:58] Epoch 1 | Step 6220 | Loss: 1.0448 | LR: 2.00e-06
|
| 879 |
+
[2026-04-25 19:15:01] Epoch 1 | Step 6230 | Loss: 1.0446 | LR: 2.00e-06
|
| 880 |
+
[2026-04-25 19:15:03] Epoch 1 | Step 6240 | Loss: 1.0446 | LR: 2.00e-06
|
| 881 |
+
[2026-04-25 19:15:06] Epoch 1 | Step 6250 | Loss: 1.0443 | LR: 2.00e-06
|
| 882 |
+
[2026-04-25 19:15:08] Epoch 1 | Step 6260 | Loss: 1.0443 | LR: 2.00e-06
|
| 883 |
+
[2026-04-25 19:15:11] Epoch 1 | Step 6270 | Loss: 1.0442 | LR: 2.00e-06
|
| 884 |
+
[2026-04-25 19:15:13] Epoch 1 | Step 6280 | Loss: 1.0440 | LR: 2.00e-06
|
| 885 |
+
[2026-04-25 19:15:16] Epoch 1 | Step 6290 | Loss: 1.0439 | LR: 2.00e-06
|
| 886 |
+
[2026-04-25 19:15:19] Epoch 1 | Step 6300 | Loss: 1.0439 | LR: 2.00e-06
|
| 887 |
+
[2026-04-25 19:15:21] Epoch 1 | Step 6310 | Loss: 1.0439 | LR: 2.00e-06
|
| 888 |
+
[2026-04-25 19:15:24] Epoch 1 | Step 6320 | Loss: 1.0439 | LR: 2.00e-06
|
| 889 |
+
[2026-04-25 19:15:26] Epoch 1 | Step 6330 | Loss: 1.0441 | LR: 2.00e-06
|
| 890 |
+
[2026-04-25 19:15:29] Epoch 1 | Step 6340 | Loss: 1.0441 | LR: 2.00e-06
|
| 891 |
+
[2026-04-25 19:15:31] Epoch 1 | Step 6350 | Loss: 1.0441 | LR: 2.00e-06
|
| 892 |
+
[2026-04-25 19:15:34] Epoch 1 | Step 6360 | Loss: 1.0441 | LR: 2.00e-06
|
| 893 |
+
[2026-04-25 19:15:36] Epoch 1 | Step 6370 | Loss: 1.0441 | LR: 2.00e-06
|
| 894 |
+
[2026-04-25 19:15:38] Epoch 1 | Step 6380 | Loss: 1.0441 | LR: 2.00e-06
|
| 895 |
+
[2026-04-25 19:15:41] Epoch 1 | Step 6390 | Loss: 1.0439 | LR: 2.00e-06
|
| 896 |
+
[2026-04-25 19:15:43] Epoch 1 | Step 6400 | Loss: 1.0438 | LR: 2.00e-06
|
| 897 |
+
[2026-04-25 19:15:46] Epoch 1 | Step 6410 | Loss: 1.0437 | LR: 2.00e-06
|
| 898 |
+
[2026-04-25 19:15:48] Epoch 1 | Step 6420 | Loss: 1.0436 | LR: 2.00e-06
|
| 899 |
+
[2026-04-25 19:15:51] Epoch 1 | Step 6430 | Loss: 1.0435 | LR: 2.00e-06
|
| 900 |
+
[2026-04-25 19:15:53] Epoch 1 | Step 6440 | Loss: 1.0435 | LR: 2.00e-06
|
| 901 |
+
[2026-04-25 19:15:55] Epoch 1 | Step 6450 | Loss: 1.0434 | LR: 2.00e-06
|
| 902 |
+
[2026-04-25 19:15:58] Epoch 1 | Step 6460 | Loss: 1.0430 | LR: 2.00e-06
|
| 903 |
+
[2026-04-25 19:16:00] Epoch 1 | Step 6470 | Loss: 1.0430 | LR: 2.00e-06
|
| 904 |
+
[2026-04-25 19:16:03] Epoch 1 | Step 6480 | Loss: 1.0431 | LR: 2.00e-06
|
| 905 |
+
[2026-04-25 19:16:05] Epoch 1 | Step 6490 | Loss: 1.0432 | LR: 2.00e-06
|
| 906 |
+
[2026-04-25 19:16:08] Epoch 1 | Step 6500 | Loss: 1.0430 | LR: 2.00e-06
|
| 907 |
+
[2026-04-25 19:16:10] Epoch 1 | Step 6510 | Loss: 1.0428 | LR: 2.00e-06
|
| 908 |
+
[2026-04-25 19:16:13] Epoch 1 | Step 6520 | Loss: 1.0426 | LR: 2.00e-06
|
| 909 |
+
[2026-04-25 19:16:15] Epoch 1 | Step 6530 | Loss: 1.0424 | LR: 2.00e-06
|
| 910 |
+
[2026-04-25 19:16:18] Epoch 1 | Step 6540 | Loss: 1.0423 | LR: 2.00e-06
|
| 911 |
+
[2026-04-25 19:16:20] Epoch 1 | Step 6550 | Loss: 1.0422 | LR: 2.00e-06
|
| 912 |
+
[2026-04-25 19:16:22] Epoch 1 | Step 6560 | Loss: 1.0421 | LR: 2.00e-06
|
| 913 |
+
[2026-04-25 19:16:25] Epoch 1 | Step 6570 | Loss: 1.0421 | LR: 2.00e-06
|
| 914 |
+
[2026-04-25 19:16:28] Epoch 1 | Step 6580 | Loss: 1.0420 | LR: 2.00e-06
|
| 915 |
+
[2026-04-25 19:16:30] Epoch 1 | Step 6590 | Loss: 1.0419 | LR: 2.00e-06
|
| 916 |
+
[2026-04-25 19:16:33] Epoch 1 | Step 6600 | Loss: 1.0418 | LR: 2.00e-06
|
| 917 |
+
[2026-04-25 19:16:35] Epoch 1 | Step 6610 | Loss: 1.0418 | LR: 2.00e-06
|
| 918 |
+
[2026-04-25 19:16:38] Epoch 1 | Step 6620 | Loss: 1.0417 | LR: 2.00e-06
|
| 919 |
+
[2026-04-25 19:16:40] Epoch 1 | Step 6630 | Loss: 1.0416 | LR: 2.00e-06
|
| 920 |
+
[2026-04-25 19:16:43] Epoch 1 | Step 6640 | Loss: 1.0416 | LR: 2.00e-06
|
| 921 |
+
[2026-04-25 19:16:45] Epoch 1 | Step 6650 | Loss: 1.0417 | LR: 2.00e-06
|
| 922 |
+
[2026-04-25 19:16:48] Epoch 1 | Step 6660 | Loss: 1.0414 | LR: 2.00e-06
|
| 923 |
+
[2026-04-25 19:16:50] Epoch 1 | Step 6670 | Loss: 1.0414 | LR: 2.00e-06
|
| 924 |
+
[2026-04-25 19:16:53] Epoch 1 | Step 6680 | Loss: 1.0414 | LR: 2.00e-06
|
| 925 |
+
[2026-04-25 19:16:55] Epoch 1 | Step 6690 | Loss: 1.0414 | LR: 2.00e-06
|
| 926 |
+
[2026-04-25 19:16:58] Epoch 1 | Step 6700 | Loss: 1.0413 | LR: 2.00e-06
|
| 927 |
+
[2026-04-25 19:17:00] Epoch 1 | Step 6710 | Loss: 1.0413 | LR: 2.00e-06
|
| 928 |
+
[2026-04-25 19:17:03] Epoch 1 | Step 6720 | Loss: 1.0412 | LR: 2.00e-06
|
| 929 |
+
[2026-04-25 19:17:05] Epoch 1 | Step 6730 | Loss: 1.0414 | LR: 2.00e-06
|
| 930 |
+
[2026-04-25 19:17:08] Epoch 1 | Step 6740 | Loss: 1.0412 | LR: 2.00e-06
|
| 931 |
+
[2026-04-25 19:17:10] Epoch 1 | Step 6750 | Loss: 1.0411 | LR: 2.00e-06
|
| 932 |
+
[2026-04-25 19:17:13] Epoch 1 | Step 6760 | Loss: 1.0411 | LR: 2.00e-06
|
| 933 |
+
[2026-04-25 19:17:15] Epoch 1 | Step 6770 | Loss: 1.0410 | LR: 2.00e-06
|
| 934 |
+
[2026-04-25 19:17:18] Epoch 1 | Step 6780 | Loss: 1.0410 | LR: 2.00e-06
|
| 935 |
+
[2026-04-25 19:17:20] Epoch 1 | Step 6790 | Loss: 1.0411 | LR: 2.00e-06
|
| 936 |
+
[2026-04-25 19:17:23] Epoch 1 | Step 6800 | Loss: 1.0412 | LR: 2.00e-06
|
| 937 |
+
[2026-04-25 19:17:25] Epoch 1 | Step 6810 | Loss: 1.0412 | LR: 2.00e-06
|
| 938 |
+
[2026-04-25 19:17:28] Epoch 1 | Step 6820 | Loss: 1.0413 | LR: 2.00e-06
|
| 939 |
+
[2026-04-25 19:17:30] Epoch 1 | Step 6830 | Loss: 1.0414 | LR: 2.00e-06
|
| 940 |
+
[2026-04-25 19:17:33] Epoch 1 | Step 6840 | Loss: 1.0415 | LR: 2.00e-06
|
| 941 |
+
[2026-04-25 19:17:35] Epoch 1 | Step 6850 | Loss: 1.0415 | LR: 2.00e-06
|
| 942 |
+
[2026-04-25 19:17:38] Epoch 1 | Step 6860 | Loss: 1.0414 | LR: 2.00e-06
|
| 943 |
+
[2026-04-25 19:17:40] Epoch 1 | Step 6870 | Loss: 1.0413 | LR: 2.00e-06
|
| 944 |
+
[2026-04-25 19:17:43] Epoch 1 | Step 6880 | Loss: 1.0413 | LR: 2.00e-06
|
| 945 |
+
[2026-04-25 19:17:45] Epoch 1 | Step 6890 | Loss: 1.0414 | LR: 2.00e-06
|
| 946 |
+
[2026-04-25 19:17:48] Epoch 1 | Step 6900 | Loss: 1.0413 | LR: 2.00e-06
|
| 947 |
+
[2026-04-25 19:17:50] Epoch 1 | Step 6910 | Loss: 1.0410 | LR: 2.00e-06
|
| 948 |
+
[2026-04-25 19:17:53] Epoch 1 | Step 6920 | Loss: 1.0410 | LR: 2.00e-06
|
| 949 |
+
[2026-04-25 19:17:55] Epoch 1 | Step 6930 | Loss: 1.0410 | LR: 2.00e-06
|
| 950 |
+
[2026-04-25 19:17:58] Epoch 1 | Step 6940 | Loss: 1.0409 | LR: 2.00e-06
|
| 951 |
+
[2026-04-25 19:18:01] Epoch 1 | Step 6950 | Loss: 1.0408 | LR: 2.00e-06
|
| 952 |
+
[2026-04-25 19:18:03] Epoch 1 | Step 6960 | Loss: 1.0408 | LR: 2.00e-06
|
| 953 |
+
[2026-04-25 19:18:06] Epoch 1 | Step 6970 | Loss: 1.0407 | LR: 2.00e-06
|
| 954 |
+
[2026-04-25 19:18:08] Epoch 1 | Step 6980 | Loss: 1.0407 | LR: 2.00e-06
|
| 955 |
+
[2026-04-25 19:18:10] Epoch 1 | Step 6990 | Loss: 1.0404 | LR: 2.00e-06
|
| 956 |
+
[2026-04-25 19:18:13] Epoch 1 | Step 7000 | Loss: 1.0403 | LR: 2.00e-06
|
| 957 |
+
[2026-04-25 19:18:15] Epoch 1 | Step 7010 | Loss: 1.0402 | LR: 2.00e-06
|
| 958 |
+
[2026-04-25 19:18:18] Epoch 1 | Step 7020 | Loss: 1.0403 | LR: 2.00e-06
|
| 959 |
+
[2026-04-25 19:18:20] Epoch 1 | Step 7030 | Loss: 1.0402 | LR: 2.00e-06
|
| 960 |
+
[2026-04-25 19:18:23] Epoch 1 | Step 7040 | Loss: 1.0403 | LR: 2.00e-06
|
| 961 |
+
[2026-04-25 19:18:25] Epoch 1 | Step 7050 | Loss: 1.0401 | LR: 2.00e-06
|
| 962 |
+
[2026-04-25 19:18:28] Epoch 1 | Step 7060 | Loss: 1.0400 | LR: 2.00e-06
|
| 963 |
+
[2026-04-25 19:18:30] Epoch 1 | Step 7070 | Loss: 1.0401 | LR: 2.00e-06
|
| 964 |
+
[2026-04-25 19:18:33] Epoch 1 | Step 7080 | Loss: 1.0400 | LR: 2.00e-06
|
| 965 |
+
[2026-04-25 19:18:35] Epoch 1 | Step 7090 | Loss: 1.0400 | LR: 2.00e-06
|
| 966 |
+
[2026-04-25 19:18:38] Epoch 1 | Step 7100 | Loss: 1.0398 | LR: 2.00e-06
|
| 967 |
+
[2026-04-25 19:18:40] Epoch 1 | Step 7110 | Loss: 1.0397 | LR: 2.00e-06
|
| 968 |
+
[2026-04-25 19:18:43] Epoch 1 | Step 7120 | Loss: 1.0398 | LR: 2.00e-06
|
| 969 |
+
[2026-04-25 19:18:45] Epoch 1 | Step 7130 | Loss: 1.0396 | LR: 2.00e-06
|
| 970 |
+
[2026-04-25 19:18:48] Epoch 1 | Step 7140 | Loss: 1.0395 | LR: 2.00e-06
|
| 971 |
+
[2026-04-25 19:18:50] Epoch 1 | Step 7150 | Loss: 1.0396 | LR: 2.00e-06
|
| 972 |
+
[2026-04-25 19:18:53] Epoch 1 | Step 7160 | Loss: 1.0394 | LR: 2.00e-06
|
| 973 |
+
[2026-04-25 19:18:55] Epoch 1 | Step 7170 | Loss: 1.0394 | LR: 2.00e-06
|
| 974 |
+
[2026-04-25 19:18:58] Epoch 1 | Step 7180 | Loss: 1.0394 | LR: 2.00e-06
|
| 975 |
+
[2026-04-25 19:19:00] Epoch 1 | Step 7190 | Loss: 1.0395 | LR: 2.00e-06
|
| 976 |
+
[2026-04-25 19:19:03] Epoch 1 | Step 7200 | Loss: 1.0394 | LR: 2.00e-06
|
| 977 |
+
[2026-04-25 19:19:06] Epoch 1 | Step 7210 | Loss: 1.0392 | LR: 2.00e-06
|
| 978 |
+
[2026-04-25 19:19:08] Epoch 1 | Step 7220 | Loss: 1.0393 | LR: 2.00e-06
|
| 979 |
+
[2026-04-25 19:19:11] Epoch 1 | Step 7230 | Loss: 1.0393 | LR: 2.00e-06
|
| 980 |
+
[2026-04-25 19:19:13] Epoch 1 | Step 7240 | Loss: 1.0393 | LR: 2.00e-06
|
| 981 |
+
[2026-04-25 19:19:16] Epoch 1 | Step 7250 | Loss: 1.0392 | LR: 2.00e-06
|
| 982 |
+
[2026-04-25 19:19:19] Epoch 1 | Step 7260 | Loss: 1.0392 | LR: 2.00e-06
|
| 983 |
+
[2026-04-25 19:19:21] Epoch 1 | Step 7270 | Loss: 1.0393 | LR: 2.00e-06
|
| 984 |
+
[2026-04-25 19:19:24] Epoch 1 | Step 7280 | Loss: 1.0393 | LR: 2.00e-06
|
| 985 |
+
[2026-04-25 19:19:26] Epoch 1 | Step 7290 | Loss: 1.0391 | LR: 2.00e-06
|
| 986 |
+
[2026-04-25 19:19:29] Epoch 1 | Step 7300 | Loss: 1.0389 | LR: 2.00e-06
|
| 987 |
+
[2026-04-25 19:19:31] Epoch 1 | Step 7310 | Loss: 1.0387 | LR: 2.00e-06
|
| 988 |
+
[2026-04-25 19:19:34] Epoch 1 | Step 7320 | Loss: 1.0385 | LR: 2.00e-06
|
| 989 |
+
[2026-04-25 19:19:36] Epoch 1 | Step 7330 | Loss: 1.0386 | LR: 2.00e-06
|
| 990 |
+
[2026-04-25 19:19:39] Epoch 1 | Step 7340 | Loss: 1.0387 | LR: 2.00e-06
|
| 991 |
+
[2026-04-25 19:19:42] Epoch 1 | Step 7350 | Loss: 1.0388 | LR: 2.00e-06
|
| 992 |
+
[2026-04-25 19:19:44] Epoch 1 | Step 7360 | Loss: 1.0387 | LR: 2.00e-06
|
| 993 |
+
[2026-04-25 19:19:47] Epoch 1 | Step 7370 | Loss: 1.0385 | LR: 2.00e-06
|
| 994 |
+
[2026-04-25 19:19:49] Epoch 1 | Step 7380 | Loss: 1.0383 | LR: 2.00e-06
|
| 995 |
+
[2026-04-25 19:19:52] Epoch 1 | Step 7390 | Loss: 1.0382 | LR: 2.00e-06
|
| 996 |
+
[2026-04-25 19:19:54] Epoch 1 | Step 7400 | Loss: 1.0381 | LR: 2.00e-06
|
| 997 |
+
[2026-04-25 19:19:57] Epoch 1 | Step 7410 | Loss: 1.0382 | LR: 2.00e-06
|
| 998 |
+
[2026-04-25 19:19:59] Epoch 1 | Step 7420 | Loss: 1.0382 | LR: 2.00e-06
|
| 999 |
+
[2026-04-25 19:20:02] Epoch 1 | Step 7430 | Loss: 1.0381 | LR: 2.00e-06
|
| 1000 |
+
[2026-04-25 19:20:04] Epoch 1 | Step 7440 | Loss: 1.0381 | LR: 2.00e-06
|
| 1001 |
+
[2026-04-25 19:20:07] Epoch 1 | Step 7450 | Loss: 1.0380 | LR: 2.00e-06
|
| 1002 |
+
[2026-04-25 19:20:09] Epoch 1 | Step 7460 | Loss: 1.0379 | LR: 2.00e-06
|
| 1003 |
+
[2026-04-25 19:20:12] Epoch 1 | Step 7470 | Loss: 1.0379 | LR: 2.00e-06
|
| 1004 |
+
[2026-04-25 19:20:14] Epoch 1 | Step 7480 | Loss: 1.0379 | LR: 2.00e-06
|
| 1005 |
+
[2026-04-25 19:20:17] Epoch 1 | Step 7490 | Loss: 1.0380 | LR: 2.00e-06
|
| 1006 |
+
[2026-04-25 19:20:19] Epoch 1 | Step 7500 | Loss: 1.0380 | LR: 2.00e-06
|
| 1007 |
+
[2026-04-25 19:20:22] Epoch 1 | Step 7510 | Loss: 1.0381 | LR: 2.00e-06
|
| 1008 |
+
[2026-04-25 19:20:24] Epoch 1 | Step 7520 | Loss: 1.0380 | LR: 2.00e-06
|
| 1009 |
+
[2026-04-25 19:20:26] Epoch 1 | Step 7530 | Loss: 1.0379 | LR: 2.00e-06
|
| 1010 |
+
[2026-04-25 19:20:29] Epoch 1 | Step 7540 | Loss: 1.0378 | LR: 2.00e-06
|
| 1011 |
+
[2026-04-25 19:20:31] Epoch 1 | Step 7550 | Loss: 1.0379 | LR: 2.00e-06
|
| 1012 |
+
[2026-04-25 19:20:34] Epoch 1 | Step 7560 | Loss: 1.0379 | LR: 2.00e-06
|
| 1013 |
+
[2026-04-25 19:20:36] Epoch 1 | Step 7570 | Loss: 1.0378 | LR: 2.00e-06
|
| 1014 |
+
[2026-04-25 19:20:39] Epoch 1 | Step 7580 | Loss: 1.0377 | LR: 2.00e-06
|
| 1015 |
+
[2026-04-25 19:20:41] Epoch 1 | Step 7590 | Loss: 1.0376 | LR: 2.00e-06
|
| 1016 |
+
[2026-04-25 19:20:43] Epoch 1 | Step 7600 | Loss: 1.0375 | LR: 2.00e-06
|
| 1017 |
+
[2026-04-25 19:20:46] Epoch 1 | Step 7610 | Loss: 1.0374 | LR: 2.00e-06
|
| 1018 |
+
[2026-04-25 19:20:48] Epoch 1 | Step 7620 | Loss: 1.0373 | LR: 2.00e-06
|
| 1019 |
+
[2026-04-25 19:20:51] Epoch 1 | Step 7630 | Loss: 1.0372 | LR: 2.00e-06
|
| 1020 |
+
[2026-04-25 19:20:53] Epoch 1 | Step 7640 | Loss: 1.0371 | LR: 2.00e-06
|
| 1021 |
+
[2026-04-25 19:20:56] Epoch 1 | Step 7650 | Loss: 1.0370 | LR: 2.00e-06
|
| 1022 |
+
[2026-04-25 19:20:59] Epoch 1 | Step 7660 | Loss: 1.0369 | LR: 2.00e-06
|
| 1023 |
+
[2026-04-25 19:21:01] Epoch 1 | Step 7670 | Loss: 1.0367 | LR: 2.00e-06
|
| 1024 |
+
[2026-04-25 19:21:04] Epoch 1 | Step 7680 | Loss: 1.0367 | LR: 2.00e-06
|
| 1025 |
+
[2026-04-25 19:21:06] Epoch 1 | Step 7690 | Loss: 1.0368 | LR: 2.00e-06
|
| 1026 |
+
[2026-04-25 19:21:09] Epoch 1 | Step 7700 | Loss: 1.0367 | LR: 2.00e-06
|
| 1027 |
+
[2026-04-25 19:21:12] Epoch 1 | Step 7710 | Loss: 1.0364 | LR: 2.00e-06
|
| 1028 |
+
[2026-04-25 19:21:14] Epoch 1 | Step 7720 | Loss: 1.0365 | LR: 2.00e-06
|
| 1029 |
+
[2026-04-25 19:21:17] Epoch 1 | Step 7730 | Loss: 1.0367 | LR: 2.00e-06
|
| 1030 |
+
[2026-04-25 19:21:19] Epoch 1 | Step 7740 | Loss: 1.0368 | LR: 2.00e-06
|
| 1031 |
+
[2026-04-25 19:21:22] Epoch 1 | Step 7750 | Loss: 1.0368 | LR: 2.00e-06
|
| 1032 |
+
[2026-04-25 19:21:25] Epoch 1 | Step 7760 | Loss: 1.0366 | LR: 2.00e-06
|
| 1033 |
+
[2026-04-25 19:21:27] Epoch 1 | Step 7770 | Loss: 1.0365 | LR: 2.00e-06
|
| 1034 |
+
[2026-04-25 19:21:30] Epoch 1 | Step 7780 | Loss: 1.0364 | LR: 2.00e-06
|
| 1035 |
+
[2026-04-25 19:21:32] Epoch 1 | Step 7790 | Loss: 1.0363 | LR: 2.00e-06
|
| 1036 |
+
[2026-04-25 19:21:35] Epoch 1 | Step 7800 | Loss: 1.0362 | LR: 2.00e-06
|
| 1037 |
+
[2026-04-25 19:21:37] Epoch 1 | Step 7810 | Loss: 1.0363 | LR: 2.00e-06
|
| 1038 |
+
[2026-04-25 19:21:40] Epoch 1 | Step 7820 | Loss: 1.0364 | LR: 2.00e-06
|
| 1039 |
+
[2026-04-25 19:21:42] Epoch 1 | Step 7830 | Loss: 1.0363 | LR: 2.00e-06
|
| 1040 |
+
[2026-04-25 19:21:45] Epoch 1 | Step 7840 | Loss: 1.0362 | LR: 2.00e-06
|
| 1041 |
+
[2026-04-25 19:21:47] Epoch 1 | Step 7850 | Loss: 1.0360 | LR: 2.00e-06
|
| 1042 |
+
[2026-04-25 19:21:50] Epoch 1 | Step 7860 | Loss: 1.0360 | LR: 2.00e-06
|
| 1043 |
+
[2026-04-25 19:21:53] Epoch 1 | Step 7870 | Loss: 1.0359 | LR: 2.00e-06
|
| 1044 |
+
[2026-04-25 19:21:55] Epoch 1 | Step 7880 | Loss: 1.0359 | LR: 2.00e-06
|
| 1045 |
+
[2026-04-25 19:21:58] Epoch 1 | Step 7890 | Loss: 1.0359 | LR: 2.00e-06
|
| 1046 |
+
[2026-04-25 19:22:01] Epoch 1 | Step 7900 | Loss: 1.0359 | LR: 2.00e-06
|
| 1047 |
+
[2026-04-25 19:22:04] Epoch 1 | Step 7910 | Loss: 1.0359 | LR: 2.00e-06
|
| 1048 |
+
[2026-04-25 19:22:06] Epoch 1 | Step 7920 | Loss: 1.0359 | LR: 2.00e-06
|
| 1049 |
+
[2026-04-25 19:22:09] Epoch 1 | Step 7930 | Loss: 1.0360 | LR: 2.00e-06
|
| 1050 |
+
[2026-04-25 19:22:11] Epoch 1 | Step 7940 | Loss: 1.0360 | LR: 2.00e-06
|
| 1051 |
+
[2026-04-25 19:22:14] Epoch 1 | Step 7950 | Loss: 1.0362 | LR: 2.00e-06
|
| 1052 |
+
[2026-04-25 19:22:16] Epoch 1 | Step 7960 | Loss: 1.0362 | LR: 2.00e-06
|
| 1053 |
+
[2026-04-25 19:22:19] Epoch 1 | Step 7970 | Loss: 1.0362 | LR: 2.00e-06
|
| 1054 |
+
[2026-04-25 19:22:21] Epoch 1 | Step 7980 | Loss: 1.0361 | LR: 2.00e-06
|
| 1055 |
+
[2026-04-25 19:22:24] Epoch 1 | Step 7990 | Loss: 1.0360 | LR: 2.00e-06
|
| 1056 |
+
[2026-04-25 19:22:26] Epoch 1 | Step 8000 | Loss: 1.0360 | LR: 2.00e-06
|
| 1057 |
+
[2026-04-25 19:22:27] Validation | Batch 10/84 | Loss: 0.9692
|
| 1058 |
+
[2026-04-25 19:22:27] Validation | Batch 20/84 | Loss: 0.9655
|
| 1059 |
+
[2026-04-25 19:22:28] Validation | Batch 30/84 | Loss: 1.0454
|
| 1060 |
+
[2026-04-25 19:22:28] Validation | Batch 40/84 | Loss: 1.0514
|
| 1061 |
+
[2026-04-25 19:22:28] Validation | Batch 50/84 | Loss: 1.0510
|
| 1062 |
+
[2026-04-25 19:22:29] Validation | Batch 60/84 | Loss: 1.0237
|
| 1063 |
+
[2026-04-25 19:22:29] Validation | Batch 70/84 | Loss: 1.0070
|
| 1064 |
+
[2026-04-25 19:22:30] Validation | Batch 80/84 | Loss: 1.0139
|
| 1065 |
+
[2026-04-25 19:22:30] Validation | Batch 84/84 | Loss: 1.0070
|
| 1066 |
+
[2026-04-25 19:22:30] Validation | Loss: 1.0070 | PPL: 2.80 | Time: 3.75s
|
| 1067 |
+
[2026-04-25 19:22:33] New best model saved! Val loss: 1.0070
|
| 1068 |
+
[2026-04-25 19:22:36] Epoch 1 | Step 8010 | Loss: 1.0359 | LR: 2.00e-06
|
| 1069 |
+
[2026-04-25 19:22:38] Epoch 1 | Step 8020 | Loss: 1.0357 | LR: 2.00e-06
|
| 1070 |
+
[2026-04-25 19:22:41] Epoch 1 | Step 8030 | Loss: 1.0356 | LR: 2.00e-06
|
| 1071 |
+
[2026-04-25 19:22:44] Epoch 1 | Step 8040 | Loss: 1.0357 | LR: 2.00e-06
|
| 1072 |
+
[2026-04-25 19:22:46] Epoch 1 | Step 8050 | Loss: 1.0356 | LR: 2.00e-06
|
| 1073 |
+
[2026-04-25 19:22:49] Epoch 1 | Step 8060 | Loss: 1.0356 | LR: 2.00e-06
|
| 1074 |
+
[2026-04-25 19:22:51] Epoch 1 | Step 8070 | Loss: 1.0355 | LR: 2.00e-06
|
| 1075 |
+
[2026-04-25 19:22:54] Epoch 1 | Step 8080 | Loss: 1.0355 | LR: 2.00e-06
|
| 1076 |
+
[2026-04-25 19:22:57] Epoch 1 | Step 8090 | Loss: 1.0353 | LR: 2.00e-06
|
| 1077 |
+
[2026-04-25 19:22:59] Epoch 1 | Step 8100 | Loss: 1.0352 | LR: 2.00e-06
|
| 1078 |
+
[2026-04-25 19:23:02] Epoch 1 | Step 8110 | Loss: 1.0353 | LR: 2.00e-06
|
| 1079 |
+
[2026-04-25 19:23:04] Epoch 1 | Step 8120 | Loss: 1.0353 | LR: 2.00e-06
|
| 1080 |
+
[2026-04-25 19:23:07] Epoch 1 | Step 8130 | Loss: 1.0352 | LR: 2.00e-06
|
| 1081 |
+
[2026-04-25 19:23:09] Epoch 1 | Step 8140 | Loss: 1.0353 | LR: 2.00e-06
|
| 1082 |
+
[2026-04-25 19:23:12] Epoch 1 | Step 8150 | Loss: 1.0353 | LR: 2.00e-06
|
| 1083 |
+
[2026-04-25 19:23:14] Epoch 1 | Step 8160 | Loss: 1.0351 | LR: 2.00e-06
|
| 1084 |
+
[2026-04-25 19:23:17] Epoch 1 | Step 8170 | Loss: 1.0351 | LR: 2.00e-06
|
| 1085 |
+
[2026-04-25 19:23:19] Epoch 1 | Step 8180 | Loss: 1.0351 | LR: 2.00e-06
|
| 1086 |
+
[2026-04-25 19:23:22] Epoch 1 | Step 8190 | Loss: 1.0350 | LR: 2.00e-06
|
| 1087 |
+
[2026-04-25 19:23:24] Epoch 1 | Step 8200 | Loss: 1.0350 | LR: 2.00e-06
|
| 1088 |
+
[2026-04-25 19:23:27] Epoch 1 | Step 8210 | Loss: 1.0350 | LR: 2.00e-06
|
| 1089 |
+
[2026-04-25 19:23:29] Epoch 1 | Step 8220 | Loss: 1.0349 | LR: 2.00e-06
|
| 1090 |
+
[2026-04-25 19:23:32] Epoch 1 | Step 8230 | Loss: 1.0350 | LR: 2.00e-06
|
| 1091 |
+
[2026-04-25 19:23:34] Epoch 1 | Step 8240 | Loss: 1.0350 | LR: 2.00e-06
|
| 1092 |
+
[2026-04-25 19:23:37] Epoch 1 | Step 8250 | Loss: 1.0350 | LR: 2.00e-06
|
| 1093 |
+
[2026-04-25 19:23:39] Epoch 1 | Step 8260 | Loss: 1.0351 | LR: 2.00e-06
|
| 1094 |
+
[2026-04-25 19:23:42] Epoch 1 | Step 8270 | Loss: 1.0352 | LR: 2.00e-06
|
| 1095 |
+
[2026-04-25 19:23:44] Epoch 1 | Step 8280 | Loss: 1.0352 | LR: 2.00e-06
|
| 1096 |
+
[2026-04-25 19:23:47] Epoch 1 | Step 8290 | Loss: 1.0353 | LR: 2.00e-06
|
| 1097 |
+
[2026-04-25 19:23:50] Epoch 1 | Step 8300 | Loss: 1.0351 | LR: 2.00e-06
|
| 1098 |
+
[2026-04-25 19:23:52] Epoch 1 | Step 8310 | Loss: 1.0352 | LR: 2.00e-06
|
| 1099 |
+
[2026-04-25 19:23:54] Epoch 1 | Step 8320 | Loss: 1.0351 | LR: 2.00e-06
|
| 1100 |
+
[2026-04-25 19:23:56] Epoch 1 | Step 8330 | Loss: 1.0352 | LR: 2.00e-06
|
| 1101 |
+
[2026-04-25 19:23:59] Epoch 1 | Step 8340 | Loss: 1.0352 | LR: 2.00e-06
|
| 1102 |
+
[2026-04-25 19:24:02] Epoch 1 | Step 8350 | Loss: 1.0350 | LR: 2.00e-06
|
| 1103 |
+
[2026-04-25 19:24:04] Epoch 1 | Step 8360 | Loss: 1.0350 | LR: 2.00e-06
|
| 1104 |
+
[2026-04-25 19:24:06] Epoch 1 | Step 8370 | Loss: 1.0350 | LR: 2.00e-06
|
| 1105 |
+
[2026-04-25 19:24:09] Epoch 1 | Step 8380 | Loss: 1.0349 | LR: 2.00e-06
|
| 1106 |
+
[2026-04-25 19:24:11] Epoch 1 | Step 8390 | Loss: 1.0350 | LR: 2.00e-06
|
| 1107 |
+
[2026-04-25 19:24:14] Epoch 1 | Step 8400 | Loss: 1.0350 | LR: 2.00e-06
|
| 1108 |
+
[2026-04-25 19:24:16] Epoch 1 | Step 8410 | Loss: 1.0350 | LR: 2.00e-06
|
| 1109 |
+
[2026-04-25 19:24:19] Epoch 1 | Step 8420 | Loss: 1.0351 | LR: 2.00e-06
|
| 1110 |
+
[2026-04-25 19:24:21] Epoch 1 | Step 8430 | Loss: 1.0349 | LR: 2.00e-06
|
| 1111 |
+
[2026-04-25 19:24:24] Epoch 1 | Step 8440 | Loss: 1.0349 | LR: 2.00e-06
|
| 1112 |
+
[2026-04-25 19:24:26] Epoch 1 | Step 8450 | Loss: 1.0349 | LR: 2.00e-06
|
| 1113 |
+
[2026-04-25 19:24:29] Epoch 1 | Step 8460 | Loss: 1.0349 | LR: 2.00e-06
|
| 1114 |
+
[2026-04-25 19:24:31] Epoch 1 | Step 8470 | Loss: 1.0348 | LR: 2.00e-06
|
| 1115 |
+
[2026-04-25 19:24:34] Epoch 1 | Step 8480 | Loss: 1.0348 | LR: 2.00e-06
|
| 1116 |
+
[2026-04-25 19:24:36] Epoch 1 | Step 8490 | Loss: 1.0346 | LR: 2.00e-06
|
| 1117 |
+
[2026-04-25 19:24:39] Epoch 1 | Step 8500 | Loss: 1.0347 | LR: 2.00e-06
|
| 1118 |
+
[2026-04-25 19:24:41] Epoch 1 | Step 8510 | Loss: 1.0346 | LR: 2.00e-06
|
| 1119 |
+
[2026-04-25 19:24:44] Epoch 1 | Step 8520 | Loss: 1.0346 | LR: 2.00e-06
|
| 1120 |
+
[2026-04-25 19:24:46] Epoch 1 | Step 8530 | Loss: 1.0346 | LR: 2.00e-06
|
| 1121 |
+
[2026-04-25 19:24:49] Epoch 1 | Step 8540 | Loss: 1.0347 | LR: 2.00e-06
|
| 1122 |
+
[2026-04-25 19:24:51] Epoch 1 | Step 8550 | Loss: 1.0347 | LR: 2.00e-06
|
| 1123 |
+
[2026-04-25 19:24:54] Epoch 1 | Step 8560 | Loss: 1.0347 | LR: 2.00e-06
|
| 1124 |
+
[2026-04-25 19:24:56] Epoch 1 | Step 8570 | Loss: 1.0347 | LR: 2.00e-06
|
| 1125 |
+
[2026-04-25 19:24:58] Epoch 1 | Step 8580 | Loss: 1.0346 | LR: 2.00e-06
|
| 1126 |
+
[2026-04-25 19:25:01] Epoch 1 | Step 8590 | Loss: 1.0345 | LR: 2.00e-06
|
| 1127 |
+
[2026-04-25 19:25:04] Epoch 1 | Step 8600 | Loss: 1.0343 | LR: 2.00e-06
|
| 1128 |
+
[2026-04-25 19:25:06] Epoch 1 | Step 8610 | Loss: 1.0344 | LR: 2.00e-06
|
| 1129 |
+
[2026-04-25 19:25:09] Epoch 1 | Step 8620 | Loss: 1.0344 | LR: 2.00e-06
|
| 1130 |
+
[2026-04-25 19:25:11] Epoch 1 | Step 8630 | Loss: 1.0342 | LR: 2.00e-06
|
| 1131 |
+
[2026-04-25 19:25:14] Epoch 1 | Step 8640 | Loss: 1.0343 | LR: 2.00e-06
|
| 1132 |
+
[2026-04-25 19:25:17] Epoch 1 | Step 8650 | Loss: 1.0344 | LR: 2.00e-06
|
| 1133 |
+
[2026-04-25 19:25:19] Epoch 1 | Step 8660 | Loss: 1.0342 | LR: 2.00e-06
|
| 1134 |
+
[2026-04-25 19:25:22] Epoch 1 | Step 8670 | Loss: 1.0343 | LR: 2.00e-06
|
| 1135 |
+
[2026-04-25 19:25:24] Epoch 1 | Step 8680 | Loss: 1.0343 | LR: 2.00e-06
|
| 1136 |
+
[2026-04-25 19:25:26] Epoch 1 | Step 8690 | Loss: 1.0342 | LR: 2.00e-06
|
| 1137 |
+
[2026-04-25 19:25:29] Epoch 1 | Step 8700 | Loss: 1.0342 | LR: 2.00e-06
|
| 1138 |
+
[2026-04-25 19:25:32] Epoch 1 | Step 8710 | Loss: 1.0339 | LR: 2.00e-06
|
| 1139 |
+
[2026-04-25 19:25:34] Epoch 1 | Step 8720 | Loss: 1.0338 | LR: 2.00e-06
|
| 1140 |
+
[2026-04-25 19:25:37] Epoch 1 | Step 8730 | Loss: 1.0338 | LR: 2.00e-06
|
| 1141 |
+
[2026-04-25 19:25:39] Epoch 1 | Step 8740 | Loss: 1.0339 | LR: 2.00e-06
|
| 1142 |
+
[2026-04-25 19:25:42] Epoch 1 | Step 8750 | Loss: 1.0339 | LR: 2.00e-06
|
| 1143 |
+
[2026-04-25 19:25:44] Epoch 1 | Step 8760 | Loss: 1.0338 | LR: 2.00e-06
|
| 1144 |
+
[2026-04-25 19:25:47] Epoch 1 | Step 8770 | Loss: 1.0337 | LR: 2.00e-06
|
| 1145 |
+
[2026-04-25 19:25:50] Epoch 1 | Step 8780 | Loss: 1.0337 | LR: 2.00e-06
|
| 1146 |
+
[2026-04-25 19:25:52] Epoch 1 | Step 8790 | Loss: 1.0337 | LR: 2.00e-06
|
| 1147 |
+
[2026-04-25 19:25:55] Epoch 1 | Step 8800 | Loss: 1.0335 | LR: 2.00e-06
|
| 1148 |
+
[2026-04-25 19:25:57] Epoch 1 | Step 8810 | Loss: 1.0334 | LR: 2.00e-06
|
| 1149 |
+
[2026-04-25 19:26:00] Epoch 1 | Step 8820 | Loss: 1.0334 | LR: 2.00e-06
|
| 1150 |
+
[2026-04-25 19:26:02] Epoch 1 | Step 8830 | Loss: 1.0334 | LR: 2.00e-06
|
| 1151 |
+
[2026-04-25 19:26:05] Epoch 1 | Step 8840 | Loss: 1.0334 | LR: 2.00e-06
|
| 1152 |
+
[2026-04-25 19:26:08] Epoch 1 | Step 8850 | Loss: 1.0333 | LR: 2.00e-06
|
| 1153 |
+
[2026-04-25 19:26:10] Epoch 1 | Step 8860 | Loss: 1.0333 | LR: 2.00e-06
|
| 1154 |
+
[2026-04-25 19:26:13] Epoch 1 | Step 8870 | Loss: 1.0334 | LR: 2.00e-06
|
| 1155 |
+
[2026-04-25 19:26:15] Epoch 1 | Step 8880 | Loss: 1.0333 | LR: 2.00e-06
|
| 1156 |
+
[2026-04-25 19:26:18] Epoch 1 | Step 8890 | Loss: 1.0332 | LR: 2.00e-06
|
| 1157 |
+
[2026-04-25 19:26:20] Epoch 1 | Step 8900 | Loss: 1.0330 | LR: 2.00e-06
|
| 1158 |
+
[2026-04-25 19:26:22] Epoch 1 | Step 8910 | Loss: 1.0331 | LR: 2.00e-06
|
| 1159 |
+
[2026-04-25 19:26:25] Epoch 1 | Step 8920 | Loss: 1.0329 | LR: 2.00e-06
|
| 1160 |
+
[2026-04-25 19:26:27] Epoch 1 | Step 8930 | Loss: 1.0327 | LR: 2.00e-06
|
| 1161 |
+
[2026-04-25 19:26:30] Epoch 1 | Step 8940 | Loss: 1.0328 | LR: 2.00e-06
|
| 1162 |
+
[2026-04-25 19:26:32] Epoch 1 | Step 8950 | Loss: 1.0328 | LR: 2.00e-06
|
| 1163 |
+
[2026-04-25 19:26:35] Epoch 1 | Step 8960 | Loss: 1.0328 | LR: 2.00e-06
|
| 1164 |
+
[2026-04-25 19:26:37] Epoch 1 | Step 8970 | Loss: 1.0328 | LR: 2.00e-06
|
| 1165 |
+
[2026-04-25 19:26:40] Epoch 1 | Step 8980 | Loss: 1.0327 | LR: 2.00e-06
|
| 1166 |
+
[2026-04-25 19:26:42] Epoch 1 | Step 8990 | Loss: 1.0326 | LR: 2.00e-06
|
| 1167 |
+
[2026-04-25 19:26:45] Epoch 1 | Step 9000 | Loss: 1.0325 | LR: 2.00e-06
|
| 1168 |
+
[2026-04-25 19:26:48] Epoch 1 | Step 9010 | Loss: 1.0326 | LR: 2.00e-06
|
| 1169 |
+
[2026-04-25 19:26:50] Epoch 1 | Step 9020 | Loss: 1.0327 | LR: 2.00e-06
|
| 1170 |
+
[2026-04-25 19:26:53] Epoch 1 | Step 9030 | Loss: 1.0326 | LR: 2.00e-06
|
| 1171 |
+
[2026-04-25 19:26:55] Epoch 1 | Step 9040 | Loss: 1.0326 | LR: 2.00e-06
|
| 1172 |
+
[2026-04-25 19:26:58] Epoch 1 | Step 9050 | Loss: 1.0325 | LR: 2.00e-06
|
| 1173 |
+
[2026-04-25 19:27:00] Epoch 1 | Step 9060 | Loss: 1.0326 | LR: 2.00e-06
|
| 1174 |
+
[2026-04-25 19:27:03] Epoch 1 | Step 9070 | Loss: 1.0324 | LR: 2.00e-06
|
| 1175 |
+
[2026-04-25 19:27:05] Epoch 1 | Step 9080 | Loss: 1.0325 | LR: 2.00e-06
|
| 1176 |
+
[2026-04-25 19:27:07] Epoch 1 | Step 9090 | Loss: 1.0324 | LR: 2.00e-06
|
| 1177 |
+
[2026-04-25 19:27:10] Epoch 1 | Step 9100 | Loss: 1.0324 | LR: 2.00e-06
|
| 1178 |
+
[2026-04-25 19:27:13] Epoch 1 | Step 9110 | Loss: 1.0325 | LR: 2.00e-06
|
| 1179 |
+
[2026-04-25 19:27:15] Epoch 1 | Step 9120 | Loss: 1.0326 | LR: 2.00e-06
|
| 1180 |
+
[2026-04-25 19:27:17] Epoch 1 | Step 9130 | Loss: 1.0324 | LR: 2.00e-06
|
| 1181 |
+
[2026-04-25 19:27:20] Epoch 1 | Step 9140 | Loss: 1.0324 | LR: 2.00e-06
|
| 1182 |
+
[2026-04-25 19:27:22] Epoch 1 | Step 9150 | Loss: 1.0325 | LR: 2.00e-06
|
| 1183 |
+
[2026-04-25 19:27:25] Epoch 1 | Step 9160 | Loss: 1.0324 | LR: 2.00e-06
|
| 1184 |
+
[2026-04-25 19:27:27] Epoch 1 | Step 9170 | Loss: 1.0322 | LR: 2.00e-06
|
| 1185 |
+
[2026-04-25 19:27:30] Epoch 1 | Step 9180 | Loss: 1.0322 | LR: 2.00e-06
|
| 1186 |
+
[2026-04-25 19:27:32] Epoch 1 | Step 9190 | Loss: 1.0319 | LR: 2.00e-06
|
| 1187 |
+
[2026-04-25 19:27:35] Epoch 1 | Step 9200 | Loss: 1.0319 | LR: 2.00e-06
|
| 1188 |
+
[2026-04-25 19:27:38] Epoch 1 | Step 9210 | Loss: 1.0320 | LR: 2.00e-06
|
| 1189 |
+
[2026-04-25 19:27:40] Epoch 1 | Step 9220 | Loss: 1.0319 | LR: 2.00e-06
|
| 1190 |
+
[2026-04-25 19:27:43] Epoch 1 | Step 9230 | Loss: 1.0318 | LR: 2.00e-06
|
| 1191 |
+
[2026-04-25 19:27:45] Epoch 1 | Step 9240 | Loss: 1.0316 | LR: 2.00e-06
|
| 1192 |
+
[2026-04-25 19:27:48] Epoch 1 | Step 9250 | Loss: 1.0315 | LR: 2.00e-06
|
| 1193 |
+
[2026-04-25 19:27:50] Epoch 1 | Step 9260 | Loss: 1.0314 | LR: 2.00e-06
|
| 1194 |
+
[2026-04-25 19:27:53] Epoch 1 | Step 9270 | Loss: 1.0313 | LR: 2.00e-06
|
| 1195 |
+
[2026-04-25 19:27:55] Epoch 1 | Step 9280 | Loss: 1.0313 | LR: 2.00e-06
|
| 1196 |
+
[2026-04-25 19:27:58] Epoch 1 | Step 9290 | Loss: 1.0313 | LR: 2.00e-06
|
| 1197 |
+
[2026-04-25 19:28:00] Epoch 1 | Step 9300 | Loss: 1.0313 | LR: 2.00e-06
|
| 1198 |
+
[2026-04-25 19:28:03] Epoch 1 | Step 9310 | Loss: 1.0312 | LR: 2.00e-06
|
| 1199 |
+
[2026-04-25 19:28:05] Epoch 1 | Step 9320 | Loss: 1.0312 | LR: 2.00e-06
|
| 1200 |
+
[2026-04-25 19:28:08] Epoch 1 | Step 9330 | Loss: 1.0311 | LR: 2.00e-06
|
| 1201 |
+
[2026-04-25 19:28:10] Epoch 1 | Step 9340 | Loss: 1.0310 | LR: 2.00e-06
|
| 1202 |
+
[2026-04-25 19:28:13] Epoch 1 | Step 9350 | Loss: 1.0310 | LR: 2.00e-06
|
| 1203 |
+
[2026-04-25 19:28:15] Epoch 1 | Step 9360 | Loss: 1.0309 | LR: 2.00e-06
|
| 1204 |
+
[2026-04-25 19:28:18] Epoch 1 | Step 9370 | Loss: 1.0309 | LR: 2.00e-06
|
| 1205 |
+
[2026-04-25 19:28:20] Epoch 1 | Step 9380 | Loss: 1.0309 | LR: 2.00e-06
|
| 1206 |
+
[2026-04-25 19:28:23] Epoch 1 | Step 9390 | Loss: 1.0307 | LR: 2.00e-06
|
| 1207 |
+
[2026-04-25 19:28:25] Epoch 1 | Step 9400 | Loss: 1.0308 | LR: 2.00e-06
|
| 1208 |
+
[2026-04-25 19:28:28] Epoch 1 | Step 9410 | Loss: 1.0308 | LR: 2.00e-06
|
| 1209 |
+
[2026-04-25 19:28:31] Epoch 1 | Step 9420 | Loss: 1.0309 | LR: 2.00e-06
|
| 1210 |
+
[2026-04-25 19:28:33] Epoch 1 | Step 9430 | Loss: 1.0309 | LR: 2.00e-06
|
| 1211 |
+
[2026-04-25 19:28:36] Epoch 1 | Step 9440 | Loss: 1.0309 | LR: 2.00e-06
|
| 1212 |
+
[2026-04-25 19:28:38] Epoch 1 | Step 9450 | Loss: 1.0310 | LR: 2.00e-06
|
| 1213 |
+
[2026-04-25 19:28:41] Epoch 1 | Step 9460 | Loss: 1.0309 | LR: 2.00e-06
|
| 1214 |
+
[2026-04-25 19:28:43] Epoch 1 | Step 9470 | Loss: 1.0308 | LR: 2.00e-06
|
| 1215 |
+
[2026-04-25 19:28:46] Epoch 1 | Step 9480 | Loss: 1.0306 | LR: 2.00e-06
|
| 1216 |
+
[2026-04-25 19:28:48] Epoch 1 | Step 9490 | Loss: 1.0306 | LR: 2.00e-06
|
| 1217 |
+
[2026-04-25 19:28:50] Epoch 1 | Step 9500 | Loss: 1.0306 | LR: 2.00e-06
|
| 1218 |
+
[2026-04-25 19:28:53] Epoch 1 | Step 9510 | Loss: 1.0306 | LR: 2.00e-06
|
| 1219 |
+
[2026-04-25 19:28:56] Epoch 1 | Step 9520 | Loss: 1.0305 | LR: 2.00e-06
|
| 1220 |
+
[2026-04-25 19:28:58] Epoch 1 | Step 9530 | Loss: 1.0306 | LR: 2.00e-06
|
| 1221 |
+
[2026-04-25 19:29:01] Epoch 1 | Step 9540 | Loss: 1.0304 | LR: 2.00e-06
|
| 1222 |
+
[2026-04-25 19:29:03] Epoch 1 | Step 9550 | Loss: 1.0305 | LR: 2.00e-06
|
| 1223 |
+
[2026-04-25 19:29:06] Epoch 1 | Step 9560 | Loss: 1.0305 | LR: 2.00e-06
|
| 1224 |
+
[2026-04-25 19:29:08] Epoch 1 | Step 9570 | Loss: 1.0305 | LR: 2.00e-06
|
| 1225 |
+
[2026-04-25 19:29:11] Epoch 1 | Step 9580 | Loss: 1.0307 | LR: 2.00e-06
|
| 1226 |
+
[2026-04-25 19:29:13] Epoch 1 | Step 9590 | Loss: 1.0306 | LR: 2.00e-06
|
| 1227 |
+
[2026-04-25 19:29:16] Epoch 1 | Step 9600 | Loss: 1.0305 | LR: 2.00e-06
|
| 1228 |
+
[2026-04-25 19:29:19] Epoch 1 | Step 9610 | Loss: 1.0304 | LR: 2.00e-06
|
| 1229 |
+
[2026-04-25 19:29:22] Epoch 1 | Step 9620 | Loss: 1.0305 | LR: 2.00e-06
|
| 1230 |
+
[2026-04-25 19:29:24] Epoch 1 | Step 9630 | Loss: 1.0306 | LR: 2.00e-06
|
| 1231 |
+
[2026-04-25 19:29:27] Epoch 1 | Step 9640 | Loss: 1.0306 | LR: 2.00e-06
|
| 1232 |
+
[2026-04-25 19:29:30] Epoch 1 | Step 9650 | Loss: 1.0306 | LR: 2.00e-06
|
| 1233 |
+
[2026-04-25 19:29:32] Epoch 1 | Step 9660 | Loss: 1.0306 | LR: 2.00e-06
|
| 1234 |
+
[2026-04-25 19:29:35] Epoch 1 | Step 9670 | Loss: 1.0306 | LR: 2.00e-06
|
| 1235 |
+
[2026-04-25 19:29:37] Epoch 1 | Step 9680 | Loss: 1.0305 | LR: 2.00e-06
|
| 1236 |
+
[2026-04-25 19:29:40] Epoch 1 | Step 9690 | Loss: 1.0305 | LR: 2.00e-06
|
| 1237 |
+
[2026-04-25 19:29:42] Epoch 1 | Step 9700 | Loss: 1.0305 | LR: 2.00e-06
|
| 1238 |
+
[2026-04-25 19:29:45] Epoch 1 | Step 9710 | Loss: 1.0305 | LR: 2.00e-06
|
| 1239 |
+
[2026-04-25 19:29:48] Epoch 1 | Step 9720 | Loss: 1.0304 | LR: 2.00e-06
|
| 1240 |
+
[2026-04-25 19:29:50] Epoch 1 | Step 9730 | Loss: 1.0305 | LR: 2.00e-06
|
| 1241 |
+
[2026-04-25 19:29:52] Epoch 1 | Step 9740 | Loss: 1.0304 | LR: 2.00e-06
|
| 1242 |
+
[2026-04-25 19:29:55] Epoch 1 | Step 9750 | Loss: 1.0303 | LR: 2.00e-06
|
| 1243 |
+
[2026-04-25 19:29:57] Epoch 1 | Step 9760 | Loss: 1.0303 | LR: 2.00e-06
|
| 1244 |
+
[2026-04-25 19:30:00] Epoch 1 | Step 9770 | Loss: 1.0303 | LR: 2.00e-06
|
| 1245 |
+
[2026-04-25 19:30:02] Epoch 1 | Step 9780 | Loss: 1.0303 | LR: 2.00e-06
|
| 1246 |
+
[2026-04-25 19:30:05] Epoch 1 | Step 9790 | Loss: 1.0302 | LR: 2.00e-06
|
| 1247 |
+
[2026-04-25 19:30:08] Epoch 1 | Step 9800 | Loss: 1.0302 | LR: 2.00e-06
|
| 1248 |
+
[2026-04-25 19:30:10] Epoch 1 | Step 9810 | Loss: 1.0301 | LR: 2.00e-06
|
| 1249 |
+
[2026-04-25 19:30:13] Epoch 1 | Step 9820 | Loss: 1.0301 | LR: 2.00e-06
|
| 1250 |
+
[2026-04-25 19:30:16] Epoch 1 | Step 9830 | Loss: 1.0301 | LR: 2.00e-06
|
| 1251 |
+
[2026-04-25 19:30:18] Epoch 1 | Step 9840 | Loss: 1.0302 | LR: 2.00e-06
|
| 1252 |
+
[2026-04-25 19:30:21] Epoch 1 | Step 9850 | Loss: 1.0301 | LR: 2.00e-06
|
| 1253 |
+
[2026-04-25 19:30:23] Epoch 1 | Step 9860 | Loss: 1.0300 | LR: 2.00e-06
|
| 1254 |
+
[2026-04-25 19:30:26] Epoch 1 | Step 9870 | Loss: 1.0301 | LR: 2.00e-06
|
| 1255 |
+
[2026-04-25 19:30:28] Epoch 1 | Step 9880 | Loss: 1.0301 | LR: 2.00e-06
|
| 1256 |
+
[2026-04-25 19:30:30] Epoch 1 completed in 2521.97s | Loss: 1.0301
|
| 1257 |
+
[2026-04-25 19:30:30]
|
| 1258 |
+
Training completed!
|
| 1259 |
+
[2026-04-25 19:30:33] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_2e-5/model_final.pt
|
lr_sweep/pythia_1b_lr_5e-5/train.log
ADDED
|
@@ -0,0 +1,1201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2026-04-25 17:57:52,688][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 2 |
+
[2026-04-25 17:57:52] CUDA_VISIBLE_DEVICES: 2,3
|
| 3 |
+
[2026-04-25 17:57:52] Number of processes: 2
|
| 4 |
+
[2026-04-25 17:57:52] Process index: 0
|
| 5 |
+
[2026-04-25 17:57:52] Mixed precision: bf16
|
| 6 |
+
[2026-04-25 17:57:52] ============================================================
|
| 7 |
+
[2026-04-25 17:57:52] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 8 |
+
[2026-04-25 17:57:52] ============================================================
|
| 9 |
+
[2026-04-25 17:57:52] Config:
|
| 10 |
+
model:
|
| 11 |
+
name: EleutherAI/pythia-1b
|
| 12 |
+
checkpoint_path: null
|
| 13 |
+
from_scratch: false
|
| 14 |
+
training:
|
| 15 |
+
epochs: 1
|
| 16 |
+
batch_size: 4
|
| 17 |
+
eval_batch_size: 12
|
| 18 |
+
gradient_accumulation_steps: 4
|
| 19 |
+
lr: 5.0e-05
|
| 20 |
+
weight_decay: 0.1
|
| 21 |
+
betas:
|
| 22 |
+
- 0.9
|
| 23 |
+
- 0.95
|
| 24 |
+
eps: 1.0e-08
|
| 25 |
+
lr_scheduler: wsd
|
| 26 |
+
warmup_ratio: 0.1
|
| 27 |
+
decay_ratio: 0.2
|
| 28 |
+
warmup_steps: 100
|
| 29 |
+
min_lr_ratio: 0.1
|
| 30 |
+
max_grad_norm: 1.0
|
| 31 |
+
use_amp: true
|
| 32 |
+
resume: false
|
| 33 |
+
resume_checkpoint: null
|
| 34 |
+
data:
|
| 35 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 36 |
+
max_context_len: 4096
|
| 37 |
+
max_target_len: 256
|
| 38 |
+
num_workers: 4
|
| 39 |
+
pin_memory: true
|
| 40 |
+
max_train_samples: 20000
|
| 41 |
+
max_val_samples: 2000
|
| 42 |
+
logging:
|
| 43 |
+
log_interval: 10
|
| 44 |
+
save_interval: 3000
|
| 45 |
+
eval_interval: 1000
|
| 46 |
+
save_every_epoch: true
|
| 47 |
+
tracking:
|
| 48 |
+
enabled: true
|
| 49 |
+
backend: wandb
|
| 50 |
+
project: code-completion_lr-sweep
|
| 51 |
+
run_name: pythia_1b_lr_5e-5
|
| 52 |
+
entity: null
|
| 53 |
+
base_url: https://wandb.platun0v.ru
|
| 54 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5
|
| 55 |
+
paths:
|
| 56 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5
|
| 57 |
+
seed: 42
|
| 58 |
+
device: cuda
|
| 59 |
+
|
| 60 |
+
[2026-04-25 17:57:54] Initializing tokenizer...
|
| 61 |
+
[2026-04-25 17:57:55] Loading model...
|
| 62 |
+
[2026-04-25 17:57:58] Loaded pretrained: EleutherAI/pythia-1b
|
| 63 |
+
[2026-04-25 17:57:58] Total params: 1,011,781,632
|
| 64 |
+
[2026-04-25 17:57:58] Trainable params: 1,011,781,632
|
| 65 |
+
[2026-04-25 17:57:58] Creating dataloaders...
|
| 66 |
+
[2026-04-25 17:57:58] Train dataset size: 20000
|
| 67 |
+
[2026-04-25 17:57:58] Train batches per epoch (before DDP split): 5000
|
| 68 |
+
[2026-04-25 17:57:58] Validation dataset size: 2000
|
| 69 |
+
[2026-04-25 17:57:58] Validation batches: 167
|
| 70 |
+
[2026-04-25 17:57:58] Creating optimizer...
|
| 71 |
+
[2026-04-25 17:57:58] Total steps: 625, Steps per epoch: 2500
|
| 72 |
+
[2026-04-25 17:57:58] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 73 |
+
[2026-04-25 17:58:00] Train batches per epoch (after DDP split): 2500
|
| 74 |
+
[2026-04-25 17:58:00] Starting training...
|
| 75 |
+
[2026-04-25 17:58:00]
|
| 76 |
+
============================================================
|
| 77 |
+
[2026-04-25 17:58:00] EPOCH 1/1
|
| 78 |
+
[2026-04-25 17:58:00] ============================================================
|
| 79 |
+
[2026-04-25 17:58:03] Epoch 1 | Step 10 | Loss: 1.6976 | LR: 1.95e-05
|
| 80 |
+
[2026-04-25 17:58:06] Epoch 1 | Step 20 | Loss: 1.3818 | LR: 3.40e-05
|
| 81 |
+
[2026-04-25 17:58:09] Epoch 1 | Step 30 | Loss: 1.2980 | LR: 4.85e-05
|
| 82 |
+
[2026-04-25 17:58:11] Epoch 1 | Step 40 | Loss: 1.2758 | LR: 5.00e-05
|
| 83 |
+
[2026-04-25 17:58:12] Training interrupted by user
|
| 84 |
+
[2026-04-25 17:58:19] Checkpoint saved: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5/checkpoints/checkpoint_step_0.pt
|
| 85 |
+
[2026-04-25 17:58:25]
|
| 86 |
+
Training completed!
|
| 87 |
+
[2026-04-25 17:58:27] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5/model_final.pt
|
| 88 |
+
[2026-04-25 19:30:44,525][accelerate.utils.other][WARNING] - Detected kernel version 5.4.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
|
| 89 |
+
[2026-04-25 19:30:44] CUDA_VISIBLE_DEVICES: 2,3
|
| 90 |
+
[2026-04-25 19:30:44] Number of processes: 2
|
| 91 |
+
[2026-04-25 19:30:44] Process index: 0
|
| 92 |
+
[2026-04-25 19:30:44] Mixed precision: bf16
|
| 93 |
+
[2026-04-25 19:30:44] ============================================================
|
| 94 |
+
[2026-04-25 19:30:44] Pythia Training Pipeline (Hydra + Trackio + Accelerate)
|
| 95 |
+
[2026-04-25 19:30:44] ============================================================
|
| 96 |
+
[2026-04-25 19:30:44] Config:
|
| 97 |
+
model:
|
| 98 |
+
name: EleutherAI/pythia-1b
|
| 99 |
+
checkpoint_path: null
|
| 100 |
+
from_scratch: false
|
| 101 |
+
training:
|
| 102 |
+
epochs: 1
|
| 103 |
+
batch_size: 4
|
| 104 |
+
eval_batch_size: 12
|
| 105 |
+
gradient_accumulation_steps: 4
|
| 106 |
+
lr: 5.0e-05
|
| 107 |
+
weight_decay: 0.1
|
| 108 |
+
betas:
|
| 109 |
+
- 0.9
|
| 110 |
+
- 0.95
|
| 111 |
+
eps: 1.0e-08
|
| 112 |
+
lr_scheduler: wsd
|
| 113 |
+
warmup_ratio: 0.1
|
| 114 |
+
decay_ratio: 0.2
|
| 115 |
+
warmup_steps: 100
|
| 116 |
+
min_lr_ratio: 0.1
|
| 117 |
+
max_grad_norm: 1.0
|
| 118 |
+
use_amp: true
|
| 119 |
+
resume: false
|
| 120 |
+
resume_checkpoint: null
|
| 121 |
+
data:
|
| 122 |
+
path: /workspace/byte-llms-code/code_completion_exp/datasets/data_V4_full
|
| 123 |
+
max_context_len: 4096
|
| 124 |
+
max_target_len: 256
|
| 125 |
+
num_workers: 4
|
| 126 |
+
pin_memory: true
|
| 127 |
+
max_train_samples: null
|
| 128 |
+
max_val_samples: 2000
|
| 129 |
+
logging:
|
| 130 |
+
log_interval: 10
|
| 131 |
+
save_interval: 0
|
| 132 |
+
eval_interval: 2000
|
| 133 |
+
save_every_epoch: false
|
| 134 |
+
tracking:
|
| 135 |
+
enabled: true
|
| 136 |
+
backend: wandb
|
| 137 |
+
project: code-completion_lr-sweep
|
| 138 |
+
run_name: pythia_1b_lr_5e-5
|
| 139 |
+
entity: null
|
| 140 |
+
base_url: https://wandb.platun0v.ru
|
| 141 |
+
local_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5
|
| 142 |
+
paths:
|
| 143 |
+
output_dir: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5
|
| 144 |
+
seed: 42
|
| 145 |
+
device: cuda
|
| 146 |
+
|
| 147 |
+
[2026-04-25 19:30:46] Initializing tokenizer...
|
| 148 |
+
[2026-04-25 19:30:47] Loading model...
|
| 149 |
+
[2026-04-25 19:30:50] Loaded pretrained: EleutherAI/pythia-1b
|
| 150 |
+
[2026-04-25 19:30:50] Total params: 1,011,781,632
|
| 151 |
+
[2026-04-25 19:30:50] Trainable params: 1,011,781,632
|
| 152 |
+
[2026-04-25 19:30:50] Creating dataloaders...
|
| 153 |
+
[2026-04-25 19:30:50] Train dataset size: 316397
|
| 154 |
+
[2026-04-25 19:30:50] Train batches per epoch (before DDP split): 79100
|
| 155 |
+
[2026-04-25 19:30:50] Validation dataset size: 2000
|
| 156 |
+
[2026-04-25 19:30:50] Validation batches: 167
|
| 157 |
+
[2026-04-25 19:30:50] Creating optimizer...
|
| 158 |
+
[2026-04-25 19:30:50] Total steps: 9887, Steps per epoch: 39550
|
| 159 |
+
[2026-04-25 19:30:50] Preparing model, optimizer, and dataloaders with Accelerate...
|
| 160 |
+
[2026-04-25 19:30:51] Train batches per epoch (after DDP split): 39550
|
| 161 |
+
[2026-04-25 19:30:51] Starting training...
|
| 162 |
+
[2026-04-25 19:30:51]
|
| 163 |
+
============================================================
|
| 164 |
+
[2026-04-25 19:30:51] EPOCH 1/1
|
| 165 |
+
[2026-04-25 19:30:51] ============================================================
|
| 166 |
+
[2026-04-25 19:30:55] Epoch 1 | Step 10 | Loss: 2.1524 | LR: 5.91e-06
|
| 167 |
+
[2026-04-25 19:30:57] Epoch 1 | Step 20 | Loss: 1.8675 | LR: 6.82e-06
|
| 168 |
+
[2026-04-25 19:31:00] Epoch 1 | Step 30 | Loss: 1.6663 | LR: 7.73e-06
|
| 169 |
+
[2026-04-25 19:31:03] Epoch 1 | Step 40 | Loss: 1.5603 | LR: 8.64e-06
|
| 170 |
+
[2026-04-25 19:31:05] Epoch 1 | Step 50 | Loss: 1.4615 | LR: 9.55e-06
|
| 171 |
+
[2026-04-25 19:31:08] Epoch 1 | Step 60 | Loss: 1.3987 | LR: 1.05e-05
|
| 172 |
+
[2026-04-25 19:31:11] Epoch 1 | Step 70 | Loss: 1.3411 | LR: 1.14e-05
|
| 173 |
+
[2026-04-25 19:31:13] Epoch 1 | Step 80 | Loss: 1.3206 | LR: 1.23e-05
|
| 174 |
+
[2026-04-25 19:31:16] Epoch 1 | Step 90 | Loss: 1.2938 | LR: 1.32e-05
|
| 175 |
+
[2026-04-25 19:31:19] Epoch 1 | Step 100 | Loss: 1.2705 | LR: 1.41e-05
|
| 176 |
+
[2026-04-25 19:31:21] Epoch 1 | Step 110 | Loss: 1.2636 | LR: 1.50e-05
|
| 177 |
+
[2026-04-25 19:31:24] Epoch 1 | Step 120 | Loss: 1.2522 | LR: 1.59e-05
|
| 178 |
+
[2026-04-25 19:31:26] Epoch 1 | Step 130 | Loss: 1.2495 | LR: 1.68e-05
|
| 179 |
+
[2026-04-25 19:31:29] Epoch 1 | Step 140 | Loss: 1.2478 | LR: 1.78e-05
|
| 180 |
+
[2026-04-25 19:31:31] Epoch 1 | Step 150 | Loss: 1.2312 | LR: 1.87e-05
|
| 181 |
+
[2026-04-25 19:31:34] Epoch 1 | Step 160 | Loss: 1.2177 | LR: 1.96e-05
|
| 182 |
+
[2026-04-25 19:31:36] Epoch 1 | Step 170 | Loss: 1.2106 | LR: 2.05e-05
|
| 183 |
+
[2026-04-25 19:31:39] Epoch 1 | Step 180 | Loss: 1.1969 | LR: 2.14e-05
|
| 184 |
+
[2026-04-25 19:31:41] Epoch 1 | Step 190 | Loss: 1.1936 | LR: 2.23e-05
|
| 185 |
+
[2026-04-25 19:31:44] Epoch 1 | Step 200 | Loss: 1.1900 | LR: 2.32e-05
|
| 186 |
+
[2026-04-25 19:31:46] Epoch 1 | Step 210 | Loss: 1.1927 | LR: 2.41e-05
|
| 187 |
+
[2026-04-25 19:31:49] Epoch 1 | Step 220 | Loss: 1.1902 | LR: 2.50e-05
|
| 188 |
+
[2026-04-25 19:31:51] Epoch 1 | Step 230 | Loss: 1.1810 | LR: 2.60e-05
|
| 189 |
+
[2026-04-25 19:31:54] Epoch 1 | Step 240 | Loss: 1.1758 | LR: 2.69e-05
|
| 190 |
+
[2026-04-25 19:31:56] Epoch 1 | Step 250 | Loss: 1.1720 | LR: 2.78e-05
|
| 191 |
+
[2026-04-25 19:31:59] Epoch 1 | Step 260 | Loss: 1.1742 | LR: 2.87e-05
|
| 192 |
+
[2026-04-25 19:32:02] Epoch 1 | Step 270 | Loss: 1.1699 | LR: 2.96e-05
|
| 193 |
+
[2026-04-25 19:32:04] Epoch 1 | Step 280 | Loss: 1.1642 | LR: 3.05e-05
|
| 194 |
+
[2026-04-25 19:32:07] Epoch 1 | Step 290 | Loss: 1.1616 | LR: 3.14e-05
|
| 195 |
+
[2026-04-25 19:32:09] Epoch 1 | Step 300 | Loss: 1.1595 | LR: 3.23e-05
|
| 196 |
+
[2026-04-25 19:32:12] Epoch 1 | Step 310 | Loss: 1.1566 | LR: 3.32e-05
|
| 197 |
+
[2026-04-25 19:32:14] Epoch 1 | Step 320 | Loss: 1.1522 | LR: 3.41e-05
|
| 198 |
+
[2026-04-25 19:32:17] Epoch 1 | Step 330 | Loss: 1.1486 | LR: 3.51e-05
|
| 199 |
+
[2026-04-25 19:32:20] Epoch 1 | Step 340 | Loss: 1.1466 | LR: 3.60e-05
|
| 200 |
+
[2026-04-25 19:32:22] Epoch 1 | Step 350 | Loss: 1.1467 | LR: 3.69e-05
|
| 201 |
+
[2026-04-25 19:32:25] Epoch 1 | Step 360 | Loss: 1.1434 | LR: 3.78e-05
|
| 202 |
+
[2026-04-25 19:32:28] Epoch 1 | Step 370 | Loss: 1.1389 | LR: 3.87e-05
|
| 203 |
+
[2026-04-25 19:32:31] Epoch 1 | Step 380 | Loss: 1.1363 | LR: 3.96e-05
|
| 204 |
+
[2026-04-25 19:32:33] Epoch 1 | Step 390 | Loss: 1.1352 | LR: 4.05e-05
|
| 205 |
+
[2026-04-25 19:32:36] Epoch 1 | Step 400 | Loss: 1.1346 | LR: 4.14e-05
|
| 206 |
+
[2026-04-25 19:32:38] Epoch 1 | Step 410 | Loss: 1.1341 | LR: 4.23e-05
|
| 207 |
+
[2026-04-25 19:32:41] Epoch 1 | Step 420 | Loss: 1.1331 | LR: 4.33e-05
|
| 208 |
+
[2026-04-25 19:32:44] Epoch 1 | Step 430 | Loss: 1.1364 | LR: 4.42e-05
|
| 209 |
+
[2026-04-25 19:32:46] Epoch 1 | Step 440 | Loss: 1.1334 | LR: 4.51e-05
|
| 210 |
+
[2026-04-25 19:32:49] Epoch 1 | Step 450 | Loss: 1.1329 | LR: 4.60e-05
|
| 211 |
+
[2026-04-25 19:32:52] Epoch 1 | Step 460 | Loss: 1.1340 | LR: 4.69e-05
|
| 212 |
+
[2026-04-25 19:32:54] Epoch 1 | Step 470 | Loss: 1.1333 | LR: 4.78e-05
|
| 213 |
+
[2026-04-25 19:32:57] Epoch 1 | Step 480 | Loss: 1.1345 | LR: 4.87e-05
|
| 214 |
+
[2026-04-25 19:33:00] Epoch 1 | Step 490 | Loss: 1.1336 | LR: 4.96e-05
|
| 215 |
+
[2026-04-25 19:33:02] Epoch 1 | Step 500 | Loss: 1.1338 | LR: 5.00e-05
|
| 216 |
+
[2026-04-25 19:33:04] Epoch 1 | Step 510 | Loss: 1.1334 | LR: 5.00e-05
|
| 217 |
+
[2026-04-25 19:33:07] Epoch 1 | Step 520 | Loss: 1.1345 | LR: 5.00e-05
|
| 218 |
+
[2026-04-25 19:33:10] Epoch 1 | Step 530 | Loss: 1.1333 | LR: 5.00e-05
|
| 219 |
+
[2026-04-25 19:33:12] Epoch 1 | Step 540 | Loss: 1.1320 | LR: 5.00e-05
|
| 220 |
+
[2026-04-25 19:33:15] Epoch 1 | Step 550 | Loss: 1.1320 | LR: 5.00e-05
|
| 221 |
+
[2026-04-25 19:33:17] Epoch 1 | Step 560 | Loss: 1.1316 | LR: 5.00e-05
|
| 222 |
+
[2026-04-25 19:33:20] Epoch 1 | Step 570 | Loss: 1.1330 | LR: 5.00e-05
|
| 223 |
+
[2026-04-25 19:33:22] Epoch 1 | Step 580 | Loss: 1.1348 | LR: 5.00e-05
|
| 224 |
+
[2026-04-25 19:33:25] Epoch 1 | Step 590 | Loss: 1.1367 | LR: 5.00e-05
|
| 225 |
+
[2026-04-25 19:33:28] Epoch 1 | Step 600 | Loss: 1.1377 | LR: 5.00e-05
|
| 226 |
+
[2026-04-25 19:33:30] Epoch 1 | Step 610 | Loss: 1.1404 | LR: 5.00e-05
|
| 227 |
+
[2026-04-25 19:33:32] Epoch 1 | Step 620 | Loss: 1.1430 | LR: 5.00e-05
|
| 228 |
+
[2026-04-25 19:33:35] Epoch 1 | Step 630 | Loss: 1.1441 | LR: 5.00e-05
|
| 229 |
+
[2026-04-25 19:33:37] Epoch 1 | Step 640 | Loss: 1.1462 | LR: 5.00e-05
|
| 230 |
+
[2026-04-25 19:33:40] Epoch 1 | Step 650 | Loss: 1.1474 | LR: 5.00e-05
|
| 231 |
+
[2026-04-25 19:33:42] Epoch 1 | Step 660 | Loss: 1.1492 | LR: 5.00e-05
|
| 232 |
+
[2026-04-25 19:33:45] Epoch 1 | Step 670 | Loss: 1.1490 | LR: 5.00e-05
|
| 233 |
+
[2026-04-25 19:33:48] Epoch 1 | Step 680 | Loss: 1.1500 | LR: 5.00e-05
|
| 234 |
+
[2026-04-25 19:33:50] Epoch 1 | Step 690 | Loss: 1.1503 | LR: 5.00e-05
|
| 235 |
+
[2026-04-25 19:33:53] Epoch 1 | Step 700 | Loss: 1.1522 | LR: 5.00e-05
|
| 236 |
+
[2026-04-25 19:33:55] Epoch 1 | Step 710 | Loss: 1.1525 | LR: 5.00e-05
|
| 237 |
+
[2026-04-25 19:33:58] Epoch 1 | Step 720 | Loss: 1.1535 | LR: 5.00e-05
|
| 238 |
+
[2026-04-25 19:34:00] Epoch 1 | Step 730 | Loss: 1.1543 | LR: 5.00e-05
|
| 239 |
+
[2026-04-25 19:34:03] Epoch 1 | Step 740 | Loss: 1.1542 | LR: 5.00e-05
|
| 240 |
+
[2026-04-25 19:34:05] Epoch 1 | Step 750 | Loss: 1.1544 | LR: 5.00e-05
|
| 241 |
+
[2026-04-25 19:34:08] Epoch 1 | Step 760 | Loss: 1.1564 | LR: 5.00e-05
|
| 242 |
+
[2026-04-25 19:34:10] Epoch 1 | Step 770 | Loss: 1.1587 | LR: 5.00e-05
|
| 243 |
+
[2026-04-25 19:34:13] Epoch 1 | Step 780 | Loss: 1.1598 | LR: 5.00e-05
|
| 244 |
+
[2026-04-25 19:34:15] Epoch 1 | Step 790 | Loss: 1.1607 | LR: 5.00e-05
|
| 245 |
+
[2026-04-25 19:34:18] Epoch 1 | Step 800 | Loss: 1.1601 | LR: 5.00e-05
|
| 246 |
+
[2026-04-25 19:34:20] Epoch 1 | Step 810 | Loss: 1.1607 | LR: 5.00e-05
|
| 247 |
+
[2026-04-25 19:34:23] Epoch 1 | Step 820 | Loss: 1.1608 | LR: 5.00e-05
|
| 248 |
+
[2026-04-25 19:34:25] Epoch 1 | Step 830 | Loss: 1.1620 | LR: 5.00e-05
|
| 249 |
+
[2026-04-25 19:34:28] Epoch 1 | Step 840 | Loss: 1.1619 | LR: 5.00e-05
|
| 250 |
+
[2026-04-25 19:34:31] Epoch 1 | Step 850 | Loss: 1.1610 | LR: 5.00e-05
|
| 251 |
+
[2026-04-25 19:34:33] Epoch 1 | Step 860 | Loss: 1.1625 | LR: 5.00e-05
|
| 252 |
+
[2026-04-25 19:34:36] Epoch 1 | Step 870 | Loss: 1.1644 | LR: 5.00e-05
|
| 253 |
+
[2026-04-25 19:34:38] Epoch 1 | Step 880 | Loss: 1.1657 | LR: 5.00e-05
|
| 254 |
+
[2026-04-25 19:34:40] Epoch 1 | Step 890 | Loss: 1.1664 | LR: 5.00e-05
|
| 255 |
+
[2026-04-25 19:34:43] Epoch 1 | Step 900 | Loss: 1.1663 | LR: 5.00e-05
|
| 256 |
+
[2026-04-25 19:34:45] Epoch 1 | Step 910 | Loss: 1.1678 | LR: 5.00e-05
|
| 257 |
+
[2026-04-25 19:34:48] Epoch 1 | Step 920 | Loss: 1.1698 | LR: 5.00e-05
|
| 258 |
+
[2026-04-25 19:34:51] Epoch 1 | Step 930 | Loss: 1.1699 | LR: 5.00e-05
|
| 259 |
+
[2026-04-25 19:34:53] Epoch 1 | Step 940 | Loss: 1.1709 | LR: 5.00e-05
|
| 260 |
+
[2026-04-25 19:34:56] Epoch 1 | Step 950 | Loss: 1.1697 | LR: 5.00e-05
|
| 261 |
+
[2026-04-25 19:34:59] Epoch 1 | Step 960 | Loss: 1.1699 | LR: 5.00e-05
|
| 262 |
+
[2026-04-25 19:35:02] Epoch 1 | Step 970 | Loss: 1.1707 | LR: 5.00e-05
|
| 263 |
+
[2026-04-25 19:35:04] Epoch 1 | Step 980 | Loss: 1.1705 | LR: 5.00e-05
|
| 264 |
+
[2026-04-25 19:35:07] Epoch 1 | Step 990 | Loss: 1.1698 | LR: 5.00e-05
|
| 265 |
+
[2026-04-25 19:35:10] Epoch 1 | Step 1000 | Loss: 1.1701 | LR: 5.00e-05
|
| 266 |
+
[2026-04-25 19:35:12] Epoch 1 | Step 1010 | Loss: 1.1715 | LR: 5.00e-05
|
| 267 |
+
[2026-04-25 19:35:14] Epoch 1 | Step 1020 | Loss: 1.1718 | LR: 5.00e-05
|
| 268 |
+
[2026-04-25 19:35:17] Epoch 1 | Step 1030 | Loss: 1.1732 | LR: 5.00e-05
|
| 269 |
+
[2026-04-25 19:35:19] Epoch 1 | Step 1040 | Loss: 1.1720 | LR: 5.00e-05
|
| 270 |
+
[2026-04-25 19:35:22] Epoch 1 | Step 1050 | Loss: 1.1719 | LR: 5.00e-05
|
| 271 |
+
[2026-04-25 19:35:24] Epoch 1 | Step 1060 | Loss: 1.1707 | LR: 5.00e-05
|
| 272 |
+
[2026-04-25 19:35:27] Epoch 1 | Step 1070 | Loss: 1.1707 | LR: 5.00e-05
|
| 273 |
+
[2026-04-25 19:35:29] Epoch 1 | Step 1080 | Loss: 1.1726 | LR: 5.00e-05
|
| 274 |
+
[2026-04-25 19:35:32] Epoch 1 | Step 1090 | Loss: 1.1749 | LR: 5.00e-05
|
| 275 |
+
[2026-04-25 19:35:34] Epoch 1 | Step 1100 | Loss: 1.1750 | LR: 5.00e-05
|
| 276 |
+
[2026-04-25 19:35:37] Epoch 1 | Step 1110 | Loss: 1.1760 | LR: 5.00e-05
|
| 277 |
+
[2026-04-25 19:35:39] Epoch 1 | Step 1120 | Loss: 1.1770 | LR: 5.00e-05
|
| 278 |
+
[2026-04-25 19:35:42] Epoch 1 | Step 1130 | Loss: 1.1777 | LR: 5.00e-05
|
| 279 |
+
[2026-04-25 19:35:45] Epoch 1 | Step 1140 | Loss: 1.1778 | LR: 5.00e-05
|
| 280 |
+
[2026-04-25 19:35:47] Epoch 1 | Step 1150 | Loss: 1.1764 | LR: 5.00e-05
|
| 281 |
+
[2026-04-25 19:35:50] Epoch 1 | Step 1160 | Loss: 1.1777 | LR: 5.00e-05
|
| 282 |
+
[2026-04-25 19:35:52] Epoch 1 | Step 1170 | Loss: 1.1790 | LR: 5.00e-05
|
| 283 |
+
[2026-04-25 19:35:55] Epoch 1 | Step 1180 | Loss: 1.1791 | LR: 5.00e-05
|
| 284 |
+
[2026-04-25 19:35:58] Epoch 1 | Step 1190 | Loss: 1.1799 | LR: 5.00e-05
|
| 285 |
+
[2026-04-25 19:36:00] Epoch 1 | Step 1200 | Loss: 1.1797 | LR: 5.00e-05
|
| 286 |
+
[2026-04-25 19:36:03] Epoch 1 | Step 1210 | Loss: 1.1787 | LR: 5.00e-05
|
| 287 |
+
[2026-04-25 19:36:05] Epoch 1 | Step 1220 | Loss: 1.1775 | LR: 5.00e-05
|
| 288 |
+
[2026-04-25 19:36:08] Epoch 1 | Step 1230 | Loss: 1.1783 | LR: 5.00e-05
|
| 289 |
+
[2026-04-25 19:36:11] Epoch 1 | Step 1240 | Loss: 1.1790 | LR: 5.00e-05
|
| 290 |
+
[2026-04-25 19:36:13] Epoch 1 | Step 1250 | Loss: 1.1786 | LR: 5.00e-05
|
| 291 |
+
[2026-04-25 19:36:16] Epoch 1 | Step 1260 | Loss: 1.1788 | LR: 5.00e-05
|
| 292 |
+
[2026-04-25 19:36:18] Epoch 1 | Step 1270 | Loss: 1.1777 | LR: 5.00e-05
|
| 293 |
+
[2026-04-25 19:36:20] Epoch 1 | Step 1280 | Loss: 1.1787 | LR: 5.00e-05
|
| 294 |
+
[2026-04-25 19:36:23] Epoch 1 | Step 1290 | Loss: 1.1795 | LR: 5.00e-05
|
| 295 |
+
[2026-04-25 19:36:26] Epoch 1 | Step 1300 | Loss: 1.1790 | LR: 5.00e-05
|
| 296 |
+
[2026-04-25 19:36:28] Epoch 1 | Step 1310 | Loss: 1.1792 | LR: 5.00e-05
|
| 297 |
+
[2026-04-25 19:36:31] Epoch 1 | Step 1320 | Loss: 1.1798 | LR: 5.00e-05
|
| 298 |
+
[2026-04-25 19:36:33] Epoch 1 | Step 1330 | Loss: 1.1793 | LR: 5.00e-05
|
| 299 |
+
[2026-04-25 19:36:35] Epoch 1 | Step 1340 | Loss: 1.1796 | LR: 5.00e-05
|
| 300 |
+
[2026-04-25 19:36:38] Epoch 1 | Step 1350 | Loss: 1.1804 | LR: 5.00e-05
|
| 301 |
+
[2026-04-25 19:36:41] Epoch 1 | Step 1360 | Loss: 1.1804 | LR: 5.00e-05
|
| 302 |
+
[2026-04-25 19:36:43] Epoch 1 | Step 1370 | Loss: 1.1805 | LR: 5.00e-05
|
| 303 |
+
[2026-04-25 19:36:46] Epoch 1 | Step 1380 | Loss: 1.1817 | LR: 5.00e-05
|
| 304 |
+
[2026-04-25 19:36:48] Epoch 1 | Step 1390 | Loss: 1.1826 | LR: 5.00e-05
|
| 305 |
+
[2026-04-25 19:36:51] Epoch 1 | Step 1400 | Loss: 1.1828 | LR: 5.00e-05
|
| 306 |
+
[2026-04-25 19:36:54] Epoch 1 | Step 1410 | Loss: 1.1819 | LR: 5.00e-05
|
| 307 |
+
[2026-04-25 19:36:56] Epoch 1 | Step 1420 | Loss: 1.1821 | LR: 5.00e-05
|
| 308 |
+
[2026-04-25 19:36:59] Epoch 1 | Step 1430 | Loss: 1.1823 | LR: 5.00e-05
|
| 309 |
+
[2026-04-25 19:37:01] Epoch 1 | Step 1440 | Loss: 1.1822 | LR: 5.00e-05
|
| 310 |
+
[2026-04-25 19:37:04] Epoch 1 | Step 1450 | Loss: 1.1824 | LR: 5.00e-05
|
| 311 |
+
[2026-04-25 19:37:06] Epoch 1 | Step 1460 | Loss: 1.1814 | LR: 5.00e-05
|
| 312 |
+
[2026-04-25 19:37:09] Epoch 1 | Step 1470 | Loss: 1.1823 | LR: 5.00e-05
|
| 313 |
+
[2026-04-25 19:37:12] Epoch 1 | Step 1480 | Loss: 1.1825 | LR: 5.00e-05
|
| 314 |
+
[2026-04-25 19:37:14] Epoch 1 | Step 1490 | Loss: 1.1834 | LR: 5.00e-05
|
| 315 |
+
[2026-04-25 19:37:16] Epoch 1 | Step 1500 | Loss: 1.1832 | LR: 5.00e-05
|
| 316 |
+
[2026-04-25 19:37:19] Epoch 1 | Step 1510 | Loss: 1.1836 | LR: 5.00e-05
|
| 317 |
+
[2026-04-25 19:37:22] Epoch 1 | Step 1520 | Loss: 1.1842 | LR: 5.00e-05
|
| 318 |
+
[2026-04-25 19:37:24] Epoch 1 | Step 1530 | Loss: 1.1842 | LR: 5.00e-05
|
| 319 |
+
[2026-04-25 19:37:26] Epoch 1 | Step 1540 | Loss: 1.1850 | LR: 5.00e-05
|
| 320 |
+
[2026-04-25 19:37:29] Epoch 1 | Step 1550 | Loss: 1.1855 | LR: 5.00e-05
|
| 321 |
+
[2026-04-25 19:37:31] Epoch 1 | Step 1560 | Loss: 1.1850 | LR: 5.00e-05
|
| 322 |
+
[2026-04-25 19:37:34] Epoch 1 | Step 1570 | Loss: 1.1859 | LR: 5.00e-05
|
| 323 |
+
[2026-04-25 19:37:37] Epoch 1 | Step 1580 | Loss: 1.1856 | LR: 5.00e-05
|
| 324 |
+
[2026-04-25 19:37:39] Epoch 1 | Step 1590 | Loss: 1.1859 | LR: 5.00e-05
|
| 325 |
+
[2026-04-25 19:37:41] Epoch 1 | Step 1600 | Loss: 1.1862 | LR: 5.00e-05
|
| 326 |
+
[2026-04-25 19:37:44] Epoch 1 | Step 1610 | Loss: 1.1852 | LR: 5.00e-05
|
| 327 |
+
[2026-04-25 19:37:46] Epoch 1 | Step 1620 | Loss: 1.1843 | LR: 5.00e-05
|
| 328 |
+
[2026-04-25 19:37:49] Epoch 1 | Step 1630 | Loss: 1.1851 | LR: 5.00e-05
|
| 329 |
+
[2026-04-25 19:37:51] Epoch 1 | Step 1640 | Loss: 1.1852 | LR: 5.00e-05
|
| 330 |
+
[2026-04-25 19:37:54] Epoch 1 | Step 1650 | Loss: 1.1847 | LR: 5.00e-05
|
| 331 |
+
[2026-04-25 19:37:56] Epoch 1 | Step 1660 | Loss: 1.1842 | LR: 5.00e-05
|
| 332 |
+
[2026-04-25 19:37:58] Epoch 1 | Step 1670 | Loss: 1.1854 | LR: 5.00e-05
|
| 333 |
+
[2026-04-25 19:38:01] Epoch 1 | Step 1680 | Loss: 1.1857 | LR: 5.00e-05
|
| 334 |
+
[2026-04-25 19:38:04] Epoch 1 | Step 1690 | Loss: 1.1855 | LR: 5.00e-05
|
| 335 |
+
[2026-04-25 19:38:06] Epoch 1 | Step 1700 | Loss: 1.1848 | LR: 5.00e-05
|
| 336 |
+
[2026-04-25 19:38:09] Epoch 1 | Step 1710 | Loss: 1.1846 | LR: 5.00e-05
|
| 337 |
+
[2026-04-25 19:38:11] Epoch 1 | Step 1720 | Loss: 1.1844 | LR: 5.00e-05
|
| 338 |
+
[2026-04-25 19:38:14] Epoch 1 | Step 1730 | Loss: 1.1842 | LR: 5.00e-05
|
| 339 |
+
[2026-04-25 19:38:16] Epoch 1 | Step 1740 | Loss: 1.1845 | LR: 5.00e-05
|
| 340 |
+
[2026-04-25 19:38:19] Epoch 1 | Step 1750 | Loss: 1.1859 | LR: 5.00e-05
|
| 341 |
+
[2026-04-25 19:38:21] Epoch 1 | Step 1760 | Loss: 1.1856 | LR: 5.00e-05
|
| 342 |
+
[2026-04-25 19:38:24] Epoch 1 | Step 1770 | Loss: 1.1861 | LR: 5.00e-05
|
| 343 |
+
[2026-04-25 19:38:26] Epoch 1 | Step 1780 | Loss: 1.1860 | LR: 5.00e-05
|
| 344 |
+
[2026-04-25 19:38:29] Epoch 1 | Step 1790 | Loss: 1.1865 | LR: 5.00e-05
|
| 345 |
+
[2026-04-25 19:38:31] Epoch 1 | Step 1800 | Loss: 1.1860 | LR: 5.00e-05
|
| 346 |
+
[2026-04-25 19:38:34] Epoch 1 | Step 1810 | Loss: 1.1864 | LR: 5.00e-05
|
| 347 |
+
[2026-04-25 19:38:37] Epoch 1 | Step 1820 | Loss: 1.1870 | LR: 5.00e-05
|
| 348 |
+
[2026-04-25 19:38:39] Epoch 1 | Step 1830 | Loss: 1.1873 | LR: 5.00e-05
|
| 349 |
+
[2026-04-25 19:38:42] Epoch 1 | Step 1840 | Loss: 1.1877 | LR: 5.00e-05
|
| 350 |
+
[2026-04-25 19:38:44] Epoch 1 | Step 1850 | Loss: 1.1874 | LR: 5.00e-05
|
| 351 |
+
[2026-04-25 19:38:47] Epoch 1 | Step 1860 | Loss: 1.1879 | LR: 5.00e-05
|
| 352 |
+
[2026-04-25 19:38:49] Epoch 1 | Step 1870 | Loss: 1.1876 | LR: 5.00e-05
|
| 353 |
+
[2026-04-25 19:38:52] Epoch 1 | Step 1880 | Loss: 1.1871 | LR: 5.00e-05
|
| 354 |
+
[2026-04-25 19:38:55] Epoch 1 | Step 1890 | Loss: 1.1877 | LR: 5.00e-05
|
| 355 |
+
[2026-04-25 19:38:58] Epoch 1 | Step 1900 | Loss: 1.1877 | LR: 5.00e-05
|
| 356 |
+
[2026-04-25 19:39:00] Epoch 1 | Step 1910 | Loss: 1.1881 | LR: 5.00e-05
|
| 357 |
+
[2026-04-25 19:39:02] Epoch 1 | Step 1920 | Loss: 1.1887 | LR: 5.00e-05
|
| 358 |
+
[2026-04-25 19:39:05] Epoch 1 | Step 1930 | Loss: 1.1888 | LR: 5.00e-05
|
| 359 |
+
[2026-04-25 19:39:08] Epoch 1 | Step 1940 | Loss: 1.1886 | LR: 5.00e-05
|
| 360 |
+
[2026-04-25 19:39:10] Epoch 1 | Step 1950 | Loss: 1.1883 | LR: 5.00e-05
|
| 361 |
+
[2026-04-25 19:39:13] Epoch 1 | Step 1960 | Loss: 1.1885 | LR: 5.00e-05
|
| 362 |
+
[2026-04-25 19:39:15] Epoch 1 | Step 1970 | Loss: 1.1887 | LR: 5.00e-05
|
| 363 |
+
[2026-04-25 19:39:17] Epoch 1 | Step 1980 | Loss: 1.1894 | LR: 5.00e-05
|
| 364 |
+
[2026-04-25 19:39:20] Epoch 1 | Step 1990 | Loss: 1.1897 | LR: 5.00e-05
|
| 365 |
+
[2026-04-25 19:39:22] Epoch 1 | Step 2000 | Loss: 1.1897 | LR: 5.00e-05
|
| 366 |
+
[2026-04-25 19:39:23] Validation | Batch 10/84 | Loss: 1.1552
|
| 367 |
+
[2026-04-25 19:39:23] Validation | Batch 20/84 | Loss: 1.1688
|
| 368 |
+
[2026-04-25 19:39:24] Validation | Batch 30/84 | Loss: 1.2551
|
| 369 |
+
[2026-04-25 19:39:24] Validation | Batch 40/84 | Loss: 1.2580
|
| 370 |
+
[2026-04-25 19:39:24] Validation | Batch 50/84 | Loss: 1.2545
|
| 371 |
+
[2026-04-25 19:39:25] Validation | Batch 60/84 | Loss: 1.2285
|
| 372 |
+
[2026-04-25 19:39:26] Validation | Batch 70/84 | Loss: 1.2086
|
| 373 |
+
[2026-04-25 19:39:26] Validation | Batch 80/84 | Loss: 1.2153
|
| 374 |
+
[2026-04-25 19:39:26] Validation | Batch 84/84 | Loss: 1.2082
|
| 375 |
+
[2026-04-25 19:39:27] Validation | Loss: 1.2082 | PPL: 3.43 | Time: 3.85s
|
| 376 |
+
[2026-04-25 19:39:29] New best model saved! Val loss: 1.2082
|
| 377 |
+
[2026-04-25 19:39:31] Epoch 1 | Step 2010 | Loss: 1.1900 | LR: 5.00e-05
|
| 378 |
+
[2026-04-25 19:39:34] Epoch 1 | Step 2020 | Loss: 1.1900 | LR: 5.00e-05
|
| 379 |
+
[2026-04-25 19:39:36] Epoch 1 | Step 2030 | Loss: 1.1905 | LR: 5.00e-05
|
| 380 |
+
[2026-04-25 19:39:39] Epoch 1 | Step 2040 | Loss: 1.1907 | LR: 5.00e-05
|
| 381 |
+
[2026-04-25 19:39:41] Epoch 1 | Step 2050 | Loss: 1.1909 | LR: 5.00e-05
|
| 382 |
+
[2026-04-25 19:39:44] Epoch 1 | Step 2060 | Loss: 1.1907 | LR: 5.00e-05
|
| 383 |
+
[2026-04-25 19:39:47] Epoch 1 | Step 2070 | Loss: 1.1899 | LR: 5.00e-05
|
| 384 |
+
[2026-04-25 19:39:49] Epoch 1 | Step 2080 | Loss: 1.1895 | LR: 5.00e-05
|
| 385 |
+
[2026-04-25 19:39:51] Epoch 1 | Step 2090 | Loss: 1.1900 | LR: 5.00e-05
|
| 386 |
+
[2026-04-25 19:39:54] Epoch 1 | Step 2100 | Loss: 1.1903 | LR: 5.00e-05
|
| 387 |
+
[2026-04-25 19:39:57] Epoch 1 | Step 2110 | Loss: 1.1904 | LR: 5.00e-05
|
| 388 |
+
[2026-04-25 19:39:59] Epoch 1 | Step 2120 | Loss: 1.1902 | LR: 5.00e-05
|
| 389 |
+
[2026-04-25 19:40:02] Epoch 1 | Step 2130 | Loss: 1.1906 | LR: 5.00e-05
|
| 390 |
+
[2026-04-25 19:40:04] Epoch 1 | Step 2140 | Loss: 1.1907 | LR: 5.00e-05
|
| 391 |
+
[2026-04-25 19:40:07] Epoch 1 | Step 2150 | Loss: 1.1907 | LR: 5.00e-05
|
| 392 |
+
[2026-04-25 19:40:09] Epoch 1 | Step 2160 | Loss: 1.1912 | LR: 5.00e-05
|
| 393 |
+
[2026-04-25 19:40:12] Epoch 1 | Step 2170 | Loss: 1.1910 | LR: 5.00e-05
|
| 394 |
+
[2026-04-25 19:40:14] Epoch 1 | Step 2180 | Loss: 1.1907 | LR: 5.00e-05
|
| 395 |
+
[2026-04-25 19:40:16] Epoch 1 | Step 2190 | Loss: 1.1910 | LR: 5.00e-05
|
| 396 |
+
[2026-04-25 19:40:19] Epoch 1 | Step 2200 | Loss: 1.1911 | LR: 5.00e-05
|
| 397 |
+
[2026-04-25 19:40:21] Epoch 1 | Step 2210 | Loss: 1.1912 | LR: 5.00e-05
|
| 398 |
+
[2026-04-25 19:40:24] Epoch 1 | Step 2220 | Loss: 1.1920 | LR: 5.00e-05
|
| 399 |
+
[2026-04-25 19:40:26] Epoch 1 | Step 2230 | Loss: 1.1929 | LR: 5.00e-05
|
| 400 |
+
[2026-04-25 19:40:29] Epoch 1 | Step 2240 | Loss: 1.1936 | LR: 5.00e-05
|
| 401 |
+
[2026-04-25 19:40:31] Epoch 1 | Step 2250 | Loss: 1.1940 | LR: 5.00e-05
|
| 402 |
+
[2026-04-25 19:40:34] Epoch 1 | Step 2260 | Loss: 1.1939 | LR: 5.00e-05
|
| 403 |
+
[2026-04-25 19:40:37] Epoch 1 | Step 2270 | Loss: 1.1941 | LR: 5.00e-05
|
| 404 |
+
[2026-04-25 19:40:39] Epoch 1 | Step 2280 | Loss: 1.1945 | LR: 5.00e-05
|
| 405 |
+
[2026-04-25 19:40:42] Epoch 1 | Step 2290 | Loss: 1.1954 | LR: 5.00e-05
|
| 406 |
+
[2026-04-25 19:40:44] Epoch 1 | Step 2300 | Loss: 1.1957 | LR: 5.00e-05
|
| 407 |
+
[2026-04-25 19:40:46] Epoch 1 | Step 2310 | Loss: 1.1956 | LR: 5.00e-05
|
| 408 |
+
[2026-04-25 19:40:49] Epoch 1 | Step 2320 | Loss: 1.1958 | LR: 5.00e-05
|
| 409 |
+
[2026-04-25 19:40:51] Epoch 1 | Step 2330 | Loss: 1.1959 | LR: 5.00e-05
|
| 410 |
+
[2026-04-25 19:40:54] Epoch 1 | Step 2340 | Loss: 1.1958 | LR: 5.00e-05
|
| 411 |
+
[2026-04-25 19:40:56] Epoch 1 | Step 2350 | Loss: 1.1956 | LR: 5.00e-05
|
| 412 |
+
[2026-04-25 19:40:59] Epoch 1 | Step 2360 | Loss: 1.1959 | LR: 5.00e-05
|
| 413 |
+
[2026-04-25 19:41:01] Epoch 1 | Step 2370 | Loss: 1.1958 | LR: 5.00e-05
|
| 414 |
+
[2026-04-25 19:41:04] Epoch 1 | Step 2380 | Loss: 1.1957 | LR: 5.00e-05
|
| 415 |
+
[2026-04-25 19:41:06] Epoch 1 | Step 2390 | Loss: 1.1960 | LR: 5.00e-05
|
| 416 |
+
[2026-04-25 19:41:09] Epoch 1 | Step 2400 | Loss: 1.1957 | LR: 5.00e-05
|
| 417 |
+
[2026-04-25 19:41:11] Epoch 1 | Step 2410 | Loss: 1.1962 | LR: 5.00e-05
|
| 418 |
+
[2026-04-25 19:41:14] Epoch 1 | Step 2420 | Loss: 1.1964 | LR: 5.00e-05
|
| 419 |
+
[2026-04-25 19:41:16] Epoch 1 | Step 2430 | Loss: 1.1966 | LR: 5.00e-05
|
| 420 |
+
[2026-04-25 19:41:19] Epoch 1 | Step 2440 | Loss: 1.1962 | LR: 5.00e-05
|
| 421 |
+
[2026-04-25 19:41:22] Epoch 1 | Step 2450 | Loss: 1.1961 | LR: 5.00e-05
|
| 422 |
+
[2026-04-25 19:41:24] Epoch 1 | Step 2460 | Loss: 1.1961 | LR: 5.00e-05
|
| 423 |
+
[2026-04-25 19:41:27] Epoch 1 | Step 2470 | Loss: 1.1963 | LR: 5.00e-05
|
| 424 |
+
[2026-04-25 19:41:29] Epoch 1 | Step 2480 | Loss: 1.1964 | LR: 5.00e-05
|
| 425 |
+
[2026-04-25 19:41:32] Epoch 1 | Step 2490 | Loss: 1.1960 | LR: 5.00e-05
|
| 426 |
+
[2026-04-25 19:41:35] Epoch 1 | Step 2500 | Loss: 1.1957 | LR: 5.00e-05
|
| 427 |
+
[2026-04-25 19:41:37] Epoch 1 | Step 2510 | Loss: 1.1959 | LR: 5.00e-05
|
| 428 |
+
[2026-04-25 19:41:39] Epoch 1 | Step 2520 | Loss: 1.1954 | LR: 5.00e-05
|
| 429 |
+
[2026-04-25 19:41:42] Epoch 1 | Step 2530 | Loss: 1.1952 | LR: 5.00e-05
|
| 430 |
+
[2026-04-25 19:41:44] Epoch 1 | Step 2540 | Loss: 1.1952 | LR: 5.00e-05
|
| 431 |
+
[2026-04-25 19:41:47] Epoch 1 | Step 2550 | Loss: 1.1947 | LR: 5.00e-05
|
| 432 |
+
[2026-04-25 19:41:50] Epoch 1 | Step 2560 | Loss: 1.1948 | LR: 5.00e-05
|
| 433 |
+
[2026-04-25 19:41:52] Epoch 1 | Step 2570 | Loss: 1.1953 | LR: 5.00e-05
|
| 434 |
+
[2026-04-25 19:41:55] Epoch 1 | Step 2580 | Loss: 1.1958 | LR: 5.00e-05
|
| 435 |
+
[2026-04-25 19:41:58] Epoch 1 | Step 2590 | Loss: 1.1960 | LR: 5.00e-05
|
| 436 |
+
[2026-04-25 19:42:00] Epoch 1 | Step 2600 | Loss: 1.1962 | LR: 5.00e-05
|
| 437 |
+
[2026-04-25 19:42:02] Epoch 1 | Step 2610 | Loss: 1.1963 | LR: 5.00e-05
|
| 438 |
+
[2026-04-25 19:42:05] Epoch 1 | Step 2620 | Loss: 1.1960 | LR: 5.00e-05
|
| 439 |
+
[2026-04-25 19:42:07] Epoch 1 | Step 2630 | Loss: 1.1958 | LR: 5.00e-05
|
| 440 |
+
[2026-04-25 19:42:10] Epoch 1 | Step 2640 | Loss: 1.1960 | LR: 5.00e-05
|
| 441 |
+
[2026-04-25 19:42:13] Epoch 1 | Step 2650 | Loss: 1.1958 | LR: 5.00e-05
|
| 442 |
+
[2026-04-25 19:42:15] Epoch 1 | Step 2660 | Loss: 1.1960 | LR: 5.00e-05
|
| 443 |
+
[2026-04-25 19:42:18] Epoch 1 | Step 2670 | Loss: 1.1957 | LR: 5.00e-05
|
| 444 |
+
[2026-04-25 19:42:20] Epoch 1 | Step 2680 | Loss: 1.1956 | LR: 5.00e-05
|
| 445 |
+
[2026-04-25 19:42:23] Epoch 1 | Step 2690 | Loss: 1.1955 | LR: 5.00e-05
|
| 446 |
+
[2026-04-25 19:42:25] Epoch 1 | Step 2700 | Loss: 1.1953 | LR: 5.00e-05
|
| 447 |
+
[2026-04-25 19:42:28] Epoch 1 | Step 2710 | Loss: 1.1948 | LR: 5.00e-05
|
| 448 |
+
[2026-04-25 19:42:31] Epoch 1 | Step 2720 | Loss: 1.1951 | LR: 5.00e-05
|
| 449 |
+
[2026-04-25 19:42:33] Epoch 1 | Step 2730 | Loss: 1.1949 | LR: 5.00e-05
|
| 450 |
+
[2026-04-25 19:42:35] Epoch 1 | Step 2740 | Loss: 1.1955 | LR: 5.00e-05
|
| 451 |
+
[2026-04-25 19:42:38] Epoch 1 | Step 2750 | Loss: 1.1958 | LR: 5.00e-05
|
| 452 |
+
[2026-04-25 19:42:40] Epoch 1 | Step 2760 | Loss: 1.1954 | LR: 5.00e-05
|
| 453 |
+
[2026-04-25 19:42:43] Epoch 1 | Step 2770 | Loss: 1.1953 | LR: 5.00e-05
|
| 454 |
+
[2026-04-25 19:42:45] Epoch 1 | Step 2780 | Loss: 1.1957 | LR: 5.00e-05
|
| 455 |
+
[2026-04-25 19:42:48] Epoch 1 | Step 2790 | Loss: 1.1957 | LR: 5.00e-05
|
| 456 |
+
[2026-04-25 19:42:50] Epoch 1 | Step 2800 | Loss: 1.1956 | LR: 5.00e-05
|
| 457 |
+
[2026-04-25 19:42:53] Epoch 1 | Step 2810 | Loss: 1.1959 | LR: 5.00e-05
|
| 458 |
+
[2026-04-25 19:42:55] Epoch 1 | Step 2820 | Loss: 1.1959 | LR: 5.00e-05
|
| 459 |
+
[2026-04-25 19:42:58] Epoch 1 | Step 2830 | Loss: 1.1957 | LR: 5.00e-05
|
| 460 |
+
[2026-04-25 19:43:00] Epoch 1 | Step 2840 | Loss: 1.1965 | LR: 5.00e-05
|
| 461 |
+
[2026-04-25 19:43:03] Epoch 1 | Step 2850 | Loss: 1.1966 | LR: 5.00e-05
|
| 462 |
+
[2026-04-25 19:43:05] Epoch 1 | Step 2860 | Loss: 1.1966 | LR: 5.00e-05
|
| 463 |
+
[2026-04-25 19:43:08] Epoch 1 | Step 2870 | Loss: 1.1968 | LR: 5.00e-05
|
| 464 |
+
[2026-04-25 19:43:11] Epoch 1 | Step 2880 | Loss: 1.1965 | LR: 5.00e-05
|
| 465 |
+
[2026-04-25 19:43:13] Epoch 1 | Step 2890 | Loss: 1.1964 | LR: 5.00e-05
|
| 466 |
+
[2026-04-25 19:43:16] Epoch 1 | Step 2900 | Loss: 1.1959 | LR: 5.00e-05
|
| 467 |
+
[2026-04-25 19:43:18] Epoch 1 | Step 2910 | Loss: 1.1958 | LR: 5.00e-05
|
| 468 |
+
[2026-04-25 19:43:21] Epoch 1 | Step 2920 | Loss: 1.1961 | LR: 5.00e-05
|
| 469 |
+
[2026-04-25 19:43:24] Epoch 1 | Step 2930 | Loss: 1.1960 | LR: 5.00e-05
|
| 470 |
+
[2026-04-25 19:43:27] Epoch 1 | Step 2940 | Loss: 1.1958 | LR: 5.00e-05
|
| 471 |
+
[2026-04-25 19:43:29] Epoch 1 | Step 2950 | Loss: 1.1961 | LR: 5.00e-05
|
| 472 |
+
[2026-04-25 19:43:32] Epoch 1 | Step 2960 | Loss: 1.1962 | LR: 5.00e-05
|
| 473 |
+
[2026-04-25 19:43:34] Epoch 1 | Step 2970 | Loss: 1.1963 | LR: 5.00e-05
|
| 474 |
+
[2026-04-25 19:43:37] Epoch 1 | Step 2980 | Loss: 1.1962 | LR: 5.00e-05
|
| 475 |
+
[2026-04-25 19:43:39] Epoch 1 | Step 2990 | Loss: 1.1965 | LR: 5.00e-05
|
| 476 |
+
[2026-04-25 19:43:42] Epoch 1 | Step 3000 | Loss: 1.1964 | LR: 5.00e-05
|
| 477 |
+
[2026-04-25 19:43:45] Epoch 1 | Step 3010 | Loss: 1.1965 | LR: 5.00e-05
|
| 478 |
+
[2026-04-25 19:43:47] Epoch 1 | Step 3020 | Loss: 1.1962 | LR: 5.00e-05
|
| 479 |
+
[2026-04-25 19:43:50] Epoch 1 | Step 3030 | Loss: 1.1961 | LR: 5.00e-05
|
| 480 |
+
[2026-04-25 19:43:52] Epoch 1 | Step 3040 | Loss: 1.1955 | LR: 5.00e-05
|
| 481 |
+
[2026-04-25 19:43:55] Epoch 1 | Step 3050 | Loss: 1.1951 | LR: 5.00e-05
|
| 482 |
+
[2026-04-25 19:43:57] Epoch 1 | Step 3060 | Loss: 1.1952 | LR: 5.00e-05
|
| 483 |
+
[2026-04-25 19:44:00] Epoch 1 | Step 3070 | Loss: 1.1950 | LR: 5.00e-05
|
| 484 |
+
[2026-04-25 19:44:03] Epoch 1 | Step 3080 | Loss: 1.1951 | LR: 5.00e-05
|
| 485 |
+
[2026-04-25 19:44:05] Epoch 1 | Step 3090 | Loss: 1.1949 | LR: 5.00e-05
|
| 486 |
+
[2026-04-25 19:44:07] Epoch 1 | Step 3100 | Loss: 1.1948 | LR: 5.00e-05
|
| 487 |
+
[2026-04-25 19:44:10] Epoch 1 | Step 3110 | Loss: 1.1946 | LR: 5.00e-05
|
| 488 |
+
[2026-04-25 19:44:12] Epoch 1 | Step 3120 | Loss: 1.1952 | LR: 5.00e-05
|
| 489 |
+
[2026-04-25 19:44:15] Epoch 1 | Step 3130 | Loss: 1.1949 | LR: 5.00e-05
|
| 490 |
+
[2026-04-25 19:44:18] Epoch 1 | Step 3140 | Loss: 1.1951 | LR: 5.00e-05
|
| 491 |
+
[2026-04-25 19:44:20] Epoch 1 | Step 3150 | Loss: 1.1954 | LR: 5.00e-05
|
| 492 |
+
[2026-04-25 19:44:23] Epoch 1 | Step 3160 | Loss: 1.1954 | LR: 5.00e-05
|
| 493 |
+
[2026-04-25 19:44:25] Epoch 1 | Step 3170 | Loss: 1.1955 | LR: 5.00e-05
|
| 494 |
+
[2026-04-25 19:44:28] Epoch 1 | Step 3180 | Loss: 1.1956 | LR: 5.00e-05
|
| 495 |
+
[2026-04-25 19:44:30] Epoch 1 | Step 3190 | Loss: 1.1952 | LR: 5.00e-05
|
| 496 |
+
[2026-04-25 19:44:33] Epoch 1 | Step 3200 | Loss: 1.1952 | LR: 5.00e-05
|
| 497 |
+
[2026-04-25 19:44:35] Epoch 1 | Step 3210 | Loss: 1.1951 | LR: 5.00e-05
|
| 498 |
+
[2026-04-25 19:44:38] Epoch 1 | Step 3220 | Loss: 1.1946 | LR: 5.00e-05
|
| 499 |
+
[2026-04-25 19:44:40] Epoch 1 | Step 3230 | Loss: 1.1951 | LR: 5.00e-05
|
| 500 |
+
[2026-04-25 19:44:43] Epoch 1 | Step 3240 | Loss: 1.1950 | LR: 5.00e-05
|
| 501 |
+
[2026-04-25 19:44:45] Epoch 1 | Step 3250 | Loss: 1.1951 | LR: 5.00e-05
|
| 502 |
+
[2026-04-25 19:44:48] Epoch 1 | Step 3260 | Loss: 1.1953 | LR: 5.00e-05
|
| 503 |
+
[2026-04-25 19:44:50] Epoch 1 | Step 3270 | Loss: 1.1952 | LR: 5.00e-05
|
| 504 |
+
[2026-04-25 19:44:53] Epoch 1 | Step 3280 | Loss: 1.1948 | LR: 5.00e-05
|
| 505 |
+
[2026-04-25 19:44:55] Epoch 1 | Step 3290 | Loss: 1.1948 | LR: 5.00e-05
|
| 506 |
+
[2026-04-25 19:44:58] Epoch 1 | Step 3300 | Loss: 1.1950 | LR: 5.00e-05
|
| 507 |
+
[2026-04-25 19:45:00] Epoch 1 | Step 3310 | Loss: 1.1949 | LR: 5.00e-05
|
| 508 |
+
[2026-04-25 19:45:03] Epoch 1 | Step 3320 | Loss: 1.1951 | LR: 5.00e-05
|
| 509 |
+
[2026-04-25 19:45:06] Epoch 1 | Step 3330 | Loss: 1.1950 | LR: 5.00e-05
|
| 510 |
+
[2026-04-25 19:45:09] Epoch 1 | Step 3340 | Loss: 1.1951 | LR: 5.00e-05
|
| 511 |
+
[2026-04-25 19:45:11] Epoch 1 | Step 3350 | Loss: 1.1948 | LR: 5.00e-05
|
| 512 |
+
[2026-04-25 19:45:13] Epoch 1 | Step 3360 | Loss: 1.1946 | LR: 5.00e-05
|
| 513 |
+
[2026-04-25 19:45:16] Epoch 1 | Step 3370 | Loss: 1.1949 | LR: 5.00e-05
|
| 514 |
+
[2026-04-25 19:45:19] Epoch 1 | Step 3380 | Loss: 1.1946 | LR: 5.00e-05
|
| 515 |
+
[2026-04-25 19:45:22] Epoch 1 | Step 3390 | Loss: 1.1949 | LR: 5.00e-05
|
| 516 |
+
[2026-04-25 19:45:24] Epoch 1 | Step 3400 | Loss: 1.1954 | LR: 5.00e-05
|
| 517 |
+
[2026-04-25 19:45:27] Epoch 1 | Step 3410 | Loss: 1.1952 | LR: 5.00e-05
|
| 518 |
+
[2026-04-25 19:45:29] Epoch 1 | Step 3420 | Loss: 1.1949 | LR: 5.00e-05
|
| 519 |
+
[2026-04-25 19:45:32] Epoch 1 | Step 3430 | Loss: 1.1949 | LR: 5.00e-05
|
| 520 |
+
[2026-04-25 19:45:35] Epoch 1 | Step 3440 | Loss: 1.1951 | LR: 5.00e-05
|
| 521 |
+
[2026-04-25 19:45:37] Epoch 1 | Step 3450 | Loss: 1.1950 | LR: 5.00e-05
|
| 522 |
+
[2026-04-25 19:45:40] Epoch 1 | Step 3460 | Loss: 1.1949 | LR: 5.00e-05
|
| 523 |
+
[2026-04-25 19:45:42] Epoch 1 | Step 3470 | Loss: 1.1949 | LR: 5.00e-05
|
| 524 |
+
[2026-04-25 19:45:45] Epoch 1 | Step 3480 | Loss: 1.1949 | LR: 5.00e-05
|
| 525 |
+
[2026-04-25 19:45:47] Epoch 1 | Step 3490 | Loss: 1.1947 | LR: 5.00e-05
|
| 526 |
+
[2026-04-25 19:45:50] Epoch 1 | Step 3500 | Loss: 1.1943 | LR: 5.00e-05
|
| 527 |
+
[2026-04-25 19:45:53] Epoch 1 | Step 3510 | Loss: 1.1947 | LR: 5.00e-05
|
| 528 |
+
[2026-04-25 19:45:55] Epoch 1 | Step 3520 | Loss: 1.1944 | LR: 5.00e-05
|
| 529 |
+
[2026-04-25 19:45:58] Epoch 1 | Step 3530 | Loss: 1.1947 | LR: 5.00e-05
|
| 530 |
+
[2026-04-25 19:46:00] Epoch 1 | Step 3540 | Loss: 1.1944 | LR: 5.00e-05
|
| 531 |
+
[2026-04-25 19:46:03] Epoch 1 | Step 3550 | Loss: 1.1944 | LR: 5.00e-05
|
| 532 |
+
[2026-04-25 19:46:05] Epoch 1 | Step 3560 | Loss: 1.1944 | LR: 5.00e-05
|
| 533 |
+
[2026-04-25 19:46:08] Epoch 1 | Step 3570 | Loss: 1.1943 | LR: 5.00e-05
|
| 534 |
+
[2026-04-25 19:46:11] Epoch 1 | Step 3580 | Loss: 1.1942 | LR: 5.00e-05
|
| 535 |
+
[2026-04-25 19:46:13] Epoch 1 | Step 3590 | Loss: 1.1942 | LR: 5.00e-05
|
| 536 |
+
[2026-04-25 19:46:16] Epoch 1 | Step 3600 | Loss: 1.1940 | LR: 5.00e-05
|
| 537 |
+
[2026-04-25 19:46:18] Epoch 1 | Step 3610 | Loss: 1.1939 | LR: 5.00e-05
|
| 538 |
+
[2026-04-25 19:46:21] Epoch 1 | Step 3620 | Loss: 1.1938 | LR: 5.00e-05
|
| 539 |
+
[2026-04-25 19:46:23] Epoch 1 | Step 3630 | Loss: 1.1942 | LR: 5.00e-05
|
| 540 |
+
[2026-04-25 19:46:26] Epoch 1 | Step 3640 | Loss: 1.1945 | LR: 5.00e-05
|
| 541 |
+
[2026-04-25 19:46:28] Epoch 1 | Step 3650 | Loss: 1.1946 | LR: 5.00e-05
|
| 542 |
+
[2026-04-25 19:46:31] Epoch 1 | Step 3660 | Loss: 1.1944 | LR: 5.00e-05
|
| 543 |
+
[2026-04-25 19:46:33] Epoch 1 | Step 3670 | Loss: 1.1942 | LR: 5.00e-05
|
| 544 |
+
[2026-04-25 19:46:36] Epoch 1 | Step 3680 | Loss: 1.1943 | LR: 5.00e-05
|
| 545 |
+
[2026-04-25 19:46:38] Epoch 1 | Step 3690 | Loss: 1.1941 | LR: 5.00e-05
|
| 546 |
+
[2026-04-25 19:46:41] Epoch 1 | Step 3700 | Loss: 1.1939 | LR: 5.00e-05
|
| 547 |
+
[2026-04-25 19:46:43] Epoch 1 | Step 3710 | Loss: 1.1939 | LR: 5.00e-05
|
| 548 |
+
[2026-04-25 19:46:46] Epoch 1 | Step 3720 | Loss: 1.1939 | LR: 5.00e-05
|
| 549 |
+
[2026-04-25 19:46:48] Epoch 1 | Step 3730 | Loss: 1.1941 | LR: 5.00e-05
|
| 550 |
+
[2026-04-25 19:46:51] Epoch 1 | Step 3740 | Loss: 1.1942 | LR: 5.00e-05
|
| 551 |
+
[2026-04-25 19:46:53] Epoch 1 | Step 3750 | Loss: 1.1940 | LR: 5.00e-05
|
| 552 |
+
[2026-04-25 19:46:56] Epoch 1 | Step 3760 | Loss: 1.1942 | LR: 5.00e-05
|
| 553 |
+
[2026-04-25 19:46:59] Epoch 1 | Step 3770 | Loss: 1.1944 | LR: 5.00e-05
|
| 554 |
+
[2026-04-25 19:47:01] Epoch 1 | Step 3780 | Loss: 1.1945 | LR: 5.00e-05
|
| 555 |
+
[2026-04-25 19:47:04] Epoch 1 | Step 3790 | Loss: 1.1946 | LR: 5.00e-05
|
| 556 |
+
[2026-04-25 19:47:06] Epoch 1 | Step 3800 | Loss: 1.1948 | LR: 5.00e-05
|
| 557 |
+
[2026-04-25 19:47:09] Epoch 1 | Step 3810 | Loss: 1.1942 | LR: 5.00e-05
|
| 558 |
+
[2026-04-25 19:47:11] Epoch 1 | Step 3820 | Loss: 1.1940 | LR: 5.00e-05
|
| 559 |
+
[2026-04-25 19:47:14] Epoch 1 | Step 3830 | Loss: 1.1939 | LR: 5.00e-05
|
| 560 |
+
[2026-04-25 19:47:16] Epoch 1 | Step 3840 | Loss: 1.1941 | LR: 5.00e-05
|
| 561 |
+
[2026-04-25 19:47:19] Epoch 1 | Step 3850 | Loss: 1.1938 | LR: 5.00e-05
|
| 562 |
+
[2026-04-25 19:47:21] Epoch 1 | Step 3860 | Loss: 1.1937 | LR: 5.00e-05
|
| 563 |
+
[2026-04-25 19:47:24] Epoch 1 | Step 3870 | Loss: 1.1936 | LR: 5.00e-05
|
| 564 |
+
[2026-04-25 19:47:27] Epoch 1 | Step 3880 | Loss: 1.1932 | LR: 5.00e-05
|
| 565 |
+
[2026-04-25 19:47:29] Epoch 1 | Step 3890 | Loss: 1.1931 | LR: 5.00e-05
|
| 566 |
+
[2026-04-25 19:47:32] Epoch 1 | Step 3900 | Loss: 1.1932 | LR: 5.00e-05
|
| 567 |
+
[2026-04-25 19:47:34] Epoch 1 | Step 3910 | Loss: 1.1934 | LR: 5.00e-05
|
| 568 |
+
[2026-04-25 19:47:37] Epoch 1 | Step 3920 | Loss: 1.1936 | LR: 5.00e-05
|
| 569 |
+
[2026-04-25 19:47:39] Epoch 1 | Step 3930 | Loss: 1.1935 | LR: 5.00e-05
|
| 570 |
+
[2026-04-25 19:47:42] Epoch 1 | Step 3940 | Loss: 1.1935 | LR: 5.00e-05
|
| 571 |
+
[2026-04-25 19:47:45] Epoch 1 | Step 3950 | Loss: 1.1933 | LR: 5.00e-05
|
| 572 |
+
[2026-04-25 19:47:47] Epoch 1 | Step 3960 | Loss: 1.1932 | LR: 5.00e-05
|
| 573 |
+
[2026-04-25 19:47:50] Epoch 1 | Step 3970 | Loss: 1.1931 | LR: 5.00e-05
|
| 574 |
+
[2026-04-25 19:47:53] Epoch 1 | Step 3980 | Loss: 1.1932 | LR: 4.99e-05
|
| 575 |
+
[2026-04-25 19:47:55] Epoch 1 | Step 3990 | Loss: 1.1930 | LR: 4.99e-05
|
| 576 |
+
[2026-04-25 19:47:57] Epoch 1 | Step 4000 | Loss: 1.1930 | LR: 4.98e-05
|
| 577 |
+
[2026-04-25 19:47:58] Validation | Batch 10/84 | Loss: 1.1351
|
| 578 |
+
[2026-04-25 19:47:58] Validation | Batch 20/84 | Loss: 1.1323
|
| 579 |
+
[2026-04-25 19:47:59] Validation | Batch 30/84 | Loss: 1.2164
|
| 580 |
+
[2026-04-25 19:47:59] Validation | Batch 40/84 | Loss: 1.2215
|
| 581 |
+
[2026-04-25 19:48:00] Validation | Batch 50/84 | Loss: 1.2167
|
| 582 |
+
[2026-04-25 19:48:00] Validation | Batch 60/84 | Loss: 1.1898
|
| 583 |
+
[2026-04-25 19:48:01] Validation | Batch 70/84 | Loss: 1.1702
|
| 584 |
+
[2026-04-25 19:48:01] Validation | Batch 80/84 | Loss: 1.1787
|
| 585 |
+
[2026-04-25 19:48:01] Validation | Batch 84/84 | Loss: 1.1692
|
| 586 |
+
[2026-04-25 19:48:02] Validation | Loss: 1.1692 | PPL: 3.30 | Time: 3.78s
|
| 587 |
+
[2026-04-25 19:48:04] New best model saved! Val loss: 1.1692
|
| 588 |
+
[2026-04-25 19:48:06] Epoch 1 | Step 4010 | Loss: 1.1929 | LR: 4.97e-05
|
| 589 |
+
[2026-04-25 19:48:09] Epoch 1 | Step 4020 | Loss: 1.1930 | LR: 4.95e-05
|
| 590 |
+
[2026-04-25 19:48:11] Epoch 1 | Step 4030 | Loss: 1.1927 | LR: 4.94e-05
|
| 591 |
+
[2026-04-25 19:48:14] Epoch 1 | Step 4040 | Loss: 1.1924 | LR: 4.92e-05
|
| 592 |
+
[2026-04-25 19:48:16] Epoch 1 | Step 4050 | Loss: 1.1924 | LR: 4.90e-05
|
| 593 |
+
[2026-04-25 19:48:18] Epoch 1 | Step 4060 | Loss: 1.1919 | LR: 4.88e-05
|
| 594 |
+
[2026-04-25 19:48:21] Epoch 1 | Step 4070 | Loss: 1.1920 | LR: 4.85e-05
|
| 595 |
+
[2026-04-25 19:48:23] Epoch 1 | Step 4080 | Loss: 1.1921 | LR: 4.82e-05
|
| 596 |
+
[2026-04-25 19:48:26] Epoch 1 | Step 4090 | Loss: 1.1921 | LR: 4.80e-05
|
| 597 |
+
[2026-04-25 19:48:28] Epoch 1 | Step 4100 | Loss: 1.1923 | LR: 4.77e-05
|
| 598 |
+
[2026-04-25 19:48:31] Epoch 1 | Step 4110 | Loss: 1.1922 | LR: 4.73e-05
|
| 599 |
+
[2026-04-25 19:48:34] Epoch 1 | Step 4120 | Loss: 1.1924 | LR: 4.70e-05
|
| 600 |
+
[2026-04-25 19:48:36] Epoch 1 | Step 4130 | Loss: 1.1922 | LR: 4.66e-05
|
| 601 |
+
[2026-04-25 19:48:39] Epoch 1 | Step 4140 | Loss: 1.1923 | LR: 4.62e-05
|
| 602 |
+
[2026-04-25 19:48:42] Epoch 1 | Step 4150 | Loss: 1.1929 | LR: 4.58e-05
|
| 603 |
+
[2026-04-25 19:48:44] Epoch 1 | Step 4160 | Loss: 1.1932 | LR: 4.54e-05
|
| 604 |
+
[2026-04-25 19:48:47] Epoch 1 | Step 4170 | Loss: 1.1931 | LR: 4.49e-05
|
| 605 |
+
[2026-04-25 19:48:50] Epoch 1 | Step 4180 | Loss: 1.1931 | LR: 4.45e-05
|
| 606 |
+
[2026-04-25 19:48:52] Epoch 1 | Step 4190 | Loss: 1.1930 | LR: 4.40e-05
|
| 607 |
+
[2026-04-25 19:48:55] Epoch 1 | Step 4200 | Loss: 1.1934 | LR: 4.35e-05
|
| 608 |
+
[2026-04-25 19:48:57] Epoch 1 | Step 4210 | Loss: 1.1932 | LR: 4.30e-05
|
| 609 |
+
[2026-04-25 19:49:00] Epoch 1 | Step 4220 | Loss: 1.1937 | LR: 4.25e-05
|
| 610 |
+
[2026-04-25 19:49:03] Epoch 1 | Step 4230 | Loss: 1.1937 | LR: 4.19e-05
|
| 611 |
+
[2026-04-25 19:49:06] Epoch 1 | Step 4240 | Loss: 1.1939 | LR: 4.14e-05
|
| 612 |
+
[2026-04-25 19:49:08] Epoch 1 | Step 4250 | Loss: 1.1939 | LR: 4.08e-05
|
| 613 |
+
[2026-04-25 19:49:11] Epoch 1 | Step 4260 | Loss: 1.1935 | LR: 4.02e-05
|
| 614 |
+
[2026-04-25 19:49:13] Epoch 1 | Step 4270 | Loss: 1.1937 | LR: 3.96e-05
|
| 615 |
+
[2026-04-25 19:49:16] Epoch 1 | Step 4280 | Loss: 1.1936 | LR: 3.90e-05
|
| 616 |
+
[2026-04-25 19:49:18] Epoch 1 | Step 4290 | Loss: 1.1933 | LR: 3.84e-05
|
| 617 |
+
[2026-04-25 19:49:21] Epoch 1 | Step 4300 | Loss: 1.1932 | LR: 3.78e-05
|
| 618 |
+
[2026-04-25 19:49:23] Epoch 1 | Step 4310 | Loss: 1.1935 | LR: 3.71e-05
|
| 619 |
+
[2026-04-25 19:49:26] Epoch 1 | Step 4320 | Loss: 1.1936 | LR: 3.65e-05
|
| 620 |
+
[2026-04-25 19:49:28] Epoch 1 | Step 4330 | Loss: 1.1934 | LR: 3.58e-05
|
| 621 |
+
[2026-04-25 19:49:31] Epoch 1 | Step 4340 | Loss: 1.1932 | LR: 3.52e-05
|
| 622 |
+
[2026-04-25 19:49:33] Epoch 1 | Step 4350 | Loss: 1.1929 | LR: 3.45e-05
|
| 623 |
+
[2026-04-25 19:49:36] Epoch 1 | Step 4360 | Loss: 1.1928 | LR: 3.38e-05
|
| 624 |
+
[2026-04-25 19:49:38] Epoch 1 | Step 4370 | Loss: 1.1928 | LR: 3.31e-05
|
| 625 |
+
[2026-04-25 19:49:41] Epoch 1 | Step 4380 | Loss: 1.1927 | LR: 3.24e-05
|
| 626 |
+
[2026-04-25 19:49:43] Epoch 1 | Step 4390 | Loss: 1.1927 | LR: 3.17e-05
|
| 627 |
+
[2026-04-25 19:49:46] Epoch 1 | Step 4400 | Loss: 1.1924 | LR: 3.10e-05
|
| 628 |
+
[2026-04-25 19:49:48] Epoch 1 | Step 4410 | Loss: 1.1919 | LR: 3.03e-05
|
| 629 |
+
[2026-04-25 19:49:51] Epoch 1 | Step 4420 | Loss: 1.1922 | LR: 2.96e-05
|
| 630 |
+
[2026-04-25 19:49:53] Epoch 1 | Step 4430 | Loss: 1.1920 | LR: 2.89e-05
|
| 631 |
+
[2026-04-25 19:49:56] Epoch 1 | Step 4440 | Loss: 1.1923 | LR: 2.82e-05
|
| 632 |
+
[2026-04-25 19:49:59] Epoch 1 | Step 4450 | Loss: 1.1921 | LR: 2.74e-05
|
| 633 |
+
[2026-04-25 19:50:01] Epoch 1 | Step 4460 | Loss: 1.1924 | LR: 2.67e-05
|
| 634 |
+
[2026-04-25 19:50:04] Epoch 1 | Step 4470 | Loss: 1.1921 | LR: 2.60e-05
|
| 635 |
+
[2026-04-25 19:50:06] Epoch 1 | Step 4480 | Loss: 1.1918 | LR: 2.53e-05
|
| 636 |
+
[2026-04-25 19:50:09] Epoch 1 | Step 4490 | Loss: 1.1916 | LR: 2.46e-05
|
| 637 |
+
[2026-04-25 19:50:12] Epoch 1 | Step 4500 | Loss: 1.1916 | LR: 2.39e-05
|
| 638 |
+
[2026-04-25 19:50:14] Epoch 1 | Step 4510 | Loss: 1.1911 | LR: 2.32e-05
|
| 639 |
+
[2026-04-25 19:50:16] Epoch 1 | Step 4520 | Loss: 1.1908 | LR: 2.25e-05
|
| 640 |
+
[2026-04-25 19:50:20] Epoch 1 | Step 4530 | Loss: 1.1904 | LR: 2.18e-05
|
| 641 |
+
[2026-04-25 19:50:22] Epoch 1 | Step 4540 | Loss: 1.1902 | LR: 2.11e-05
|
| 642 |
+
[2026-04-25 19:50:25] Epoch 1 | Step 4550 | Loss: 1.1898 | LR: 2.04e-05
|
| 643 |
+
[2026-04-25 19:50:28] Epoch 1 | Step 4560 | Loss: 1.1897 | LR: 1.97e-05
|
| 644 |
+
[2026-04-25 19:50:30] Epoch 1 | Step 4570 | Loss: 1.1896 | LR: 1.91e-05
|
| 645 |
+
[2026-04-25 19:50:32] Epoch 1 | Step 4580 | Loss: 1.1894 | LR: 1.84e-05
|
| 646 |
+
[2026-04-25 19:50:35] Epoch 1 | Step 4590 | Loss: 1.1892 | LR: 1.78e-05
|
| 647 |
+
[2026-04-25 19:50:37] Epoch 1 | Step 4600 | Loss: 1.1889 | LR: 1.71e-05
|
| 648 |
+
[2026-04-25 19:50:40] Epoch 1 | Step 4610 | Loss: 1.1886 | LR: 1.65e-05
|
| 649 |
+
[2026-04-25 19:50:42] Epoch 1 | Step 4620 | Loss: 1.1886 | LR: 1.59e-05
|
| 650 |
+
[2026-04-25 19:50:45] Epoch 1 | Step 4630 | Loss: 1.1884 | LR: 1.53e-05
|
| 651 |
+
[2026-04-25 19:50:47] Epoch 1 | Step 4640 | Loss: 1.1883 | LR: 1.47e-05
|
| 652 |
+
[2026-04-25 19:50:50] Epoch 1 | Step 4650 | Loss: 1.1882 | LR: 1.41e-05
|
| 653 |
+
[2026-04-25 19:50:52] Epoch 1 | Step 4660 | Loss: 1.1880 | LR: 1.35e-05
|
| 654 |
+
[2026-04-25 19:50:55] Epoch 1 | Step 4670 | Loss: 1.1877 | LR: 1.30e-05
|
| 655 |
+
[2026-04-25 19:50:58] Epoch 1 | Step 4680 | Loss: 1.1877 | LR: 1.24e-05
|
| 656 |
+
[2026-04-25 19:51:00] Epoch 1 | Step 4690 | Loss: 1.1874 | LR: 1.19e-05
|
| 657 |
+
[2026-04-25 19:51:03] Epoch 1 | Step 4700 | Loss: 1.1875 | LR: 1.14e-05
|
| 658 |
+
[2026-04-25 19:51:05] Epoch 1 | Step 4710 | Loss: 1.1872 | LR: 1.09e-05
|
| 659 |
+
[2026-04-25 19:51:08] Epoch 1 | Step 4720 | Loss: 1.1871 | LR: 1.04e-05
|
| 660 |
+
[2026-04-25 19:51:10] Epoch 1 | Step 4730 | Loss: 1.1870 | LR: 9.98e-06
|
| 661 |
+
[2026-04-25 19:51:12] Epoch 1 | Step 4740 | Loss: 1.1867 | LR: 9.54e-06
|
| 662 |
+
[2026-04-25 19:51:15] Epoch 1 | Step 4750 | Loss: 1.1866 | LR: 9.12e-06
|
| 663 |
+
[2026-04-25 19:51:17] Epoch 1 | Step 4760 | Loss: 1.1863 | LR: 8.72e-06
|
| 664 |
+
[2026-04-25 19:51:20] Epoch 1 | Step 4770 | Loss: 1.1858 | LR: 8.33e-06
|
| 665 |
+
[2026-04-25 19:51:22] Epoch 1 | Step 4780 | Loss: 1.1859 | LR: 7.97e-06
|
| 666 |
+
[2026-04-25 19:51:25] Epoch 1 | Step 4790 | Loss: 1.1858 | LR: 7.62e-06
|
| 667 |
+
[2026-04-25 19:51:28] Epoch 1 | Step 4800 | Loss: 1.1854 | LR: 7.30e-06
|
| 668 |
+
[2026-04-25 19:51:30] Epoch 1 | Step 4810 | Loss: 1.1849 | LR: 6.99e-06
|
| 669 |
+
[2026-04-25 19:51:33] Epoch 1 | Step 4820 | Loss: 1.1846 | LR: 6.71e-06
|
| 670 |
+
[2026-04-25 19:51:35] Epoch 1 | Step 4830 | Loss: 1.1842 | LR: 6.45e-06
|
| 671 |
+
[2026-04-25 19:51:38] Epoch 1 | Step 4840 | Loss: 1.1840 | LR: 6.21e-06
|
| 672 |
+
[2026-04-25 19:51:41] Epoch 1 | Step 4850 | Loss: 1.1842 | LR: 5.99e-06
|
| 673 |
+
[2026-04-25 19:51:43] Epoch 1 | Step 4860 | Loss: 1.1842 | LR: 5.79e-06
|
| 674 |
+
[2026-04-25 19:51:46] Epoch 1 | Step 4870 | Loss: 1.1843 | LR: 5.61e-06
|
| 675 |
+
[2026-04-25 19:51:48] Epoch 1 | Step 4880 | Loss: 1.1840 | LR: 5.46e-06
|
| 676 |
+
[2026-04-25 19:51:51] Epoch 1 | Step 4890 | Loss: 1.1837 | LR: 5.32e-06
|
| 677 |
+
[2026-04-25 19:51:54] Epoch 1 | Step 4900 | Loss: 1.1836 | LR: 5.21e-06
|
| 678 |
+
[2026-04-25 19:51:56] Epoch 1 | Step 4910 | Loss: 1.1834 | LR: 5.13e-06
|
| 679 |
+
[2026-04-25 19:51:59] Epoch 1 | Step 4920 | Loss: 1.1832 | LR: 5.06e-06
|
| 680 |
+
[2026-04-25 19:52:01] Epoch 1 | Step 4930 | Loss: 1.1830 | LR: 5.02e-06
|
| 681 |
+
[2026-04-25 19:52:04] Epoch 1 | Step 4940 | Loss: 1.1829 | LR: 5.00e-06
|
| 682 |
+
[2026-04-25 19:52:06] Epoch 1 | Step 4950 | Loss: 1.1828 | LR: 5.00e-06
|
| 683 |
+
[2026-04-25 19:52:09] Epoch 1 | Step 4960 | Loss: 1.1828 | LR: 5.00e-06
|
| 684 |
+
[2026-04-25 19:52:11] Epoch 1 | Step 4970 | Loss: 1.1825 | LR: 5.00e-06
|
| 685 |
+
[2026-04-25 19:52:14] Epoch 1 | Step 4980 | Loss: 1.1824 | LR: 5.00e-06
|
| 686 |
+
[2026-04-25 19:52:16] Epoch 1 | Step 4990 | Loss: 1.1820 | LR: 5.00e-06
|
| 687 |
+
[2026-04-25 19:52:19] Epoch 1 | Step 5000 | Loss: 1.1821 | LR: 5.00e-06
|
| 688 |
+
[2026-04-25 19:52:21] Epoch 1 | Step 5010 | Loss: 1.1819 | LR: 5.00e-06
|
| 689 |
+
[2026-04-25 19:52:24] Epoch 1 | Step 5020 | Loss: 1.1816 | LR: 5.00e-06
|
| 690 |
+
[2026-04-25 19:52:26] Epoch 1 | Step 5030 | Loss: 1.1814 | LR: 5.00e-06
|
| 691 |
+
[2026-04-25 19:52:28] Epoch 1 | Step 5040 | Loss: 1.1811 | LR: 5.00e-06
|
| 692 |
+
[2026-04-25 19:52:31] Epoch 1 | Step 5050 | Loss: 1.1809 | LR: 5.00e-06
|
| 693 |
+
[2026-04-25 19:52:33] Epoch 1 | Step 5060 | Loss: 1.1807 | LR: 5.00e-06
|
| 694 |
+
[2026-04-25 19:52:36] Epoch 1 | Step 5070 | Loss: 1.1805 | LR: 5.00e-06
|
| 695 |
+
[2026-04-25 19:52:38] Epoch 1 | Step 5080 | Loss: 1.1805 | LR: 5.00e-06
|
| 696 |
+
[2026-04-25 19:52:41] Epoch 1 | Step 5090 | Loss: 1.1804 | LR: 5.00e-06
|
| 697 |
+
[2026-04-25 19:52:44] Epoch 1 | Step 5100 | Loss: 1.1800 | LR: 5.00e-06
|
| 698 |
+
[2026-04-25 19:52:46] Epoch 1 | Step 5110 | Loss: 1.1798 | LR: 5.00e-06
|
| 699 |
+
[2026-04-25 19:52:49] Epoch 1 | Step 5120 | Loss: 1.1799 | LR: 5.00e-06
|
| 700 |
+
[2026-04-25 19:52:52] Epoch 1 | Step 5130 | Loss: 1.1796 | LR: 5.00e-06
|
| 701 |
+
[2026-04-25 19:52:54] Epoch 1 | Step 5140 | Loss: 1.1794 | LR: 5.00e-06
|
| 702 |
+
[2026-04-25 19:52:57] Epoch 1 | Step 5150 | Loss: 1.1791 | LR: 5.00e-06
|
| 703 |
+
[2026-04-25 19:52:59] Epoch 1 | Step 5160 | Loss: 1.1786 | LR: 5.00e-06
|
| 704 |
+
[2026-04-25 19:53:02] Epoch 1 | Step 5170 | Loss: 1.1785 | LR: 5.00e-06
|
| 705 |
+
[2026-04-25 19:53:04] Epoch 1 | Step 5180 | Loss: 1.1783 | LR: 5.00e-06
|
| 706 |
+
[2026-04-25 19:53:07] Epoch 1 | Step 5190 | Loss: 1.1783 | LR: 5.00e-06
|
| 707 |
+
[2026-04-25 19:53:10] Epoch 1 | Step 5200 | Loss: 1.1782 | LR: 5.00e-06
|
| 708 |
+
[2026-04-25 19:53:12] Epoch 1 | Step 5210 | Loss: 1.1780 | LR: 5.00e-06
|
| 709 |
+
[2026-04-25 19:53:15] Epoch 1 | Step 5220 | Loss: 1.1779 | LR: 5.00e-06
|
| 710 |
+
[2026-04-25 19:53:18] Epoch 1 | Step 5230 | Loss: 1.1778 | LR: 5.00e-06
|
| 711 |
+
[2026-04-25 19:53:20] Epoch 1 | Step 5240 | Loss: 1.1777 | LR: 5.00e-06
|
| 712 |
+
[2026-04-25 19:53:23] Epoch 1 | Step 5250 | Loss: 1.1777 | LR: 5.00e-06
|
| 713 |
+
[2026-04-25 19:53:25] Epoch 1 | Step 5260 | Loss: 1.1775 | LR: 5.00e-06
|
| 714 |
+
[2026-04-25 19:53:28] Epoch 1 | Step 5270 | Loss: 1.1773 | LR: 5.00e-06
|
| 715 |
+
[2026-04-25 19:53:31] Epoch 1 | Step 5280 | Loss: 1.1770 | LR: 5.00e-06
|
| 716 |
+
[2026-04-25 19:53:33] Epoch 1 | Step 5290 | Loss: 1.1766 | LR: 5.00e-06
|
| 717 |
+
[2026-04-25 19:53:36] Epoch 1 | Step 5300 | Loss: 1.1764 | LR: 5.00e-06
|
| 718 |
+
[2026-04-25 19:53:38] Epoch 1 | Step 5310 | Loss: 1.1764 | LR: 5.00e-06
|
| 719 |
+
[2026-04-25 19:53:41] Epoch 1 | Step 5320 | Loss: 1.1761 | LR: 5.00e-06
|
| 720 |
+
[2026-04-25 19:53:43] Epoch 1 | Step 5330 | Loss: 1.1760 | LR: 5.00e-06
|
| 721 |
+
[2026-04-25 19:53:46] Epoch 1 | Step 5340 | Loss: 1.1758 | LR: 5.00e-06
|
| 722 |
+
[2026-04-25 19:53:48] Epoch 1 | Step 5350 | Loss: 1.1756 | LR: 5.00e-06
|
| 723 |
+
[2026-04-25 19:53:51] Epoch 1 | Step 5360 | Loss: 1.1756 | LR: 5.00e-06
|
| 724 |
+
[2026-04-25 19:53:53] Epoch 1 | Step 5370 | Loss: 1.1754 | LR: 5.00e-06
|
| 725 |
+
[2026-04-25 19:53:55] Epoch 1 | Step 5380 | Loss: 1.1752 | LR: 5.00e-06
|
| 726 |
+
[2026-04-25 19:53:58] Epoch 1 | Step 5390 | Loss: 1.1749 | LR: 5.00e-06
|
| 727 |
+
[2026-04-25 19:54:01] Epoch 1 | Step 5400 | Loss: 1.1746 | LR: 5.00e-06
|
| 728 |
+
[2026-04-25 19:54:03] Epoch 1 | Step 5410 | Loss: 1.1745 | LR: 5.00e-06
|
| 729 |
+
[2026-04-25 19:54:06] Epoch 1 | Step 5420 | Loss: 1.1742 | LR: 5.00e-06
|
| 730 |
+
[2026-04-25 19:54:08] Epoch 1 | Step 5430 | Loss: 1.1741 | LR: 5.00e-06
|
| 731 |
+
[2026-04-25 19:54:11] Epoch 1 | Step 5440 | Loss: 1.1741 | LR: 5.00e-06
|
| 732 |
+
[2026-04-25 19:54:13] Epoch 1 | Step 5450 | Loss: 1.1742 | LR: 5.00e-06
|
| 733 |
+
[2026-04-25 19:54:16] Epoch 1 | Step 5460 | Loss: 1.1739 | LR: 5.00e-06
|
| 734 |
+
[2026-04-25 19:54:18] Epoch 1 | Step 5470 | Loss: 1.1736 | LR: 5.00e-06
|
| 735 |
+
[2026-04-25 19:54:21] Epoch 1 | Step 5480 | Loss: 1.1736 | LR: 5.00e-06
|
| 736 |
+
[2026-04-25 19:54:23] Epoch 1 | Step 5490 | Loss: 1.1735 | LR: 5.00e-06
|
| 737 |
+
[2026-04-25 19:54:26] Epoch 1 | Step 5500 | Loss: 1.1734 | LR: 5.00e-06
|
| 738 |
+
[2026-04-25 19:54:28] Epoch 1 | Step 5510 | Loss: 1.1735 | LR: 5.00e-06
|
| 739 |
+
[2026-04-25 19:54:31] Epoch 1 | Step 5520 | Loss: 1.1733 | LR: 5.00e-06
|
| 740 |
+
[2026-04-25 19:54:33] Epoch 1 | Step 5530 | Loss: 1.1732 | LR: 5.00e-06
|
| 741 |
+
[2026-04-25 19:54:36] Epoch 1 | Step 5540 | Loss: 1.1727 | LR: 5.00e-06
|
| 742 |
+
[2026-04-25 19:54:38] Epoch 1 | Step 5550 | Loss: 1.1726 | LR: 5.00e-06
|
| 743 |
+
[2026-04-25 19:54:41] Epoch 1 | Step 5560 | Loss: 1.1723 | LR: 5.00e-06
|
| 744 |
+
[2026-04-25 19:54:44] Epoch 1 | Step 5570 | Loss: 1.1724 | LR: 5.00e-06
|
| 745 |
+
[2026-04-25 19:54:46] Epoch 1 | Step 5580 | Loss: 1.1721 | LR: 5.00e-06
|
| 746 |
+
[2026-04-25 19:54:49] Epoch 1 | Step 5590 | Loss: 1.1718 | LR: 5.00e-06
|
| 747 |
+
[2026-04-25 19:54:51] Epoch 1 | Step 5600 | Loss: 1.1719 | LR: 5.00e-06
|
| 748 |
+
[2026-04-25 19:54:54] Epoch 1 | Step 5610 | Loss: 1.1719 | LR: 5.00e-06
|
| 749 |
+
[2026-04-25 19:54:56] Epoch 1 | Step 5620 | Loss: 1.1716 | LR: 5.00e-06
|
| 750 |
+
[2026-04-25 19:54:59] Epoch 1 | Step 5630 | Loss: 1.1715 | LR: 5.00e-06
|
| 751 |
+
[2026-04-25 19:55:02] Epoch 1 | Step 5640 | Loss: 1.1714 | LR: 5.00e-06
|
| 752 |
+
[2026-04-25 19:55:04] Epoch 1 | Step 5650 | Loss: 1.1713 | LR: 5.00e-06
|
| 753 |
+
[2026-04-25 19:55:07] Epoch 1 | Step 5660 | Loss: 1.1709 | LR: 5.00e-06
|
| 754 |
+
[2026-04-25 19:55:09] Epoch 1 | Step 5670 | Loss: 1.1708 | LR: 5.00e-06
|
| 755 |
+
[2026-04-25 19:55:12] Epoch 1 | Step 5680 | Loss: 1.1704 | LR: 5.00e-06
|
| 756 |
+
[2026-04-25 19:55:14] Epoch 1 | Step 5690 | Loss: 1.1704 | LR: 5.00e-06
|
| 757 |
+
[2026-04-25 19:55:16] Epoch 1 | Step 5700 | Loss: 1.1702 | LR: 5.00e-06
|
| 758 |
+
[2026-04-25 19:55:19] Epoch 1 | Step 5710 | Loss: 1.1702 | LR: 5.00e-06
|
| 759 |
+
[2026-04-25 19:55:22] Epoch 1 | Step 5720 | Loss: 1.1701 | LR: 5.00e-06
|
| 760 |
+
[2026-04-25 19:55:24] Epoch 1 | Step 5730 | Loss: 1.1700 | LR: 5.00e-06
|
| 761 |
+
[2026-04-25 19:55:27] Epoch 1 | Step 5740 | Loss: 1.1700 | LR: 5.00e-06
|
| 762 |
+
[2026-04-25 19:55:30] Epoch 1 | Step 5750 | Loss: 1.1698 | LR: 5.00e-06
|
| 763 |
+
[2026-04-25 19:55:33] Epoch 1 | Step 5760 | Loss: 1.1697 | LR: 5.00e-06
|
| 764 |
+
[2026-04-25 19:55:35] Epoch 1 | Step 5770 | Loss: 1.1697 | LR: 5.00e-06
|
| 765 |
+
[2026-04-25 19:55:37] Epoch 1 | Step 5780 | Loss: 1.1694 | LR: 5.00e-06
|
| 766 |
+
[2026-04-25 19:55:40] Epoch 1 | Step 5790 | Loss: 1.1695 | LR: 5.00e-06
|
| 767 |
+
[2026-04-25 19:55:42] Epoch 1 | Step 5800 | Loss: 1.1697 | LR: 5.00e-06
|
| 768 |
+
[2026-04-25 19:55:45] Epoch 1 | Step 5810 | Loss: 1.1696 | LR: 5.00e-06
|
| 769 |
+
[2026-04-25 19:55:48] Epoch 1 | Step 5820 | Loss: 1.1693 | LR: 5.00e-06
|
| 770 |
+
[2026-04-25 19:55:50] Epoch 1 | Step 5830 | Loss: 1.1690 | LR: 5.00e-06
|
| 771 |
+
[2026-04-25 19:55:52] Epoch 1 | Step 5840 | Loss: 1.1691 | LR: 5.00e-06
|
| 772 |
+
[2026-04-25 19:55:55] Epoch 1 | Step 5850 | Loss: 1.1690 | LR: 5.00e-06
|
| 773 |
+
[2026-04-25 19:55:57] Epoch 1 | Step 5860 | Loss: 1.1688 | LR: 5.00e-06
|
| 774 |
+
[2026-04-25 19:56:00] Epoch 1 | Step 5870 | Loss: 1.1688 | LR: 5.00e-06
|
| 775 |
+
[2026-04-25 19:56:03] Epoch 1 | Step 5880 | Loss: 1.1688 | LR: 5.00e-06
|
| 776 |
+
[2026-04-25 19:56:05] Epoch 1 | Step 5890 | Loss: 1.1687 | LR: 5.00e-06
|
| 777 |
+
[2026-04-25 19:56:08] Epoch 1 | Step 5900 | Loss: 1.1685 | LR: 5.00e-06
|
| 778 |
+
[2026-04-25 19:56:11] Epoch 1 | Step 5910 | Loss: 1.1684 | LR: 5.00e-06
|
| 779 |
+
[2026-04-25 19:56:13] Epoch 1 | Step 5920 | Loss: 1.1681 | LR: 5.00e-06
|
| 780 |
+
[2026-04-25 19:56:16] Epoch 1 | Step 5930 | Loss: 1.1681 | LR: 5.00e-06
|
| 781 |
+
[2026-04-25 19:56:19] Epoch 1 | Step 5940 | Loss: 1.1679 | LR: 5.00e-06
|
| 782 |
+
[2026-04-25 19:56:21] Epoch 1 | Step 5950 | Loss: 1.1680 | LR: 5.00e-06
|
| 783 |
+
[2026-04-25 19:56:24] Epoch 1 | Step 5960 | Loss: 1.1679 | LR: 5.00e-06
|
| 784 |
+
[2026-04-25 19:56:26] Epoch 1 | Step 5970 | Loss: 1.1679 | LR: 5.00e-06
|
| 785 |
+
[2026-04-25 19:56:29] Epoch 1 | Step 5980 | Loss: 1.1677 | LR: 5.00e-06
|
| 786 |
+
[2026-04-25 19:56:32] Epoch 1 | Step 5990 | Loss: 1.1678 | LR: 5.00e-06
|
| 787 |
+
[2026-04-25 19:56:34] Epoch 1 | Step 6000 | Loss: 1.1676 | LR: 5.00e-06
|
| 788 |
+
[2026-04-25 19:56:35] Validation | Batch 10/84 | Loss: 1.0538
|
| 789 |
+
[2026-04-25 19:56:35] Validation | Batch 20/84 | Loss: 1.0550
|
| 790 |
+
[2026-04-25 19:56:35] Validation | Batch 30/84 | Loss: 1.1355
|
| 791 |
+
[2026-04-25 19:56:36] Validation | Batch 40/84 | Loss: 1.1377
|
| 792 |
+
[2026-04-25 19:56:36] Validation | Batch 50/84 | Loss: 1.1304
|
| 793 |
+
[2026-04-25 19:56:37] Validation | Batch 60/84 | Loss: 1.1020
|
| 794 |
+
[2026-04-25 19:56:37] Validation | Batch 70/84 | Loss: 1.0857
|
| 795 |
+
[2026-04-25 19:56:38] Validation | Batch 80/84 | Loss: 1.0935
|
| 796 |
+
[2026-04-25 19:56:38] Validation | Batch 84/84 | Loss: 1.0843
|
| 797 |
+
[2026-04-25 19:56:38] Validation | Loss: 1.0843 | PPL: 3.02 | Time: 3.76s
|
| 798 |
+
[2026-04-25 19:56:41] New best model saved! Val loss: 1.0843
|
| 799 |
+
[2026-04-25 19:56:43] Epoch 1 | Step 6010 | Loss: 1.1676 | LR: 5.00e-06
|
| 800 |
+
[2026-04-25 19:56:46] Epoch 1 | Step 6020 | Loss: 1.1674 | LR: 5.00e-06
|
| 801 |
+
[2026-04-25 19:56:48] Epoch 1 | Step 6030 | Loss: 1.1675 | LR: 5.00e-06
|
| 802 |
+
[2026-04-25 19:56:51] Epoch 1 | Step 6040 | Loss: 1.1674 | LR: 5.00e-06
|
| 803 |
+
[2026-04-25 19:56:54] Epoch 1 | Step 6050 | Loss: 1.1674 | LR: 5.00e-06
|
| 804 |
+
[2026-04-25 19:56:56] Epoch 1 | Step 6060 | Loss: 1.1673 | LR: 5.00e-06
|
| 805 |
+
[2026-04-25 19:56:59] Epoch 1 | Step 6070 | Loss: 1.1670 | LR: 5.00e-06
|
| 806 |
+
[2026-04-25 19:57:01] Epoch 1 | Step 6080 | Loss: 1.1670 | LR: 5.00e-06
|
| 807 |
+
[2026-04-25 19:57:04] Epoch 1 | Step 6090 | Loss: 1.1671 | LR: 5.00e-06
|
| 808 |
+
[2026-04-25 19:57:07] Epoch 1 | Step 6100 | Loss: 1.1672 | LR: 5.00e-06
|
| 809 |
+
[2026-04-25 19:57:09] Epoch 1 | Step 6110 | Loss: 1.1671 | LR: 5.00e-06
|
| 810 |
+
[2026-04-25 19:57:12] Epoch 1 | Step 6120 | Loss: 1.1669 | LR: 5.00e-06
|
| 811 |
+
[2026-04-25 19:57:14] Epoch 1 | Step 6130 | Loss: 1.1667 | LR: 5.00e-06
|
| 812 |
+
[2026-04-25 19:57:17] Epoch 1 | Step 6140 | Loss: 1.1662 | LR: 5.00e-06
|
| 813 |
+
[2026-04-25 19:57:19] Epoch 1 | Step 6150 | Loss: 1.1660 | LR: 5.00e-06
|
| 814 |
+
[2026-04-25 19:57:22] Epoch 1 | Step 6160 | Loss: 1.1659 | LR: 5.00e-06
|
| 815 |
+
[2026-04-25 19:57:24] Epoch 1 | Step 6170 | Loss: 1.1660 | LR: 5.00e-06
|
| 816 |
+
[2026-04-25 19:57:27] Epoch 1 | Step 6180 | Loss: 1.1657 | LR: 5.00e-06
|
| 817 |
+
[2026-04-25 19:57:29] Epoch 1 | Step 6190 | Loss: 1.1655 | LR: 5.00e-06
|
| 818 |
+
[2026-04-25 19:57:32] Epoch 1 | Step 6200 | Loss: 1.1653 | LR: 5.00e-06
|
| 819 |
+
[2026-04-25 19:57:35] Epoch 1 | Step 6210 | Loss: 1.1653 | LR: 5.00e-06
|
| 820 |
+
[2026-04-25 19:57:38] Epoch 1 | Step 6220 | Loss: 1.1653 | LR: 5.00e-06
|
| 821 |
+
[2026-04-25 19:57:40] Epoch 1 | Step 6230 | Loss: 1.1650 | LR: 5.00e-06
|
| 822 |
+
[2026-04-25 19:57:43] Epoch 1 | Step 6240 | Loss: 1.1650 | LR: 5.00e-06
|
| 823 |
+
[2026-04-25 19:57:45] Epoch 1 | Step 6250 | Loss: 1.1646 | LR: 5.00e-06
|
| 824 |
+
[2026-04-25 19:57:48] Epoch 1 | Step 6260 | Loss: 1.1646 | LR: 5.00e-06
|
| 825 |
+
[2026-04-25 19:57:50] Epoch 1 | Step 6270 | Loss: 1.1645 | LR: 5.00e-06
|
| 826 |
+
[2026-04-25 19:57:53] Epoch 1 | Step 6280 | Loss: 1.1641 | LR: 5.00e-06
|
| 827 |
+
[2026-04-25 19:57:55] Epoch 1 | Step 6290 | Loss: 1.1640 | LR: 5.00e-06
|
| 828 |
+
[2026-04-25 19:57:58] Epoch 1 | Step 6300 | Loss: 1.1639 | LR: 5.00e-06
|
| 829 |
+
[2026-04-25 19:58:01] Epoch 1 | Step 6310 | Loss: 1.1639 | LR: 5.00e-06
|
| 830 |
+
[2026-04-25 19:58:03] Epoch 1 | Step 6320 | Loss: 1.1638 | LR: 5.00e-06
|
| 831 |
+
[2026-04-25 19:58:06] Epoch 1 | Step 6330 | Loss: 1.1640 | LR: 5.00e-06
|
| 832 |
+
[2026-04-25 19:58:08] Epoch 1 | Step 6340 | Loss: 1.1640 | LR: 5.00e-06
|
| 833 |
+
[2026-04-25 19:58:11] Epoch 1 | Step 6350 | Loss: 1.1639 | LR: 5.00e-06
|
| 834 |
+
[2026-04-25 19:58:13] Epoch 1 | Step 6360 | Loss: 1.1640 | LR: 5.00e-06
|
| 835 |
+
[2026-04-25 19:58:16] Epoch 1 | Step 6370 | Loss: 1.1639 | LR: 5.00e-06
|
| 836 |
+
[2026-04-25 19:58:18] Epoch 1 | Step 6380 | Loss: 1.1638 | LR: 5.00e-06
|
| 837 |
+
[2026-04-25 19:58:21] Epoch 1 | Step 6390 | Loss: 1.1635 | LR: 5.00e-06
|
| 838 |
+
[2026-04-25 19:58:23] Epoch 1 | Step 6400 | Loss: 1.1633 | LR: 5.00e-06
|
| 839 |
+
[2026-04-25 19:58:25] Epoch 1 | Step 6410 | Loss: 1.1632 | LR: 5.00e-06
|
| 840 |
+
[2026-04-25 19:58:28] Epoch 1 | Step 6420 | Loss: 1.1630 | LR: 5.00e-06
|
| 841 |
+
[2026-04-25 19:58:30] Epoch 1 | Step 6430 | Loss: 1.1629 | LR: 5.00e-06
|
| 842 |
+
[2026-04-25 19:58:33] Epoch 1 | Step 6440 | Loss: 1.1628 | LR: 5.00e-06
|
| 843 |
+
[2026-04-25 19:58:35] Epoch 1 | Step 6450 | Loss: 1.1627 | LR: 5.00e-06
|
| 844 |
+
[2026-04-25 19:58:38] Epoch 1 | Step 6460 | Loss: 1.1623 | LR: 5.00e-06
|
| 845 |
+
[2026-04-25 19:58:40] Epoch 1 | Step 6470 | Loss: 1.1622 | LR: 5.00e-06
|
| 846 |
+
[2026-04-25 19:58:43] Epoch 1 | Step 6480 | Loss: 1.1622 | LR: 5.00e-06
|
| 847 |
+
[2026-04-25 19:58:45] Epoch 1 | Step 6490 | Loss: 1.1623 | LR: 5.00e-06
|
| 848 |
+
[2026-04-25 19:58:48] Epoch 1 | Step 6500 | Loss: 1.1620 | LR: 5.00e-06
|
| 849 |
+
[2026-04-25 19:58:50] Epoch 1 | Step 6510 | Loss: 1.1618 | LR: 5.00e-06
|
| 850 |
+
[2026-04-25 19:58:53] Epoch 1 | Step 6520 | Loss: 1.1615 | LR: 5.00e-06
|
| 851 |
+
[2026-04-25 19:58:55] Epoch 1 | Step 6530 | Loss: 1.1612 | LR: 5.00e-06
|
| 852 |
+
[2026-04-25 19:58:58] Epoch 1 | Step 6540 | Loss: 1.1610 | LR: 5.00e-06
|
| 853 |
+
[2026-04-25 19:59:00] Epoch 1 | Step 6550 | Loss: 1.1608 | LR: 5.00e-06
|
| 854 |
+
[2026-04-25 19:59:03] Epoch 1 | Step 6560 | Loss: 1.1607 | LR: 5.00e-06
|
| 855 |
+
[2026-04-25 19:59:05] Epoch 1 | Step 6570 | Loss: 1.1606 | LR: 5.00e-06
|
| 856 |
+
[2026-04-25 19:59:08] Epoch 1 | Step 6580 | Loss: 1.1606 | LR: 5.00e-06
|
| 857 |
+
[2026-04-25 19:59:10] Epoch 1 | Step 6590 | Loss: 1.1603 | LR: 5.00e-06
|
| 858 |
+
[2026-04-25 19:59:13] Epoch 1 | Step 6600 | Loss: 1.1602 | LR: 5.00e-06
|
| 859 |
+
[2026-04-25 19:59:16] Epoch 1 | Step 6610 | Loss: 1.1601 | LR: 5.00e-06
|
| 860 |
+
[2026-04-25 19:59:18] Epoch 1 | Step 6620 | Loss: 1.1600 | LR: 5.00e-06
|
| 861 |
+
[2026-04-25 19:59:21] Epoch 1 | Step 6630 | Loss: 1.1598 | LR: 5.00e-06
|
| 862 |
+
[2026-04-25 19:59:23] Epoch 1 | Step 6640 | Loss: 1.1598 | LR: 5.00e-06
|
| 863 |
+
[2026-04-25 19:59:25] Epoch 1 | Step 6650 | Loss: 1.1598 | LR: 5.00e-06
|
| 864 |
+
[2026-04-25 19:59:28] Epoch 1 | Step 6660 | Loss: 1.1595 | LR: 5.00e-06
|
| 865 |
+
[2026-04-25 19:59:31] Epoch 1 | Step 6670 | Loss: 1.1594 | LR: 5.00e-06
|
| 866 |
+
[2026-04-25 19:59:33] Epoch 1 | Step 6680 | Loss: 1.1593 | LR: 5.00e-06
|
| 867 |
+
[2026-04-25 19:59:36] Epoch 1 | Step 6690 | Loss: 1.1593 | LR: 5.00e-06
|
| 868 |
+
[2026-04-25 19:59:38] Epoch 1 | Step 6700 | Loss: 1.1592 | LR: 5.00e-06
|
| 869 |
+
[2026-04-25 19:59:41] Epoch 1 | Step 6710 | Loss: 1.1591 | LR: 5.00e-06
|
| 870 |
+
[2026-04-25 19:59:43] Epoch 1 | Step 6720 | Loss: 1.1590 | LR: 5.00e-06
|
| 871 |
+
[2026-04-25 19:59:46] Epoch 1 | Step 6730 | Loss: 1.1591 | LR: 5.00e-06
|
| 872 |
+
[2026-04-25 19:59:48] Epoch 1 | Step 6740 | Loss: 1.1589 | LR: 5.00e-06
|
| 873 |
+
[2026-04-25 19:59:51] Epoch 1 | Step 6750 | Loss: 1.1587 | LR: 5.00e-06
|
| 874 |
+
[2026-04-25 19:59:53] Epoch 1 | Step 6760 | Loss: 1.1587 | LR: 5.00e-06
|
| 875 |
+
[2026-04-25 19:59:56] Epoch 1 | Step 6770 | Loss: 1.1586 | LR: 5.00e-06
|
| 876 |
+
[2026-04-25 19:59:58] Epoch 1 | Step 6780 | Loss: 1.1585 | LR: 5.00e-06
|
| 877 |
+
[2026-04-25 20:00:01] Epoch 1 | Step 6790 | Loss: 1.1585 | LR: 5.00e-06
|
| 878 |
+
[2026-04-25 20:00:03] Epoch 1 | Step 6800 | Loss: 1.1586 | LR: 5.00e-06
|
| 879 |
+
[2026-04-25 20:00:06] Epoch 1 | Step 6810 | Loss: 1.1585 | LR: 5.00e-06
|
| 880 |
+
[2026-04-25 20:00:08] Epoch 1 | Step 6820 | Loss: 1.1586 | LR: 5.00e-06
|
| 881 |
+
[2026-04-25 20:00:11] Epoch 1 | Step 6830 | Loss: 1.1586 | LR: 5.00e-06
|
| 882 |
+
[2026-04-25 20:00:14] Epoch 1 | Step 6840 | Loss: 1.1586 | LR: 5.00e-06
|
| 883 |
+
[2026-04-25 20:00:16] Epoch 1 | Step 6850 | Loss: 1.1585 | LR: 5.00e-06
|
| 884 |
+
[2026-04-25 20:00:19] Epoch 1 | Step 6860 | Loss: 1.1584 | LR: 5.00e-06
|
| 885 |
+
[2026-04-25 20:00:21] Epoch 1 | Step 6870 | Loss: 1.1583 | LR: 5.00e-06
|
| 886 |
+
[2026-04-25 20:00:24] Epoch 1 | Step 6880 | Loss: 1.1581 | LR: 5.00e-06
|
| 887 |
+
[2026-04-25 20:00:26] Epoch 1 | Step 6890 | Loss: 1.1582 | LR: 5.00e-06
|
| 888 |
+
[2026-04-25 20:00:29] Epoch 1 | Step 6900 | Loss: 1.1582 | LR: 5.00e-06
|
| 889 |
+
[2026-04-25 20:00:31] Epoch 1 | Step 6910 | Loss: 1.1578 | LR: 5.00e-06
|
| 890 |
+
[2026-04-25 20:00:34] Epoch 1 | Step 6920 | Loss: 1.1577 | LR: 5.00e-06
|
| 891 |
+
[2026-04-25 20:00:36] Epoch 1 | Step 6930 | Loss: 1.1577 | LR: 5.00e-06
|
| 892 |
+
[2026-04-25 20:00:39] Epoch 1 | Step 6940 | Loss: 1.1575 | LR: 5.00e-06
|
| 893 |
+
[2026-04-25 20:00:41] Epoch 1 | Step 6950 | Loss: 1.1574 | LR: 5.00e-06
|
| 894 |
+
[2026-04-25 20:00:44] Epoch 1 | Step 6960 | Loss: 1.1574 | LR: 5.00e-06
|
| 895 |
+
[2026-04-25 20:00:47] Epoch 1 | Step 6970 | Loss: 1.1573 | LR: 5.00e-06
|
| 896 |
+
[2026-04-25 20:00:49] Epoch 1 | Step 6980 | Loss: 1.1572 | LR: 5.00e-06
|
| 897 |
+
[2026-04-25 20:00:51] Epoch 1 | Step 6990 | Loss: 1.1570 | LR: 5.00e-06
|
| 898 |
+
[2026-04-25 20:00:54] Epoch 1 | Step 7000 | Loss: 1.1568 | LR: 5.00e-06
|
| 899 |
+
[2026-04-25 20:00:56] Epoch 1 | Step 7010 | Loss: 1.1567 | LR: 5.00e-06
|
| 900 |
+
[2026-04-25 20:00:59] Epoch 1 | Step 7020 | Loss: 1.1567 | LR: 5.00e-06
|
| 901 |
+
[2026-04-25 20:01:01] Epoch 1 | Step 7030 | Loss: 1.1566 | LR: 5.00e-06
|
| 902 |
+
[2026-04-25 20:01:04] Epoch 1 | Step 7040 | Loss: 1.1566 | LR: 5.00e-06
|
| 903 |
+
[2026-04-25 20:01:06] Epoch 1 | Step 7050 | Loss: 1.1564 | LR: 5.00e-06
|
| 904 |
+
[2026-04-25 20:01:09] Epoch 1 | Step 7060 | Loss: 1.1563 | LR: 5.00e-06
|
| 905 |
+
[2026-04-25 20:01:11] Epoch 1 | Step 7070 | Loss: 1.1564 | LR: 5.00e-06
|
| 906 |
+
[2026-04-25 20:01:14] Epoch 1 | Step 7080 | Loss: 1.1561 | LR: 5.00e-06
|
| 907 |
+
[2026-04-25 20:01:16] Epoch 1 | Step 7090 | Loss: 1.1561 | LR: 5.00e-06
|
| 908 |
+
[2026-04-25 20:01:19] Epoch 1 | Step 7100 | Loss: 1.1558 | LR: 5.00e-06
|
| 909 |
+
[2026-04-25 20:01:22] Epoch 1 | Step 7110 | Loss: 1.1557 | LR: 5.00e-06
|
| 910 |
+
[2026-04-25 20:01:24] Epoch 1 | Step 7120 | Loss: 1.1558 | LR: 5.00e-06
|
| 911 |
+
[2026-04-25 20:01:27] Epoch 1 | Step 7130 | Loss: 1.1555 | LR: 5.00e-06
|
| 912 |
+
[2026-04-25 20:01:29] Epoch 1 | Step 7140 | Loss: 1.1553 | LR: 5.00e-06
|
| 913 |
+
[2026-04-25 20:01:31] Epoch 1 | Step 7150 | Loss: 1.1555 | LR: 5.00e-06
|
| 914 |
+
[2026-04-25 20:01:34] Epoch 1 | Step 7160 | Loss: 1.1552 | LR: 5.00e-06
|
| 915 |
+
[2026-04-25 20:01:37] Epoch 1 | Step 7170 | Loss: 1.1552 | LR: 5.00e-06
|
| 916 |
+
[2026-04-25 20:01:39] Epoch 1 | Step 7180 | Loss: 1.1551 | LR: 5.00e-06
|
| 917 |
+
[2026-04-25 20:01:42] Epoch 1 | Step 7190 | Loss: 1.1552 | LR: 5.00e-06
|
| 918 |
+
[2026-04-25 20:01:44] Epoch 1 | Step 7200 | Loss: 1.1550 | LR: 5.00e-06
|
| 919 |
+
[2026-04-25 20:01:47] Epoch 1 | Step 7210 | Loss: 1.1548 | LR: 5.00e-06
|
| 920 |
+
[2026-04-25 20:01:50] Epoch 1 | Step 7220 | Loss: 1.1548 | LR: 5.00e-06
|
| 921 |
+
[2026-04-25 20:01:52] Epoch 1 | Step 7230 | Loss: 1.1548 | LR: 5.00e-06
|
| 922 |
+
[2026-04-25 20:01:55] Epoch 1 | Step 7240 | Loss: 1.1547 | LR: 5.00e-06
|
| 923 |
+
[2026-04-25 20:01:58] Epoch 1 | Step 7250 | Loss: 1.1546 | LR: 5.00e-06
|
| 924 |
+
[2026-04-25 20:02:00] Epoch 1 | Step 7260 | Loss: 1.1545 | LR: 5.00e-06
|
| 925 |
+
[2026-04-25 20:02:03] Epoch 1 | Step 7270 | Loss: 1.1545 | LR: 5.00e-06
|
| 926 |
+
[2026-04-25 20:02:05] Epoch 1 | Step 7280 | Loss: 1.1545 | LR: 5.00e-06
|
| 927 |
+
[2026-04-25 20:02:08] Epoch 1 | Step 7290 | Loss: 1.1542 | LR: 5.00e-06
|
| 928 |
+
[2026-04-25 20:02:10] Epoch 1 | Step 7300 | Loss: 1.1541 | LR: 5.00e-06
|
| 929 |
+
[2026-04-25 20:02:13] Epoch 1 | Step 7310 | Loss: 1.1539 | LR: 5.00e-06
|
| 930 |
+
[2026-04-25 20:02:16] Epoch 1 | Step 7320 | Loss: 1.1536 | LR: 5.00e-06
|
| 931 |
+
[2026-04-25 20:02:18] Epoch 1 | Step 7330 | Loss: 1.1536 | LR: 5.00e-06
|
| 932 |
+
[2026-04-25 20:02:21] Epoch 1 | Step 7340 | Loss: 1.1537 | LR: 5.00e-06
|
| 933 |
+
[2026-04-25 20:02:23] Epoch 1 | Step 7350 | Loss: 1.1537 | LR: 5.00e-06
|
| 934 |
+
[2026-04-25 20:02:26] Epoch 1 | Step 7360 | Loss: 1.1535 | LR: 5.00e-06
|
| 935 |
+
[2026-04-25 20:02:29] Epoch 1 | Step 7370 | Loss: 1.1532 | LR: 5.00e-06
|
| 936 |
+
[2026-04-25 20:02:31] Epoch 1 | Step 7380 | Loss: 1.1530 | LR: 5.00e-06
|
| 937 |
+
[2026-04-25 20:02:34] Epoch 1 | Step 7390 | Loss: 1.1528 | LR: 5.00e-06
|
| 938 |
+
[2026-04-25 20:02:36] Epoch 1 | Step 7400 | Loss: 1.1527 | LR: 5.00e-06
|
| 939 |
+
[2026-04-25 20:02:39] Epoch 1 | Step 7410 | Loss: 1.1528 | LR: 5.00e-06
|
| 940 |
+
[2026-04-25 20:02:41] Epoch 1 | Step 7420 | Loss: 1.1527 | LR: 5.00e-06
|
| 941 |
+
[2026-04-25 20:02:44] Epoch 1 | Step 7430 | Loss: 1.1525 | LR: 5.00e-06
|
| 942 |
+
[2026-04-25 20:02:46] Epoch 1 | Step 7440 | Loss: 1.1525 | LR: 5.00e-06
|
| 943 |
+
[2026-04-25 20:02:49] Epoch 1 | Step 7450 | Loss: 1.1523 | LR: 5.00e-06
|
| 944 |
+
[2026-04-25 20:02:51] Epoch 1 | Step 7460 | Loss: 1.1522 | LR: 5.00e-06
|
| 945 |
+
[2026-04-25 20:02:54] Epoch 1 | Step 7470 | Loss: 1.1521 | LR: 5.00e-06
|
| 946 |
+
[2026-04-25 20:02:56] Epoch 1 | Step 7480 | Loss: 1.1521 | LR: 5.00e-06
|
| 947 |
+
[2026-04-25 20:02:59] Epoch 1 | Step 7490 | Loss: 1.1521 | LR: 5.00e-06
|
| 948 |
+
[2026-04-25 20:03:01] Epoch 1 | Step 7500 | Loss: 1.1521 | LR: 5.00e-06
|
| 949 |
+
[2026-04-25 20:03:04] Epoch 1 | Step 7510 | Loss: 1.1521 | LR: 5.00e-06
|
| 950 |
+
[2026-04-25 20:03:06] Epoch 1 | Step 7520 | Loss: 1.1520 | LR: 5.00e-06
|
| 951 |
+
[2026-04-25 20:03:09] Epoch 1 | Step 7530 | Loss: 1.1518 | LR: 5.00e-06
|
| 952 |
+
[2026-04-25 20:03:11] Epoch 1 | Step 7540 | Loss: 1.1517 | LR: 5.00e-06
|
| 953 |
+
[2026-04-25 20:03:14] Epoch 1 | Step 7550 | Loss: 1.1517 | LR: 5.00e-06
|
| 954 |
+
[2026-04-25 20:03:16] Epoch 1 | Step 7560 | Loss: 1.1516 | LR: 5.00e-06
|
| 955 |
+
[2026-04-25 20:03:19] Epoch 1 | Step 7570 | Loss: 1.1515 | LR: 5.00e-06
|
| 956 |
+
[2026-04-25 20:03:21] Epoch 1 | Step 7580 | Loss: 1.1514 | LR: 5.00e-06
|
| 957 |
+
[2026-04-25 20:03:24] Epoch 1 | Step 7590 | Loss: 1.1512 | LR: 5.00e-06
|
| 958 |
+
[2026-04-25 20:03:26] Epoch 1 | Step 7600 | Loss: 1.1511 | LR: 5.00e-06
|
| 959 |
+
[2026-04-25 20:03:28] Epoch 1 | Step 7610 | Loss: 1.1510 | LR: 5.00e-06
|
| 960 |
+
[2026-04-25 20:03:31] Epoch 1 | Step 7620 | Loss: 1.1508 | LR: 5.00e-06
|
| 961 |
+
[2026-04-25 20:03:33] Epoch 1 | Step 7630 | Loss: 1.1507 | LR: 5.00e-06
|
| 962 |
+
[2026-04-25 20:03:36] Epoch 1 | Step 7640 | Loss: 1.1506 | LR: 5.00e-06
|
| 963 |
+
[2026-04-25 20:03:39] Epoch 1 | Step 7650 | Loss: 1.1504 | LR: 5.00e-06
|
| 964 |
+
[2026-04-25 20:03:41] Epoch 1 | Step 7660 | Loss: 1.1502 | LR: 5.00e-06
|
| 965 |
+
[2026-04-25 20:03:44] Epoch 1 | Step 7670 | Loss: 1.1500 | LR: 5.00e-06
|
| 966 |
+
[2026-04-25 20:03:46] Epoch 1 | Step 7680 | Loss: 1.1499 | LR: 5.00e-06
|
| 967 |
+
[2026-04-25 20:03:49] Epoch 1 | Step 7690 | Loss: 1.1500 | LR: 5.00e-06
|
| 968 |
+
[2026-04-25 20:03:52] Epoch 1 | Step 7700 | Loss: 1.1498 | LR: 5.00e-06
|
| 969 |
+
[2026-04-25 20:03:55] Epoch 1 | Step 7710 | Loss: 1.1495 | LR: 5.00e-06
|
| 970 |
+
[2026-04-25 20:03:57] Epoch 1 | Step 7720 | Loss: 1.1496 | LR: 5.00e-06
|
| 971 |
+
[2026-04-25 20:04:00] Epoch 1 | Step 7730 | Loss: 1.1497 | LR: 5.00e-06
|
| 972 |
+
[2026-04-25 20:04:02] Epoch 1 | Step 7740 | Loss: 1.1498 | LR: 5.00e-06
|
| 973 |
+
[2026-04-25 20:04:05] Epoch 1 | Step 7750 | Loss: 1.1498 | LR: 5.00e-06
|
| 974 |
+
[2026-04-25 20:04:08] Epoch 1 | Step 7760 | Loss: 1.1496 | LR: 5.00e-06
|
| 975 |
+
[2026-04-25 20:04:10] Epoch 1 | Step 7770 | Loss: 1.1494 | LR: 5.00e-06
|
| 976 |
+
[2026-04-25 20:04:13] Epoch 1 | Step 7780 | Loss: 1.1493 | LR: 5.00e-06
|
| 977 |
+
[2026-04-25 20:04:15] Epoch 1 | Step 7790 | Loss: 1.1492 | LR: 5.00e-06
|
| 978 |
+
[2026-04-25 20:04:18] Epoch 1 | Step 7800 | Loss: 1.1490 | LR: 5.00e-06
|
| 979 |
+
[2026-04-25 20:04:20] Epoch 1 | Step 7810 | Loss: 1.1491 | LR: 5.00e-06
|
| 980 |
+
[2026-04-25 20:04:23] Epoch 1 | Step 7820 | Loss: 1.1491 | LR: 5.00e-06
|
| 981 |
+
[2026-04-25 20:04:25] Epoch 1 | Step 7830 | Loss: 1.1490 | LR: 5.00e-06
|
| 982 |
+
[2026-04-25 20:04:28] Epoch 1 | Step 7840 | Loss: 1.1488 | LR: 5.00e-06
|
| 983 |
+
[2026-04-25 20:04:31] Epoch 1 | Step 7850 | Loss: 1.1485 | LR: 5.00e-06
|
| 984 |
+
[2026-04-25 20:04:33] Epoch 1 | Step 7860 | Loss: 1.1485 | LR: 5.00e-06
|
| 985 |
+
[2026-04-25 20:04:36] Epoch 1 | Step 7870 | Loss: 1.1484 | LR: 5.00e-06
|
| 986 |
+
[2026-04-25 20:04:38] Epoch 1 | Step 7880 | Loss: 1.1484 | LR: 5.00e-06
|
| 987 |
+
[2026-04-25 20:04:41] Epoch 1 | Step 7890 | Loss: 1.1483 | LR: 5.00e-06
|
| 988 |
+
[2026-04-25 20:04:44] Epoch 1 | Step 7900 | Loss: 1.1483 | LR: 5.00e-06
|
| 989 |
+
[2026-04-25 20:04:47] Epoch 1 | Step 7910 | Loss: 1.1483 | LR: 5.00e-06
|
| 990 |
+
[2026-04-25 20:04:50] Epoch 1 | Step 7920 | Loss: 1.1482 | LR: 5.00e-06
|
| 991 |
+
[2026-04-25 20:04:52] Epoch 1 | Step 7930 | Loss: 1.1483 | LR: 5.00e-06
|
| 992 |
+
[2026-04-25 20:04:55] Epoch 1 | Step 7940 | Loss: 1.1482 | LR: 5.00e-06
|
| 993 |
+
[2026-04-25 20:04:57] Epoch 1 | Step 7950 | Loss: 1.1484 | LR: 5.00e-06
|
| 994 |
+
[2026-04-25 20:05:00] Epoch 1 | Step 7960 | Loss: 1.1484 | LR: 5.00e-06
|
| 995 |
+
[2026-04-25 20:05:02] Epoch 1 | Step 7970 | Loss: 1.1484 | LR: 5.00e-06
|
| 996 |
+
[2026-04-25 20:05:05] Epoch 1 | Step 7980 | Loss: 1.1481 | LR: 5.00e-06
|
| 997 |
+
[2026-04-25 20:05:07] Epoch 1 | Step 7990 | Loss: 1.1481 | LR: 5.00e-06
|
| 998 |
+
[2026-04-25 20:05:10] Epoch 1 | Step 8000 | Loss: 1.1480 | LR: 5.00e-06
|
| 999 |
+
[2026-04-25 20:05:10] Validation | Batch 10/84 | Loss: 1.0434
|
| 1000 |
+
[2026-04-25 20:05:11] Validation | Batch 20/84 | Loss: 1.0455
|
| 1001 |
+
[2026-04-25 20:05:11] Validation | Batch 30/84 | Loss: 1.1267
|
| 1002 |
+
[2026-04-25 20:05:12] Validation | Batch 40/84 | Loss: 1.1299
|
| 1003 |
+
[2026-04-25 20:05:12] Validation | Batch 50/84 | Loss: 1.1227
|
| 1004 |
+
[2026-04-25 20:05:12] Validation | Batch 60/84 | Loss: 1.0947
|
| 1005 |
+
[2026-04-25 20:05:13] Validation | Batch 70/84 | Loss: 1.0789
|
| 1006 |
+
[2026-04-25 20:05:13] Validation | Batch 80/84 | Loss: 1.0866
|
| 1007 |
+
[2026-04-25 20:05:13] Validation | Batch 84/84 | Loss: 1.0775
|
| 1008 |
+
[2026-04-25 20:05:14] Validation | Loss: 1.0775 | PPL: 3.00 | Time: 3.76s
|
| 1009 |
+
[2026-04-25 20:05:16] New best model saved! Val loss: 1.0775
|
| 1010 |
+
[2026-04-25 20:05:19] Epoch 1 | Step 8010 | Loss: 1.1479 | LR: 5.00e-06
|
| 1011 |
+
[2026-04-25 20:05:22] Epoch 1 | Step 8020 | Loss: 1.1477 | LR: 5.00e-06
|
| 1012 |
+
[2026-04-25 20:05:25] Epoch 1 | Step 8030 | Loss: 1.1475 | LR: 5.00e-06
|
| 1013 |
+
[2026-04-25 20:05:28] Epoch 1 | Step 8040 | Loss: 1.1476 | LR: 5.00e-06
|
| 1014 |
+
[2026-04-25 20:05:30] Epoch 1 | Step 8050 | Loss: 1.1474 | LR: 5.00e-06
|
| 1015 |
+
[2026-04-25 20:05:33] Epoch 1 | Step 8060 | Loss: 1.1473 | LR: 5.00e-06
|
| 1016 |
+
[2026-04-25 20:05:35] Epoch 1 | Step 8070 | Loss: 1.1472 | LR: 5.00e-06
|
| 1017 |
+
[2026-04-25 20:05:38] Epoch 1 | Step 8080 | Loss: 1.1471 | LR: 5.00e-06
|
| 1018 |
+
[2026-04-25 20:05:40] Epoch 1 | Step 8090 | Loss: 1.1469 | LR: 5.00e-06
|
| 1019 |
+
[2026-04-25 20:05:43] Epoch 1 | Step 8100 | Loss: 1.1468 | LR: 5.00e-06
|
| 1020 |
+
[2026-04-25 20:05:46] Epoch 1 | Step 8110 | Loss: 1.1469 | LR: 5.00e-06
|
| 1021 |
+
[2026-04-25 20:05:48] Epoch 1 | Step 8120 | Loss: 1.1468 | LR: 5.00e-06
|
| 1022 |
+
[2026-04-25 20:05:51] Epoch 1 | Step 8130 | Loss: 1.1467 | LR: 5.00e-06
|
| 1023 |
+
[2026-04-25 20:05:53] Epoch 1 | Step 8140 | Loss: 1.1467 | LR: 5.00e-06
|
| 1024 |
+
[2026-04-25 20:05:56] Epoch 1 | Step 8150 | Loss: 1.1467 | LR: 5.00e-06
|
| 1025 |
+
[2026-04-25 20:05:58] Epoch 1 | Step 8160 | Loss: 1.1465 | LR: 5.00e-06
|
| 1026 |
+
[2026-04-25 20:06:01] Epoch 1 | Step 8170 | Loss: 1.1464 | LR: 5.00e-06
|
| 1027 |
+
[2026-04-25 20:06:03] Epoch 1 | Step 8180 | Loss: 1.1463 | LR: 5.00e-06
|
| 1028 |
+
[2026-04-25 20:06:06] Epoch 1 | Step 8190 | Loss: 1.1461 | LR: 5.00e-06
|
| 1029 |
+
[2026-04-25 20:06:08] Epoch 1 | Step 8200 | Loss: 1.1461 | LR: 5.00e-06
|
| 1030 |
+
[2026-04-25 20:06:11] Epoch 1 | Step 8210 | Loss: 1.1460 | LR: 5.00e-06
|
| 1031 |
+
[2026-04-25 20:06:13] Epoch 1 | Step 8220 | Loss: 1.1460 | LR: 5.00e-06
|
| 1032 |
+
[2026-04-25 20:06:16] Epoch 1 | Step 8230 | Loss: 1.1460 | LR: 5.00e-06
|
| 1033 |
+
[2026-04-25 20:06:18] Epoch 1 | Step 8240 | Loss: 1.1460 | LR: 5.00e-06
|
| 1034 |
+
[2026-04-25 20:06:21] Epoch 1 | Step 8250 | Loss: 1.1459 | LR: 5.00e-06
|
| 1035 |
+
[2026-04-25 20:06:23] Epoch 1 | Step 8260 | Loss: 1.1460 | LR: 5.00e-06
|
| 1036 |
+
[2026-04-25 20:06:26] Epoch 1 | Step 8270 | Loss: 1.1460 | LR: 5.00e-06
|
| 1037 |
+
[2026-04-25 20:06:29] Epoch 1 | Step 8280 | Loss: 1.1460 | LR: 5.00e-06
|
| 1038 |
+
[2026-04-25 20:06:31] Epoch 1 | Step 8290 | Loss: 1.1461 | LR: 5.00e-06
|
| 1039 |
+
[2026-04-25 20:06:34] Epoch 1 | Step 8300 | Loss: 1.1459 | LR: 5.00e-06
|
| 1040 |
+
[2026-04-25 20:06:36] Epoch 1 | Step 8310 | Loss: 1.1460 | LR: 5.00e-06
|
| 1041 |
+
[2026-04-25 20:06:38] Epoch 1 | Step 8320 | Loss: 1.1459 | LR: 5.00e-06
|
| 1042 |
+
[2026-04-25 20:06:41] Epoch 1 | Step 8330 | Loss: 1.1459 | LR: 5.00e-06
|
| 1043 |
+
[2026-04-25 20:06:43] Epoch 1 | Step 8340 | Loss: 1.1458 | LR: 5.00e-06
|
| 1044 |
+
[2026-04-25 20:06:46] Epoch 1 | Step 8350 | Loss: 1.1457 | LR: 5.00e-06
|
| 1045 |
+
[2026-04-25 20:06:48] Epoch 1 | Step 8360 | Loss: 1.1456 | LR: 5.00e-06
|
| 1046 |
+
[2026-04-25 20:06:51] Epoch 1 | Step 8370 | Loss: 1.1455 | LR: 5.00e-06
|
| 1047 |
+
[2026-04-25 20:06:53] Epoch 1 | Step 8380 | Loss: 1.1454 | LR: 5.00e-06
|
| 1048 |
+
[2026-04-25 20:06:55] Epoch 1 | Step 8390 | Loss: 1.1454 | LR: 5.00e-06
|
| 1049 |
+
[2026-04-25 20:06:58] Epoch 1 | Step 8400 | Loss: 1.1453 | LR: 5.00e-06
|
| 1050 |
+
[2026-04-25 20:07:01] Epoch 1 | Step 8410 | Loss: 1.1453 | LR: 5.00e-06
|
| 1051 |
+
[2026-04-25 20:07:03] Epoch 1 | Step 8420 | Loss: 1.1454 | LR: 5.00e-06
|
| 1052 |
+
[2026-04-25 20:07:06] Epoch 1 | Step 8430 | Loss: 1.1451 | LR: 5.00e-06
|
| 1053 |
+
[2026-04-25 20:07:08] Epoch 1 | Step 8440 | Loss: 1.1451 | LR: 5.00e-06
|
| 1054 |
+
[2026-04-25 20:07:11] Epoch 1 | Step 8450 | Loss: 1.1450 | LR: 5.00e-06
|
| 1055 |
+
[2026-04-25 20:07:13] Epoch 1 | Step 8460 | Loss: 1.1450 | LR: 5.00e-06
|
| 1056 |
+
[2026-04-25 20:07:16] Epoch 1 | Step 8470 | Loss: 1.1449 | LR: 5.00e-06
|
| 1057 |
+
[2026-04-25 20:07:18] Epoch 1 | Step 8480 | Loss: 1.1448 | LR: 5.00e-06
|
| 1058 |
+
[2026-04-25 20:07:21] Epoch 1 | Step 8490 | Loss: 1.1446 | LR: 5.00e-06
|
| 1059 |
+
[2026-04-25 20:07:23] Epoch 1 | Step 8500 | Loss: 1.1446 | LR: 5.00e-06
|
| 1060 |
+
[2026-04-25 20:07:26] Epoch 1 | Step 8510 | Loss: 1.1444 | LR: 5.00e-06
|
| 1061 |
+
[2026-04-25 20:07:29] Epoch 1 | Step 8520 | Loss: 1.1444 | LR: 5.00e-06
|
| 1062 |
+
[2026-04-25 20:07:31] Epoch 1 | Step 8530 | Loss: 1.1443 | LR: 5.00e-06
|
| 1063 |
+
[2026-04-25 20:07:34] Epoch 1 | Step 8540 | Loss: 1.1445 | LR: 5.00e-06
|
| 1064 |
+
[2026-04-25 20:07:36] Epoch 1 | Step 8550 | Loss: 1.1445 | LR: 5.00e-06
|
| 1065 |
+
[2026-04-25 20:07:39] Epoch 1 | Step 8560 | Loss: 1.1444 | LR: 5.00e-06
|
| 1066 |
+
[2026-04-25 20:07:41] Epoch 1 | Step 8570 | Loss: 1.1443 | LR: 5.00e-06
|
| 1067 |
+
[2026-04-25 20:07:44] Epoch 1 | Step 8580 | Loss: 1.1441 | LR: 5.00e-06
|
| 1068 |
+
[2026-04-25 20:07:46] Epoch 1 | Step 8590 | Loss: 1.1439 | LR: 5.00e-06
|
| 1069 |
+
[2026-04-25 20:07:49] Epoch 1 | Step 8600 | Loss: 1.1438 | LR: 5.00e-06
|
| 1070 |
+
[2026-04-25 20:07:52] Epoch 1 | Step 8610 | Loss: 1.1439 | LR: 5.00e-06
|
| 1071 |
+
[2026-04-25 20:07:54] Epoch 1 | Step 8620 | Loss: 1.1437 | LR: 5.00e-06
|
| 1072 |
+
[2026-04-25 20:07:57] Epoch 1 | Step 8630 | Loss: 1.1435 | LR: 5.00e-06
|
| 1073 |
+
[2026-04-25 20:07:59] Epoch 1 | Step 8640 | Loss: 1.1436 | LR: 5.00e-06
|
| 1074 |
+
[2026-04-25 20:08:02] Epoch 1 | Step 8650 | Loss: 1.1436 | LR: 5.00e-06
|
| 1075 |
+
[2026-04-25 20:08:04] Epoch 1 | Step 8660 | Loss: 1.1434 | LR: 5.00e-06
|
| 1076 |
+
[2026-04-25 20:08:07] Epoch 1 | Step 8670 | Loss: 1.1435 | LR: 5.00e-06
|
| 1077 |
+
[2026-04-25 20:08:09] Epoch 1 | Step 8680 | Loss: 1.1435 | LR: 5.00e-06
|
| 1078 |
+
[2026-04-25 20:08:12] Epoch 1 | Step 8690 | Loss: 1.1433 | LR: 5.00e-06
|
| 1079 |
+
[2026-04-25 20:08:14] Epoch 1 | Step 8700 | Loss: 1.1433 | LR: 5.00e-06
|
| 1080 |
+
[2026-04-25 20:08:17] Epoch 1 | Step 8710 | Loss: 1.1430 | LR: 5.00e-06
|
| 1081 |
+
[2026-04-25 20:08:19] Epoch 1 | Step 8720 | Loss: 1.1429 | LR: 5.00e-06
|
| 1082 |
+
[2026-04-25 20:08:22] Epoch 1 | Step 8730 | Loss: 1.1429 | LR: 5.00e-06
|
| 1083 |
+
[2026-04-25 20:08:24] Epoch 1 | Step 8740 | Loss: 1.1429 | LR: 5.00e-06
|
| 1084 |
+
[2026-04-25 20:08:27] Epoch 1 | Step 8750 | Loss: 1.1429 | LR: 5.00e-06
|
| 1085 |
+
[2026-04-25 20:08:29] Epoch 1 | Step 8760 | Loss: 1.1428 | LR: 5.00e-06
|
| 1086 |
+
[2026-04-25 20:08:32] Epoch 1 | Step 8770 | Loss: 1.1426 | LR: 5.00e-06
|
| 1087 |
+
[2026-04-25 20:08:35] Epoch 1 | Step 8780 | Loss: 1.1425 | LR: 5.00e-06
|
| 1088 |
+
[2026-04-25 20:08:37] Epoch 1 | Step 8790 | Loss: 1.1425 | LR: 5.00e-06
|
| 1089 |
+
[2026-04-25 20:08:40] Epoch 1 | Step 8800 | Loss: 1.1422 | LR: 5.00e-06
|
| 1090 |
+
[2026-04-25 20:08:43] Epoch 1 | Step 8810 | Loss: 1.1422 | LR: 5.00e-06
|
| 1091 |
+
[2026-04-25 20:08:45] Epoch 1 | Step 8820 | Loss: 1.1421 | LR: 5.00e-06
|
| 1092 |
+
[2026-04-25 20:08:48] Epoch 1 | Step 8830 | Loss: 1.1420 | LR: 5.00e-06
|
| 1093 |
+
[2026-04-25 20:08:51] Epoch 1 | Step 8840 | Loss: 1.1419 | LR: 5.00e-06
|
| 1094 |
+
[2026-04-25 20:08:53] Epoch 1 | Step 8850 | Loss: 1.1419 | LR: 5.00e-06
|
| 1095 |
+
[2026-04-25 20:08:56] Epoch 1 | Step 8860 | Loss: 1.1419 | LR: 5.00e-06
|
| 1096 |
+
[2026-04-25 20:08:58] Epoch 1 | Step 8870 | Loss: 1.1419 | LR: 5.00e-06
|
| 1097 |
+
[2026-04-25 20:09:01] Epoch 1 | Step 8880 | Loss: 1.1418 | LR: 5.00e-06
|
| 1098 |
+
[2026-04-25 20:09:03] Epoch 1 | Step 8890 | Loss: 1.1416 | LR: 5.00e-06
|
| 1099 |
+
[2026-04-25 20:09:05] Epoch 1 | Step 8900 | Loss: 1.1413 | LR: 5.00e-06
|
| 1100 |
+
[2026-04-25 20:09:08] Epoch 1 | Step 8910 | Loss: 1.1414 | LR: 5.00e-06
|
| 1101 |
+
[2026-04-25 20:09:11] Epoch 1 | Step 8920 | Loss: 1.1411 | LR: 5.00e-06
|
| 1102 |
+
[2026-04-25 20:09:13] Epoch 1 | Step 8930 | Loss: 1.1410 | LR: 5.00e-06
|
| 1103 |
+
[2026-04-25 20:09:16] Epoch 1 | Step 8940 | Loss: 1.1410 | LR: 5.00e-06
|
| 1104 |
+
[2026-04-25 20:09:18] Epoch 1 | Step 8950 | Loss: 1.1410 | LR: 5.00e-06
|
| 1105 |
+
[2026-04-25 20:09:21] Epoch 1 | Step 8960 | Loss: 1.1410 | LR: 5.00e-06
|
| 1106 |
+
[2026-04-25 20:09:23] Epoch 1 | Step 8970 | Loss: 1.1409 | LR: 5.00e-06
|
| 1107 |
+
[2026-04-25 20:09:26] Epoch 1 | Step 8980 | Loss: 1.1408 | LR: 5.00e-06
|
| 1108 |
+
[2026-04-25 20:09:28] Epoch 1 | Step 8990 | Loss: 1.1406 | LR: 5.00e-06
|
| 1109 |
+
[2026-04-25 20:09:31] Epoch 1 | Step 9000 | Loss: 1.1406 | LR: 5.00e-06
|
| 1110 |
+
[2026-04-25 20:09:34] Epoch 1 | Step 9010 | Loss: 1.1406 | LR: 5.00e-06
|
| 1111 |
+
[2026-04-25 20:09:36] Epoch 1 | Step 9020 | Loss: 1.1407 | LR: 5.00e-06
|
| 1112 |
+
[2026-04-25 20:09:39] Epoch 1 | Step 9030 | Loss: 1.1406 | LR: 5.00e-06
|
| 1113 |
+
[2026-04-25 20:09:41] Epoch 1 | Step 9040 | Loss: 1.1406 | LR: 5.00e-06
|
| 1114 |
+
[2026-04-25 20:09:44] Epoch 1 | Step 9050 | Loss: 1.1404 | LR: 5.00e-06
|
| 1115 |
+
[2026-04-25 20:09:46] Epoch 1 | Step 9060 | Loss: 1.1405 | LR: 5.00e-06
|
| 1116 |
+
[2026-04-25 20:09:49] Epoch 1 | Step 9070 | Loss: 1.1403 | LR: 5.00e-06
|
| 1117 |
+
[2026-04-25 20:09:51] Epoch 1 | Step 9080 | Loss: 1.1403 | LR: 5.00e-06
|
| 1118 |
+
[2026-04-25 20:09:53] Epoch 1 | Step 9090 | Loss: 1.1402 | LR: 5.00e-06
|
| 1119 |
+
[2026-04-25 20:09:56] Epoch 1 | Step 9100 | Loss: 1.1402 | LR: 5.00e-06
|
| 1120 |
+
[2026-04-25 20:09:59] Epoch 1 | Step 9110 | Loss: 1.1402 | LR: 5.00e-06
|
| 1121 |
+
[2026-04-25 20:10:01] Epoch 1 | Step 9120 | Loss: 1.1402 | LR: 5.00e-06
|
| 1122 |
+
[2026-04-25 20:10:03] Epoch 1 | Step 9130 | Loss: 1.1401 | LR: 5.00e-06
|
| 1123 |
+
[2026-04-25 20:10:06] Epoch 1 | Step 9140 | Loss: 1.1400 | LR: 5.00e-06
|
| 1124 |
+
[2026-04-25 20:10:08] Epoch 1 | Step 9150 | Loss: 1.1401 | LR: 5.00e-06
|
| 1125 |
+
[2026-04-25 20:10:11] Epoch 1 | Step 9160 | Loss: 1.1400 | LR: 5.00e-06
|
| 1126 |
+
[2026-04-25 20:10:13] Epoch 1 | Step 9170 | Loss: 1.1397 | LR: 5.00e-06
|
| 1127 |
+
[2026-04-25 20:10:16] Epoch 1 | Step 9180 | Loss: 1.1396 | LR: 5.00e-06
|
| 1128 |
+
[2026-04-25 20:10:19] Epoch 1 | Step 9190 | Loss: 1.1393 | LR: 5.00e-06
|
| 1129 |
+
[2026-04-25 20:10:21] Epoch 1 | Step 9200 | Loss: 1.1393 | LR: 5.00e-06
|
| 1130 |
+
[2026-04-25 20:10:24] Epoch 1 | Step 9210 | Loss: 1.1393 | LR: 5.00e-06
|
| 1131 |
+
[2026-04-25 20:10:26] Epoch 1 | Step 9220 | Loss: 1.1392 | LR: 5.00e-06
|
| 1132 |
+
[2026-04-25 20:10:29] Epoch 1 | Step 9230 | Loss: 1.1391 | LR: 5.00e-06
|
| 1133 |
+
[2026-04-25 20:10:31] Epoch 1 | Step 9240 | Loss: 1.1389 | LR: 5.00e-06
|
| 1134 |
+
[2026-04-25 20:10:34] Epoch 1 | Step 9250 | Loss: 1.1388 | LR: 5.00e-06
|
| 1135 |
+
[2026-04-25 20:10:37] Epoch 1 | Step 9260 | Loss: 1.1386 | LR: 5.00e-06
|
| 1136 |
+
[2026-04-25 20:10:39] Epoch 1 | Step 9270 | Loss: 1.1385 | LR: 5.00e-06
|
| 1137 |
+
[2026-04-25 20:10:42] Epoch 1 | Step 9280 | Loss: 1.1385 | LR: 5.00e-06
|
| 1138 |
+
[2026-04-25 20:10:44] Epoch 1 | Step 9290 | Loss: 1.1384 | LR: 5.00e-06
|
| 1139 |
+
[2026-04-25 20:10:47] Epoch 1 | Step 9300 | Loss: 1.1384 | LR: 5.00e-06
|
| 1140 |
+
[2026-04-25 20:10:49] Epoch 1 | Step 9310 | Loss: 1.1383 | LR: 5.00e-06
|
| 1141 |
+
[2026-04-25 20:10:52] Epoch 1 | Step 9320 | Loss: 1.1382 | LR: 5.00e-06
|
| 1142 |
+
[2026-04-25 20:10:54] Epoch 1 | Step 9330 | Loss: 1.1381 | LR: 5.00e-06
|
| 1143 |
+
[2026-04-25 20:10:57] Epoch 1 | Step 9340 | Loss: 1.1380 | LR: 5.00e-06
|
| 1144 |
+
[2026-04-25 20:10:59] Epoch 1 | Step 9350 | Loss: 1.1379 | LR: 5.00e-06
|
| 1145 |
+
[2026-04-25 20:11:02] Epoch 1 | Step 9360 | Loss: 1.1378 | LR: 5.00e-06
|
| 1146 |
+
[2026-04-25 20:11:05] Epoch 1 | Step 9370 | Loss: 1.1378 | LR: 5.00e-06
|
| 1147 |
+
[2026-04-25 20:11:07] Epoch 1 | Step 9380 | Loss: 1.1378 | LR: 5.00e-06
|
| 1148 |
+
[2026-04-25 20:11:10] Epoch 1 | Step 9390 | Loss: 1.1375 | LR: 5.00e-06
|
| 1149 |
+
[2026-04-25 20:11:12] Epoch 1 | Step 9400 | Loss: 1.1376 | LR: 5.00e-06
|
| 1150 |
+
[2026-04-25 20:11:15] Epoch 1 | Step 9410 | Loss: 1.1376 | LR: 5.00e-06
|
| 1151 |
+
[2026-04-25 20:11:17] Epoch 1 | Step 9420 | Loss: 1.1377 | LR: 5.00e-06
|
| 1152 |
+
[2026-04-25 20:11:20] Epoch 1 | Step 9430 | Loss: 1.1377 | LR: 5.00e-06
|
| 1153 |
+
[2026-04-25 20:11:22] Epoch 1 | Step 9440 | Loss: 1.1376 | LR: 5.00e-06
|
| 1154 |
+
[2026-04-25 20:11:25] Epoch 1 | Step 9450 | Loss: 1.1377 | LR: 5.00e-06
|
| 1155 |
+
[2026-04-25 20:11:27] Epoch 1 | Step 9460 | Loss: 1.1375 | LR: 5.00e-06
|
| 1156 |
+
[2026-04-25 20:11:30] Epoch 1 | Step 9470 | Loss: 1.1373 | LR: 5.00e-06
|
| 1157 |
+
[2026-04-25 20:11:33] Epoch 1 | Step 9480 | Loss: 1.1371 | LR: 5.00e-06
|
| 1158 |
+
[2026-04-25 20:11:35] Epoch 1 | Step 9490 | Loss: 1.1371 | LR: 5.00e-06
|
| 1159 |
+
[2026-04-25 20:11:37] Epoch 1 | Step 9500 | Loss: 1.1370 | LR: 5.00e-06
|
| 1160 |
+
[2026-04-25 20:11:40] Epoch 1 | Step 9510 | Loss: 1.1370 | LR: 5.00e-06
|
| 1161 |
+
[2026-04-25 20:11:43] Epoch 1 | Step 9520 | Loss: 1.1369 | LR: 5.00e-06
|
| 1162 |
+
[2026-04-25 20:11:45] Epoch 1 | Step 9530 | Loss: 1.1369 | LR: 5.00e-06
|
| 1163 |
+
[2026-04-25 20:11:48] Epoch 1 | Step 9540 | Loss: 1.1367 | LR: 5.00e-06
|
| 1164 |
+
[2026-04-25 20:11:50] Epoch 1 | Step 9550 | Loss: 1.1367 | LR: 5.00e-06
|
| 1165 |
+
[2026-04-25 20:11:53] Epoch 1 | Step 9560 | Loss: 1.1367 | LR: 5.00e-06
|
| 1166 |
+
[2026-04-25 20:11:55] Epoch 1 | Step 9570 | Loss: 1.1367 | LR: 5.00e-06
|
| 1167 |
+
[2026-04-25 20:11:58] Epoch 1 | Step 9580 | Loss: 1.1368 | LR: 5.00e-06
|
| 1168 |
+
[2026-04-25 20:12:00] Epoch 1 | Step 9590 | Loss: 1.1367 | LR: 5.00e-06
|
| 1169 |
+
[2026-04-25 20:12:03] Epoch 1 | Step 9600 | Loss: 1.1366 | LR: 5.00e-06
|
| 1170 |
+
[2026-04-25 20:12:06] Epoch 1 | Step 9610 | Loss: 1.1365 | LR: 5.00e-06
|
| 1171 |
+
[2026-04-25 20:12:09] Epoch 1 | Step 9620 | Loss: 1.1365 | LR: 5.00e-06
|
| 1172 |
+
[2026-04-25 20:12:11] Epoch 1 | Step 9630 | Loss: 1.1366 | LR: 5.00e-06
|
| 1173 |
+
[2026-04-25 20:12:14] Epoch 1 | Step 9640 | Loss: 1.1365 | LR: 5.00e-06
|
| 1174 |
+
[2026-04-25 20:12:17] Epoch 1 | Step 9650 | Loss: 1.1365 | LR: 5.00e-06
|
| 1175 |
+
[2026-04-25 20:12:20] Epoch 1 | Step 9660 | Loss: 1.1364 | LR: 5.00e-06
|
| 1176 |
+
[2026-04-25 20:12:22] Epoch 1 | Step 9670 | Loss: 1.1364 | LR: 5.00e-06
|
| 1177 |
+
[2026-04-25 20:12:25] Epoch 1 | Step 9680 | Loss: 1.1364 | LR: 5.00e-06
|
| 1178 |
+
[2026-04-25 20:12:27] Epoch 1 | Step 9690 | Loss: 1.1363 | LR: 5.00e-06
|
| 1179 |
+
[2026-04-25 20:12:30] Epoch 1 | Step 9700 | Loss: 1.1363 | LR: 5.00e-06
|
| 1180 |
+
[2026-04-25 20:12:32] Epoch 1 | Step 9710 | Loss: 1.1362 | LR: 5.00e-06
|
| 1181 |
+
[2026-04-25 20:12:35] Epoch 1 | Step 9720 | Loss: 1.1362 | LR: 5.00e-06
|
| 1182 |
+
[2026-04-25 20:12:37] Epoch 1 | Step 9730 | Loss: 1.1362 | LR: 5.00e-06
|
| 1183 |
+
[2026-04-25 20:12:40] Epoch 1 | Step 9740 | Loss: 1.1361 | LR: 5.00e-06
|
| 1184 |
+
[2026-04-25 20:12:42] Epoch 1 | Step 9750 | Loss: 1.1360 | LR: 5.00e-06
|
| 1185 |
+
[2026-04-25 20:12:45] Epoch 1 | Step 9760 | Loss: 1.1359 | LR: 5.00e-06
|
| 1186 |
+
[2026-04-25 20:12:47] Epoch 1 | Step 9770 | Loss: 1.1359 | LR: 5.00e-06
|
| 1187 |
+
[2026-04-25 20:12:50] Epoch 1 | Step 9780 | Loss: 1.1358 | LR: 5.00e-06
|
| 1188 |
+
[2026-04-25 20:12:52] Epoch 1 | Step 9790 | Loss: 1.1357 | LR: 5.00e-06
|
| 1189 |
+
[2026-04-25 20:12:55] Epoch 1 | Step 9800 | Loss: 1.1357 | LR: 5.00e-06
|
| 1190 |
+
[2026-04-25 20:12:58] Epoch 1 | Step 9810 | Loss: 1.1355 | LR: 5.00e-06
|
| 1191 |
+
[2026-04-25 20:13:00] Epoch 1 | Step 9820 | Loss: 1.1354 | LR: 5.00e-06
|
| 1192 |
+
[2026-04-25 20:13:03] Epoch 1 | Step 9830 | Loss: 1.1354 | LR: 5.00e-06
|
| 1193 |
+
[2026-04-25 20:13:06] Epoch 1 | Step 9840 | Loss: 1.1355 | LR: 5.00e-06
|
| 1194 |
+
[2026-04-25 20:13:08] Epoch 1 | Step 9850 | Loss: 1.1354 | LR: 5.00e-06
|
| 1195 |
+
[2026-04-25 20:13:11] Epoch 1 | Step 9860 | Loss: 1.1352 | LR: 5.00e-06
|
| 1196 |
+
[2026-04-25 20:13:13] Epoch 1 | Step 9870 | Loss: 1.1353 | LR: 5.00e-06
|
| 1197 |
+
[2026-04-25 20:13:16] Epoch 1 | Step 9880 | Loss: 1.1353 | LR: 5.00e-06
|
| 1198 |
+
[2026-04-25 20:13:18] Epoch 1 completed in 2546.41s | Loss: 1.1353
|
| 1199 |
+
[2026-04-25 20:13:18]
|
| 1200 |
+
Training completed!
|
| 1201 |
+
[2026-04-25 20:13:20] Final model: /workspace/byte-llms-code/outputs/lr_sweep/pythia_1b_lr_5e-5/model_final.pt
|