diff --git "a/i2v_motion/log.txt" "b/i2v_motion/log.txt" new file mode 100644--- /dev/null +++ "b/i2v_motion/log.txt" @@ -0,0 +1,5861 @@ +[2024-06-08 18:16:59,280] INFO: {'__name__': 'Config: VideoLDM Decoder', 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'max_words': 1000, 'num_workers': 16, 'prefetch_factor': 2, 'resolution': [256, 256], 'vit_out_dim': 1024, 'vit_resolution': [224, 224], 'depth_clamp': 10.0, 'misc_size': 384, 'depth_std': 20.0, 'frame_lens': [24], 'sample_fps': [8], 'vid_dataset': {'type': 'ObjDataset', 'data_list': './Obj/data/6.8_with_motion/train.txt', 'max_words': 1000, 'resolution': [256, 256], 'vit_resolution': [224, 224], 'get_first_frame': True}, 'img_dataset': {'type': 'ImageBaseDataset', 'data_list': ['laion_400m'], 'max_words': 1000, 'resolution': [448, 256]}, 'batch_sizes': {'1': 256, '4': 4, '8': 4, '16': 4, '24': 20}, 'Diffusion': {'type': 'DiffusionDDIM', 'schedule': 'cosine', 'schedule_param': {'num_timesteps': 1000, 'cosine_s': 0.008, 'zero_terminal_snr': True}, 'mean_type': 'v', 'loss_type': 'mse', 'var_type': 'fixed_small', 'rescale_timesteps': False, 'noise_strength': 0.1, 'ddim_timesteps': 50, 'with_latent_motion_loss': False}, 'ddim_timesteps': 50, 'use_div_loss': False, 'p_zero': 0.0, 'guide_scale': 3.0, 'vit_mean': [0.48145466, 0.4578275, 0.40821073], 'vit_std': [0.26862954, 0.26130258, 0.27577711], 'scale_factor': 0.18215, 'use_checkpoint': True, 'use_sharded_ddp': False, 'use_fsdp': False, 'use_fp16': True, 'temporal_attention': True, 'UNet': {'type': 'UNetSD_I2VGen', 'in_dim': 4, 'dim': 320, 'y_dim': 1024, 'context_dim': 1024, 'out_dim': 4, 'dim_mult': [1, 2, 4, 4], 'num_heads': 8, 'head_dim': 64, 'num_res_blocks': 2, 'attn_scales': [1.0, 0.5, 0.25], 'dropout': 0.1, 'temporal_attention': True, 'temporal_attn_times': 1, 'use_checkpoint': True, 'use_fps_condition': False, 'use_sim_mask': False, 'upper_len': 128, 'concat_dim': 4, 'default_fps': 8, 'use_camera_condition': False, 'use_lgm_refine': False, 'with_motion': True, 'with_latent_motion': False}, 'guidances': [], 'auto_encoder': {'type': 'AutoencoderKL', 'ddconfig': {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0, 'video_kernel_size': [3, 1, 1]}, 'embed_dim': 4, 'pretrained': './pretrained_models/modelscope_t2v/VQGAN_autoencoder.pth'}, 'embedder': {'type': 'FrozenOpenCLIPTtxtVisualEmbedder', 'layer': 'penultimate', 'pretrained': 'pretrained_models/modelscope_i2v/I2VGen-XL/open_clip_pytorch_model.bin', 'vit_resolution': [224, 224]}, 'ema_decay': 0.9999, 'num_steps': 1000000, 'lr': 3e-05, 'weight_decay': 0.0, 'betas': (0.9, 0.999), 'eps': 1e-08, 'chunk_size': 4, 'decoder_bs': 4, 'alpha': 0.7, 'save_ckp_interval': 500, 'warmup_steps': 10, 'decay_mode': 'cosine', 'use_ema': True, 'load_from': None, 'Pretrain': {'type': 'pretrain_specific_strategies', 'fix_weight': False, 'grad_scale': 0.5, 'resume_checkpoint': './pretrained_models/i2v_882000.pth', 'sd_keys_path': 'pretrained_models/modelscope_i2v/I2VGen-XL/stable_diffusion_image_key_temporal_attention_x1.json'}, 'viz_interval': 500, 'visual_train': {'type': 'VisualTrainTextImageToVideo', 'partial_keys': [['y', 'image', 'local_image', 'fps', 'camera_data', 'gs_data']], 'use_offset_noise': True, 'guide_scale': 6.0}, 'visual_inference': {'type': 'VisualGeneratedVideos'}, 'inference_list_path': '', 'log_interval': 1, 'log_dir': 'workspace/i2v_motion/obj_train_motion', 'seed': 6666, 'negative_prompt': 'static, 3d 
asset, Distorted, discontinuous, Ugly, blurry, low resolution, disfigured, disconnected limbs, Ugly faces, incomplete arms', 'with_motion': True, 'motion_magnitude': None, 'with_latent_motion': False, 'latent_motion_magnitude': None, 'ENABLE': True, 'DATASET': 'webvid10m', 'TASK_TYPE': 'train_i2v_entrance', 'noise_strength': 0.1, 'use_zero_infer': True, 'cfg_file': './configs/obj_train_motion.yaml', 'init_method': 'tcp://localhost:9999', 'debug': False, 'opts': [], 'pmi_rank': 0, 'pmi_world_size': 1, 'gpus_per_machine': 4, 'world_size': 4, 'gpu': 0, 'rank': 0, 'log_file': 'workspace/i2v_motion/obj_train_motion/log.txt'} +[2024-06-08 18:16:59,282] INFO: Save all the file in to dir workspace/i2v_motion/obj_train_motion +[2024-06-08 18:16:59,282] INFO: Going into i2v_img_fullid_vidcom function on 0 gpu +[2024-06-08 18:16:59,292] INFO: Current worker with max_frames=24, batch_size=20, sample_fps=8 +[2024-06-08 18:18:04,658] INFO: Loaded ViT-H-14 model config. +[2024-06-08 18:18:58,387] INFO: Loading pretrained ViT-H-14 weights (pretrained_models/modelscope_i2v/I2VGen-XL/open_clip_pytorch_model.bin). +[2024-06-08 18:19:35,489] INFO: Restored from ./pretrained_models/modelscope_t2v/VQGAN_autoencoder.pth +[2024-06-08 18:19:55,218] INFO: load a fixed model with _IncompatibleKeys(missing_keys=['motion_embedding.0.weight', 'motion_embedding.0.bias', 'motion_embedding.2.weight', 'motion_embedding.2.bias'], unexpected_keys=['lgm_big.unet.conv_in.weight', 'lgm_big.unet.conv_in.bias', 'lgm_big.unet.down_blocks.0.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.0.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.0.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.0.nets.0.conv1.bias', 'lgm_big.unet.down_blocks.0.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.0.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.0.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.0.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.0.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.0.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.0.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.0.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.0.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.0.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.0.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.0.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.0.downsample.weight', 'lgm_big.unet.down_blocks.0.downsample.bias', 'lgm_big.unet.down_blocks.1.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.1.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.1.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.1.nets.0.conv1.bias', 'lgm_big.unet.down_blocks.1.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.1.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.1.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.1.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.1.nets.0.shortcut.weight', 'lgm_big.unet.down_blocks.1.nets.0.shortcut.bias', 'lgm_big.unet.down_blocks.1.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.1.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.1.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.1.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.1.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.1.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.1.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.1.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.1.downsample.weight', 'lgm_big.unet.down_blocks.1.downsample.bias', 'lgm_big.unet.down_blocks.2.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.2.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.2.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.2.nets.0.conv1.bias', 
'lgm_big.unet.down_blocks.2.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.2.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.2.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.2.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.2.nets.0.shortcut.weight', 'lgm_big.unet.down_blocks.2.nets.0.shortcut.bias', 'lgm_big.unet.down_blocks.2.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.2.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.2.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.2.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.2.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.2.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.2.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.2.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.2.downsample.weight', 'lgm_big.unet.down_blocks.2.downsample.bias', 'lgm_big.unet.down_blocks.3.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.3.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.3.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.3.nets.0.conv1.bias', 'lgm_big.unet.down_blocks.3.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.3.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.3.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.3.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.3.nets.0.shortcut.weight', 'lgm_big.unet.down_blocks.3.nets.0.shortcut.bias', 'lgm_big.unet.down_blocks.3.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.3.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.3.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.3.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.3.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.3.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.3.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.3.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.3.attns.0.norm.weight', 'lgm_big.unet.down_blocks.3.attns.0.norm.bias', 'lgm_big.unet.down_blocks.3.attns.0.attn.qkv.weight', 'lgm_big.unet.down_blocks.3.attns.0.attn.proj.weight', 'lgm_big.unet.down_blocks.3.attns.0.attn.proj.bias', 'lgm_big.unet.down_blocks.3.attns.1.norm.weight', 'lgm_big.unet.down_blocks.3.attns.1.norm.bias', 'lgm_big.unet.down_blocks.3.attns.1.attn.qkv.weight', 'lgm_big.unet.down_blocks.3.attns.1.attn.proj.weight', 'lgm_big.unet.down_blocks.3.attns.1.attn.proj.bias', 'lgm_big.unet.down_blocks.3.downsample.weight', 'lgm_big.unet.down_blocks.3.downsample.bias', 'lgm_big.unet.down_blocks.4.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.4.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.4.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.4.nets.0.conv1.bias', 'lgm_big.unet.down_blocks.4.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.4.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.4.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.4.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.4.nets.0.shortcut.weight', 'lgm_big.unet.down_blocks.4.nets.0.shortcut.bias', 'lgm_big.unet.down_blocks.4.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.4.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.4.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.4.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.4.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.4.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.4.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.4.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.4.attns.0.norm.weight', 'lgm_big.unet.down_blocks.4.attns.0.norm.bias', 'lgm_big.unet.down_blocks.4.attns.0.attn.qkv.weight', 'lgm_big.unet.down_blocks.4.attns.0.attn.proj.weight', 'lgm_big.unet.down_blocks.4.attns.0.attn.proj.bias', 'lgm_big.unet.down_blocks.4.attns.1.norm.weight', 'lgm_big.unet.down_blocks.4.attns.1.norm.bias', 
'lgm_big.unet.down_blocks.4.attns.1.attn.qkv.weight', 'lgm_big.unet.down_blocks.4.attns.1.attn.proj.weight', 'lgm_big.unet.down_blocks.4.attns.1.attn.proj.bias', 'lgm_big.unet.down_blocks.4.downsample.weight', 'lgm_big.unet.down_blocks.4.downsample.bias', 'lgm_big.unet.down_blocks.5.nets.0.norm1.weight', 'lgm_big.unet.down_blocks.5.nets.0.norm1.bias', 'lgm_big.unet.down_blocks.5.nets.0.conv1.weight', 'lgm_big.unet.down_blocks.5.nets.0.conv1.bias', 'lgm_big.unet.down_blocks.5.nets.0.norm2.weight', 'lgm_big.unet.down_blocks.5.nets.0.norm2.bias', 'lgm_big.unet.down_blocks.5.nets.0.conv2.weight', 'lgm_big.unet.down_blocks.5.nets.0.conv2.bias', 'lgm_big.unet.down_blocks.5.nets.1.norm1.weight', 'lgm_big.unet.down_blocks.5.nets.1.norm1.bias', 'lgm_big.unet.down_blocks.5.nets.1.conv1.weight', 'lgm_big.unet.down_blocks.5.nets.1.conv1.bias', 'lgm_big.unet.down_blocks.5.nets.1.norm2.weight', 'lgm_big.unet.down_blocks.5.nets.1.norm2.bias', 'lgm_big.unet.down_blocks.5.nets.1.conv2.weight', 'lgm_big.unet.down_blocks.5.nets.1.conv2.bias', 'lgm_big.unet.down_blocks.5.attns.0.norm.weight', 'lgm_big.unet.down_blocks.5.attns.0.norm.bias', 'lgm_big.unet.down_blocks.5.attns.0.attn.qkv.weight', 'lgm_big.unet.down_blocks.5.attns.0.attn.proj.weight', 'lgm_big.unet.down_blocks.5.attns.0.attn.proj.bias', 'lgm_big.unet.down_blocks.5.attns.1.norm.weight', 'lgm_big.unet.down_blocks.5.attns.1.norm.bias', 'lgm_big.unet.down_blocks.5.attns.1.attn.qkv.weight', 'lgm_big.unet.down_blocks.5.attns.1.attn.proj.weight', 'lgm_big.unet.down_blocks.5.attns.1.attn.proj.bias', 'lgm_big.unet.mid_block.nets.0.norm1.weight', 'lgm_big.unet.mid_block.nets.0.norm1.bias', 'lgm_big.unet.mid_block.nets.0.conv1.weight', 'lgm_big.unet.mid_block.nets.0.conv1.bias', 'lgm_big.unet.mid_block.nets.0.norm2.weight', 'lgm_big.unet.mid_block.nets.0.norm2.bias', 'lgm_big.unet.mid_block.nets.0.conv2.weight', 'lgm_big.unet.mid_block.nets.0.conv2.bias', 'lgm_big.unet.mid_block.nets.1.norm1.weight', 'lgm_big.unet.mid_block.nets.1.norm1.bias', 'lgm_big.unet.mid_block.nets.1.conv1.weight', 'lgm_big.unet.mid_block.nets.1.conv1.bias', 'lgm_big.unet.mid_block.nets.1.norm2.weight', 'lgm_big.unet.mid_block.nets.1.norm2.bias', 'lgm_big.unet.mid_block.nets.1.conv2.weight', 'lgm_big.unet.mid_block.nets.1.conv2.bias', 'lgm_big.unet.mid_block.attns.0.norm.weight', 'lgm_big.unet.mid_block.attns.0.norm.bias', 'lgm_big.unet.mid_block.attns.0.attn.qkv.weight', 'lgm_big.unet.mid_block.attns.0.attn.proj.weight', 'lgm_big.unet.mid_block.attns.0.attn.proj.bias', 'lgm_big.unet.up_blocks.0.nets.0.norm1.weight', 'lgm_big.unet.up_blocks.0.nets.0.norm1.bias', 'lgm_big.unet.up_blocks.0.nets.0.conv1.weight', 'lgm_big.unet.up_blocks.0.nets.0.conv1.bias', 'lgm_big.unet.up_blocks.0.nets.0.norm2.weight', 'lgm_big.unet.up_blocks.0.nets.0.norm2.bias', 'lgm_big.unet.up_blocks.0.nets.0.conv2.weight', 'lgm_big.unet.up_blocks.0.nets.0.conv2.bias', 'lgm_big.unet.up_blocks.0.nets.0.shortcut.weight', 'lgm_big.unet.up_blocks.0.nets.0.shortcut.bias', 'lgm_big.unet.up_blocks.0.nets.1.norm1.weight', 'lgm_big.unet.up_blocks.0.nets.1.norm1.bias', 'lgm_big.unet.up_blocks.0.nets.1.conv1.weight', 'lgm_big.unet.up_blocks.0.nets.1.conv1.bias', 'lgm_big.unet.up_blocks.0.nets.1.norm2.weight', 'lgm_big.unet.up_blocks.0.nets.1.norm2.bias', 'lgm_big.unet.up_blocks.0.nets.1.conv2.weight', 'lgm_big.unet.up_blocks.0.nets.1.conv2.bias', 'lgm_big.unet.up_blocks.0.nets.1.shortcut.weight', 'lgm_big.unet.up_blocks.0.nets.1.shortcut.bias', 'lgm_big.unet.up_blocks.0.nets.2.norm1.weight', 
'lgm_big.unet.up_blocks.0.nets.2.norm1.bias', 'lgm_big.unet.up_blocks.0.nets.2.conv1.weight', 'lgm_big.unet.up_blocks.0.nets.2.conv1.bias', 'lgm_big.unet.up_blocks.0.nets.2.norm2.weight', 'lgm_big.unet.up_blocks.0.nets.2.norm2.bias', 'lgm_big.unet.up_blocks.0.nets.2.conv2.weight', 'lgm_big.unet.up_blocks.0.nets.2.conv2.bias', 'lgm_big.unet.up_blocks.0.nets.2.shortcut.weight', 'lgm_big.unet.up_blocks.0.nets.2.shortcut.bias', 'lgm_big.unet.up_blocks.0.attns.0.norm.weight', 'lgm_big.unet.up_blocks.0.attns.0.norm.bias', 'lgm_big.unet.up_blocks.0.attns.0.attn.qkv.weight', 'lgm_big.unet.up_blocks.0.attns.0.attn.proj.weight', 'lgm_big.unet.up_blocks.0.attns.0.attn.proj.bias', 'lgm_big.unet.up_blocks.0.attns.1.norm.weight', 'lgm_big.unet.up_blocks.0.attns.1.norm.bias', 'lgm_big.unet.up_blocks.0.attns.1.attn.qkv.weight', 'lgm_big.unet.up_blocks.0.attns.1.attn.proj.weight', 'lgm_big.unet.up_blocks.0.attns.1.attn.proj.bias', 'lgm_big.unet.up_blocks.0.attns.2.norm.weight', 'lgm_big.unet.up_blocks.0.attns.2.norm.bias', 'lgm_big.unet.up_blocks.0.attns.2.attn.qkv.weight', 'lgm_big.unet.up_blocks.0.attns.2.attn.proj.weight', 'lgm_big.unet.up_blocks.0.attns.2.attn.proj.bias', 'lgm_big.unet.up_blocks.0.upsample.weight', 'lgm_big.unet.up_blocks.0.upsample.bias', 'lgm_big.unet.up_blocks.1.nets.0.norm1.weight', 'lgm_big.unet.up_blocks.1.nets.0.norm1.bias', 'lgm_big.unet.up_blocks.1.nets.0.conv1.weight', 'lgm_big.unet.up_blocks.1.nets.0.conv1.bias', 'lgm_big.unet.up_blocks.1.nets.0.norm2.weight', 'lgm_big.unet.up_blocks.1.nets.0.norm2.bias', 'lgm_big.unet.up_blocks.1.nets.0.conv2.weight', 'lgm_big.unet.up_blocks.1.nets.0.conv2.bias', 'lgm_big.unet.up_blocks.1.nets.0.shortcut.weight', 'lgm_big.unet.up_blocks.1.nets.0.shortcut.bias', 'lgm_big.unet.up_blocks.1.nets.1.norm1.weight', 'lgm_big.unet.up_blocks.1.nets.1.norm1.bias', 'lgm_big.unet.up_blocks.1.nets.1.conv1.weight', 'lgm_big.unet.up_blocks.1.nets.1.conv1.bias', 'lgm_big.unet.up_blocks.1.nets.1.norm2.weight', 'lgm_big.unet.up_blocks.1.nets.1.norm2.bias', 'lgm_big.unet.up_blocks.1.nets.1.conv2.weight', 'lgm_big.unet.up_blocks.1.nets.1.conv2.bias', 'lgm_big.unet.up_blocks.1.nets.1.shortcut.weight', 'lgm_big.unet.up_blocks.1.nets.1.shortcut.bias', 'lgm_big.unet.up_blocks.1.nets.2.norm1.weight', 'lgm_big.unet.up_blocks.1.nets.2.norm1.bias', 'lgm_big.unet.up_blocks.1.nets.2.conv1.weight', 'lgm_big.unet.up_blocks.1.nets.2.conv1.bias', 'lgm_big.unet.up_blocks.1.nets.2.norm2.weight', 'lgm_big.unet.up_blocks.1.nets.2.norm2.bias', 'lgm_big.unet.up_blocks.1.nets.2.conv2.weight', 'lgm_big.unet.up_blocks.1.nets.2.conv2.bias', 'lgm_big.unet.up_blocks.1.nets.2.shortcut.weight', 'lgm_big.unet.up_blocks.1.nets.2.shortcut.bias', 'lgm_big.unet.up_blocks.1.attns.0.norm.weight', 'lgm_big.unet.up_blocks.1.attns.0.norm.bias', 'lgm_big.unet.up_blocks.1.attns.0.attn.qkv.weight', 'lgm_big.unet.up_blocks.1.attns.0.attn.proj.weight', 'lgm_big.unet.up_blocks.1.attns.0.attn.proj.bias', 'lgm_big.unet.up_blocks.1.attns.1.norm.weight', 'lgm_big.unet.up_blocks.1.attns.1.norm.bias', 'lgm_big.unet.up_blocks.1.attns.1.attn.qkv.weight', 'lgm_big.unet.up_blocks.1.attns.1.attn.proj.weight', 'lgm_big.unet.up_blocks.1.attns.1.attn.proj.bias', 'lgm_big.unet.up_blocks.1.attns.2.norm.weight', 'lgm_big.unet.up_blocks.1.attns.2.norm.bias', 'lgm_big.unet.up_blocks.1.attns.2.attn.qkv.weight', 'lgm_big.unet.up_blocks.1.attns.2.attn.proj.weight', 'lgm_big.unet.up_blocks.1.attns.2.attn.proj.bias', 'lgm_big.unet.up_blocks.1.upsample.weight', 'lgm_big.unet.up_blocks.1.upsample.bias', 
'lgm_big.unet.up_blocks.2.nets.0.norm1.weight', 'lgm_big.unet.up_blocks.2.nets.0.norm1.bias', 'lgm_big.unet.up_blocks.2.nets.0.conv1.weight', 'lgm_big.unet.up_blocks.2.nets.0.conv1.bias', 'lgm_big.unet.up_blocks.2.nets.0.norm2.weight', 'lgm_big.unet.up_blocks.2.nets.0.norm2.bias', 'lgm_big.unet.up_blocks.2.nets.0.conv2.weight', 'lgm_big.unet.up_blocks.2.nets.0.conv2.bias', 'lgm_big.unet.up_blocks.2.nets.0.shortcut.weight', 'lgm_big.unet.up_blocks.2.nets.0.shortcut.bias', 'lgm_big.unet.up_blocks.2.nets.1.norm1.weight', 'lgm_big.unet.up_blocks.2.nets.1.norm1.bias', 'lgm_big.unet.up_blocks.2.nets.1.conv1.weight', 'lgm_big.unet.up_blocks.2.nets.1.conv1.bias', 'lgm_big.unet.up_blocks.2.nets.1.norm2.weight', 'lgm_big.unet.up_blocks.2.nets.1.norm2.bias', 'lgm_big.unet.up_blocks.2.nets.1.conv2.weight', 'lgm_big.unet.up_blocks.2.nets.1.conv2.bias', 'lgm_big.unet.up_blocks.2.nets.1.shortcut.weight', 'lgm_big.unet.up_blocks.2.nets.1.shortcut.bias', 'lgm_big.unet.up_blocks.2.nets.2.norm1.weight', 'lgm_big.unet.up_blocks.2.nets.2.norm1.bias', 'lgm_big.unet.up_blocks.2.nets.2.conv1.weight', 'lgm_big.unet.up_blocks.2.nets.2.conv1.bias', 'lgm_big.unet.up_blocks.2.nets.2.norm2.weight', 'lgm_big.unet.up_blocks.2.nets.2.norm2.bias', 'lgm_big.unet.up_blocks.2.nets.2.conv2.weight', 'lgm_big.unet.up_blocks.2.nets.2.conv2.bias', 'lgm_big.unet.up_blocks.2.nets.2.shortcut.weight', 'lgm_big.unet.up_blocks.2.nets.2.shortcut.bias', 'lgm_big.unet.up_blocks.2.attns.0.norm.weight', 'lgm_big.unet.up_blocks.2.attns.0.norm.bias', 'lgm_big.unet.up_blocks.2.attns.0.attn.qkv.weight', 'lgm_big.unet.up_blocks.2.attns.0.attn.proj.weight', 'lgm_big.unet.up_blocks.2.attns.0.attn.proj.bias', 'lgm_big.unet.up_blocks.2.attns.1.norm.weight', 'lgm_big.unet.up_blocks.2.attns.1.norm.bias', 'lgm_big.unet.up_blocks.2.attns.1.attn.qkv.weight', 'lgm_big.unet.up_blocks.2.attns.1.attn.proj.weight', 'lgm_big.unet.up_blocks.2.attns.1.attn.proj.bias', 'lgm_big.unet.up_blocks.2.attns.2.norm.weight', 'lgm_big.unet.up_blocks.2.attns.2.norm.bias', 'lgm_big.unet.up_blocks.2.attns.2.attn.qkv.weight', 'lgm_big.unet.up_blocks.2.attns.2.attn.proj.weight', 'lgm_big.unet.up_blocks.2.attns.2.attn.proj.bias', 'lgm_big.unet.up_blocks.2.upsample.weight', 'lgm_big.unet.up_blocks.2.upsample.bias', 'lgm_big.unet.up_blocks.3.nets.0.norm1.weight', 'lgm_big.unet.up_blocks.3.nets.0.norm1.bias', 'lgm_big.unet.up_blocks.3.nets.0.conv1.weight', 'lgm_big.unet.up_blocks.3.nets.0.conv1.bias', 'lgm_big.unet.up_blocks.3.nets.0.norm2.weight', 'lgm_big.unet.up_blocks.3.nets.0.norm2.bias', 'lgm_big.unet.up_blocks.3.nets.0.conv2.weight', 'lgm_big.unet.up_blocks.3.nets.0.conv2.bias', 'lgm_big.unet.up_blocks.3.nets.0.shortcut.weight', 'lgm_big.unet.up_blocks.3.nets.0.shortcut.bias', 'lgm_big.unet.up_blocks.3.nets.1.norm1.weight', 'lgm_big.unet.up_blocks.3.nets.1.norm1.bias', 'lgm_big.unet.up_blocks.3.nets.1.conv1.weight', 'lgm_big.unet.up_blocks.3.nets.1.conv1.bias', 'lgm_big.unet.up_blocks.3.nets.1.norm2.weight', 'lgm_big.unet.up_blocks.3.nets.1.norm2.bias', 'lgm_big.unet.up_blocks.3.nets.1.conv2.weight', 'lgm_big.unet.up_blocks.3.nets.1.conv2.bias', 'lgm_big.unet.up_blocks.3.nets.1.shortcut.weight', 'lgm_big.unet.up_blocks.3.nets.1.shortcut.bias', 'lgm_big.unet.up_blocks.3.nets.2.norm1.weight', 'lgm_big.unet.up_blocks.3.nets.2.norm1.bias', 'lgm_big.unet.up_blocks.3.nets.2.conv1.weight', 'lgm_big.unet.up_blocks.3.nets.2.conv1.bias', 'lgm_big.unet.up_blocks.3.nets.2.norm2.weight', 'lgm_big.unet.up_blocks.3.nets.2.norm2.bias', 'lgm_big.unet.up_blocks.3.nets.2.conv2.weight', 
'lgm_big.unet.up_blocks.3.nets.2.conv2.bias', 'lgm_big.unet.up_blocks.3.nets.2.shortcut.weight', 'lgm_big.unet.up_blocks.3.nets.2.shortcut.bias', 'lgm_big.unet.up_blocks.3.upsample.weight', 'lgm_big.unet.up_blocks.3.upsample.bias', 'lgm_big.unet.up_blocks.4.nets.0.norm1.weight', 'lgm_big.unet.up_blocks.4.nets.0.norm1.bias', 'lgm_big.unet.up_blocks.4.nets.0.conv1.weight', 'lgm_big.unet.up_blocks.4.nets.0.conv1.bias', 'lgm_big.unet.up_blocks.4.nets.0.norm2.weight', 'lgm_big.unet.up_blocks.4.nets.0.norm2.bias', 'lgm_big.unet.up_blocks.4.nets.0.conv2.weight', 'lgm_big.unet.up_blocks.4.nets.0.conv2.bias', 'lgm_big.unet.up_blocks.4.nets.0.shortcut.weight', 'lgm_big.unet.up_blocks.4.nets.0.shortcut.bias', 'lgm_big.unet.up_blocks.4.nets.1.norm1.weight', 'lgm_big.unet.up_blocks.4.nets.1.norm1.bias', 'lgm_big.unet.up_blocks.4.nets.1.conv1.weight', 'lgm_big.unet.up_blocks.4.nets.1.conv1.bias', 'lgm_big.unet.up_blocks.4.nets.1.norm2.weight', 'lgm_big.unet.up_blocks.4.nets.1.norm2.bias', 'lgm_big.unet.up_blocks.4.nets.1.conv2.weight', 'lgm_big.unet.up_blocks.4.nets.1.conv2.bias', 'lgm_big.unet.up_blocks.4.nets.1.shortcut.weight', 'lgm_big.unet.up_blocks.4.nets.1.shortcut.bias', 'lgm_big.unet.up_blocks.4.nets.2.norm1.weight', 'lgm_big.unet.up_blocks.4.nets.2.norm1.bias', 'lgm_big.unet.up_blocks.4.nets.2.conv1.weight', 'lgm_big.unet.up_blocks.4.nets.2.conv1.bias', 'lgm_big.unet.up_blocks.4.nets.2.norm2.weight', 'lgm_big.unet.up_blocks.4.nets.2.norm2.bias', 'lgm_big.unet.up_blocks.4.nets.2.conv2.weight', 'lgm_big.unet.up_blocks.4.nets.2.conv2.bias', 'lgm_big.unet.up_blocks.4.nets.2.shortcut.weight', 'lgm_big.unet.up_blocks.4.nets.2.shortcut.bias', 'lgm_big.unet.norm_out.weight', 'lgm_big.unet.norm_out.bias', 'lgm_big.unet.conv_out.weight', 'lgm_big.unet.conv_out.bias', 'lgm_big.conv.weight', 'lgm_big.conv.bias', 'camera_embedding.0.weight', 'camera_embedding.0.bias', 'camera_embedding.2.weight', 'camera_embedding.2.bias']) +[2024-06-08 18:19:55,462] INFO: Successfully load step 882000 model from ./pretrained_models/i2v_882000.pth +[2024-06-08 18:19:55,463] INFO: load a fixed model with 823M parameters +[2024-06-08 18:21:19,331] INFO: Step: 882000/1000000 Loss: 0.119 scale: 65536.0 LR: 0.0000030 +[2024-06-08 18:21:19,331] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00882000.pth +[2024-06-08 18:21:26,052] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00882000.pth +[2024-06-08 18:21:39,516] INFO: Step: 882001/1000000 Loss: 0.114 scale: 65536.0 LR: 0.0000060 +[2024-06-08 18:21:53,425] INFO: Step: 882002/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000090 +[2024-06-08 18:22:07,313] INFO: Step: 882003/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000120 +[2024-06-08 18:22:21,168] INFO: Step: 882004/1000000 Loss: 0.109 scale: 65536.0 LR: 0.0000150 +[2024-06-08 18:22:34,933] INFO: Step: 882005/1000000 Loss: 0.105 scale: 65536.0 LR: 0.0000180 +[2024-06-08 18:22:48,756] INFO: Step: 882006/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000210 +[2024-06-08 18:23:02,621] INFO: Step: 882007/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000240 +[2024-06-08 18:23:16,721] INFO: Step: 882008/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000270 +[2024-06-08 18:23:30,716] INFO: Step: 882009/1000000 Loss: 0.103 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:23:44,773] INFO: Step: 882010/1000000 Loss: 0.109 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:23:58,724] INFO: Step: 882011/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:24:12,472] INFO: Step: 
882012/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:24:26,342] INFO: Step: 882013/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:24:40,258] INFO: Step: 882014/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:24:54,118] INFO: Step: 882015/1000000 Loss: 0.105 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:25:07,925] INFO: Step: 882016/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:25:21,663] INFO: Step: 882017/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:25:35,407] INFO: Step: 882018/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:25:49,194] INFO: Step: 882019/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:26:03,143] INFO: Step: 882020/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:26:16,867] INFO: Step: 882021/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:26:30,538] INFO: Step: 882022/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:26:44,377] INFO: Step: 882023/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:26:58,229] INFO: Step: 882024/1000000 Loss: 0.109 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:27:12,053] INFO: Step: 882025/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:27:25,745] INFO: Step: 882026/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:27:39,680] INFO: Step: 882027/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:27:53,574] INFO: Step: 882028/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:28:07,381] INFO: Step: 882029/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:28:21,211] INFO: Step: 882030/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:28:35,127] INFO: Step: 882031/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:28:48,854] INFO: Step: 882032/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:29:02,648] INFO: Step: 882033/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:29:16,451] INFO: Step: 882034/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:29:30,088] INFO: Step: 882035/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:29:43,785] INFO: Step: 882036/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:29:57,610] INFO: Step: 882037/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:30:11,513] INFO: Step: 882038/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:30:25,374] INFO: Step: 882039/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:30:39,264] INFO: Step: 882040/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:30:53,015] INFO: Step: 882041/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:31:06,900] INFO: Step: 882042/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:31:20,783] INFO: Step: 882043/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:31:34,721] INFO: Step: 882044/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:31:48,494] INFO: Step: 882045/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:32:02,337] INFO: Step: 882046/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:32:16,074] INFO: Step: 882047/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:32:29,951] INFO: Step: 882048/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:32:43,764] INFO: Step: 882049/1000000 Loss: 0.084 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 18:32:57,505] INFO: Step: 882050/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:33:11,224] INFO: Step: 882051/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:33:25,113] INFO: Step: 882052/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:33:38,821] INFO: Step: 882053/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:33:52,828] INFO: Step: 882054/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:34:06,587] INFO: Step: 882055/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:34:20,520] INFO: Step: 882056/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:34:34,340] INFO: Step: 882057/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:34:48,102] INFO: Step: 882058/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:35:01,843] INFO: Step: 882059/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:35:15,520] INFO: Step: 882060/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:35:29,264] INFO: Step: 882061/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:35:43,073] INFO: Step: 882062/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:35:56,830] INFO: Step: 882063/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:36:10,469] INFO: Step: 882064/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:36:24,066] INFO: Step: 882065/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:36:37,801] INFO: Step: 882066/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:36:51,322] INFO: Step: 882067/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:37:05,197] INFO: Step: 882068/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:37:18,843] INFO: Step: 882069/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:37:32,808] INFO: Step: 882070/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:37:46,590] INFO: Step: 882071/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:38:00,327] INFO: Step: 882072/1000000 Loss: 0.101 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:38:14,285] INFO: Step: 882073/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:38:28,072] INFO: Step: 882074/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:38:41,791] INFO: Step: 882075/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:38:55,628] INFO: Step: 882076/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:39:09,520] INFO: Step: 882077/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:39:23,185] INFO: Step: 882078/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:39:37,029] INFO: Step: 882079/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:39:50,751] INFO: Step: 882080/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:40:04,495] INFO: Step: 882081/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:40:18,285] INFO: Step: 882082/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:40:32,118] INFO: Step: 882083/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:40:45,866] INFO: Step: 882084/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:40:59,651] INFO: Step: 882085/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:41:13,642] INFO: Step: 882086/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
18:41:27,326] INFO: Step: 882087/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:41:41,289] INFO: Step: 882088/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:41:55,041] INFO: Step: 882089/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:42:08,794] INFO: Step: 882090/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:42:22,548] INFO: Step: 882091/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:42:36,353] INFO: Step: 882092/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:42:50,174] INFO: Step: 882093/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:43:04,045] INFO: Step: 882094/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:43:17,858] INFO: Step: 882095/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:43:31,754] INFO: Step: 882096/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:43:45,280] INFO: Step: 882097/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:43:59,179] INFO: Step: 882098/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:44:12,889] INFO: Step: 882099/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:44:26,707] INFO: Step: 882100/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:44:40,551] INFO: Step: 882101/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:44:54,272] INFO: Step: 882102/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:45:08,017] INFO: Step: 882103/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:45:21,778] INFO: Step: 882104/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:45:35,428] INFO: Step: 882105/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:45:49,240] INFO: Step: 882106/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:46:03,177] INFO: Step: 882107/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:46:16,877] INFO: Step: 882108/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:46:30,803] INFO: Step: 882109/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:46:44,644] INFO: Step: 882110/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:46:58,362] INFO: Step: 882111/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:47:12,171] INFO: Step: 882112/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:47:26,009] INFO: Step: 882113/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:47:39,890] INFO: Step: 882114/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:47:53,872] INFO: Step: 882115/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:48:07,648] INFO: Step: 882116/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:48:21,447] INFO: Step: 882117/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:48:35,255] INFO: Step: 882118/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:48:49,071] INFO: Step: 882119/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:49:02,966] INFO: Step: 882120/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:49:16,749] INFO: Step: 882121/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:49:30,534] INFO: Step: 882122/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:49:44,227] INFO: Step: 882123/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:49:58,156] INFO: Step: 
882124/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:50:11,914] INFO: Step: 882125/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:50:25,688] INFO: Step: 882126/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:50:39,439] INFO: Step: 882127/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:50:53,273] INFO: Step: 882128/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:51:06,972] INFO: Step: 882129/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:51:20,840] INFO: Step: 882130/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:51:34,582] INFO: Step: 882131/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:51:48,438] INFO: Step: 882132/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:52:02,135] INFO: Step: 882133/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:52:15,935] INFO: Step: 882134/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:52:29,607] INFO: Step: 882135/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:52:43,428] INFO: Step: 882136/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:52:57,376] INFO: Step: 882137/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:53:11,164] INFO: Step: 882138/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:53:24,944] INFO: Step: 882139/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:53:38,809] INFO: Step: 882140/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:53:52,614] INFO: Step: 882141/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:54:06,471] INFO: Step: 882142/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:54:20,310] INFO: Step: 882143/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:54:34,093] INFO: Step: 882144/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:54:47,980] INFO: Step: 882145/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:55:01,611] INFO: Step: 882146/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:55:15,292] INFO: Step: 882147/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:55:29,055] INFO: Step: 882148/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:55:42,951] INFO: Step: 882149/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:55:56,722] INFO: Step: 882150/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:56:10,577] INFO: Step: 882151/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:56:24,347] INFO: Step: 882152/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:56:37,947] INFO: Step: 882153/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:56:51,560] INFO: Step: 882154/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:57:05,278] INFO: Step: 882155/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:57:19,043] INFO: Step: 882156/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:57:32,863] INFO: Step: 882157/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:57:46,752] INFO: Step: 882158/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:58:00,474] INFO: Step: 882159/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:58:14,302] INFO: Step: 882160/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:58:28,169] INFO: Step: 882161/1000000 Loss: 0.086 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 18:58:41,945] INFO: Step: 882162/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:58:55,704] INFO: Step: 882163/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:59:09,790] INFO: Step: 882164/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:59:23,821] INFO: Step: 882165/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:59:37,545] INFO: Step: 882166/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 18:59:51,392] INFO: Step: 882167/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:00:05,145] INFO: Step: 882168/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:00:19,068] INFO: Step: 882169/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:00:32,844] INFO: Step: 882170/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:00:46,633] INFO: Step: 882171/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:01:00,440] INFO: Step: 882172/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:01:14,153] INFO: Step: 882173/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:01:28,007] INFO: Step: 882174/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:01:41,840] INFO: Step: 882175/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:01:55,590] INFO: Step: 882176/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:02:09,412] INFO: Step: 882177/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:02:22,969] INFO: Step: 882178/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:02:36,595] INFO: Step: 882179/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:02:50,371] INFO: Step: 882180/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:03:04,122] INFO: Step: 882181/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:03:17,723] INFO: Step: 882182/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:03:31,784] INFO: Step: 882183/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:03:45,318] INFO: Step: 882184/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:03:58,996] INFO: Step: 882185/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:04:12,799] INFO: Step: 882186/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:04:26,520] INFO: Step: 882187/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:04:40,281] INFO: Step: 882188/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:04:54,103] INFO: Step: 882189/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:05:07,862] INFO: Step: 882190/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:05:21,560] INFO: Step: 882191/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:05:35,507] INFO: Step: 882192/1000000 Loss: 0.103 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:05:49,425] INFO: Step: 882193/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:06:03,251] INFO: Step: 882194/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:06:16,824] INFO: Step: 882195/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:06:30,596] INFO: Step: 882196/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:06:44,341] INFO: Step: 882197/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:06:58,118] INFO: Step: 882198/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
19:07:11,919] INFO: Step: 882199/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:07:25,679] INFO: Step: 882200/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:07:39,475] INFO: Step: 882201/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:07:53,417] INFO: Step: 882202/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:08:07,256] INFO: Step: 882203/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:08:21,070] INFO: Step: 882204/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:08:34,756] INFO: Step: 882205/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:08:48,358] INFO: Step: 882206/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:09:02,075] INFO: Step: 882207/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:09:15,648] INFO: Step: 882208/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:09:29,340] INFO: Step: 882209/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:09:43,109] INFO: Step: 882210/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:09:56,870] INFO: Step: 882211/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:10:10,589] INFO: Step: 882212/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:10:24,370] INFO: Step: 882213/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:10:38,249] INFO: Step: 882214/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:10:52,082] INFO: Step: 882215/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:11:05,843] INFO: Step: 882216/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:11:19,557] INFO: Step: 882217/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:11:33,401] INFO: Step: 882218/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:11:47,299] INFO: Step: 882219/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:12:01,022] INFO: Step: 882220/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:12:14,672] INFO: Step: 882221/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:12:28,481] INFO: Step: 882222/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:12:42,212] INFO: Step: 882223/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:12:55,978] INFO: Step: 882224/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:13:09,725] INFO: Step: 882225/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:13:23,514] INFO: Step: 882226/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:13:37,326] INFO: Step: 882227/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:13:51,102] INFO: Step: 882228/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:14:04,912] INFO: Step: 882229/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:14:18,653] INFO: Step: 882230/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:14:32,466] INFO: Step: 882231/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:14:46,301] INFO: Step: 882232/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:15:00,341] INFO: Step: 882233/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:15:14,138] INFO: Step: 882234/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:15:28,092] INFO: Step: 882235/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:15:41,972] INFO: Step: 
882236/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:15:55,658] INFO: Step: 882237/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:16:09,507] INFO: Step: 882238/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:16:23,264] INFO: Step: 882239/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:16:36,863] INFO: Step: 882240/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:16:50,474] INFO: Step: 882241/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:17:04,302] INFO: Step: 882242/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:17:18,145] INFO: Step: 882243/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:17:31,893] INFO: Step: 882244/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:17:45,686] INFO: Step: 882245/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:17:59,474] INFO: Step: 882246/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:18:13,256] INFO: Step: 882247/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:18:27,001] INFO: Step: 882248/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:18:40,732] INFO: Step: 882249/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:18:54,501] INFO: Step: 882250/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:19:08,481] INFO: Step: 882251/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:19:22,212] INFO: Step: 882252/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:19:35,966] INFO: Step: 882253/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:19:49,924] INFO: Step: 882254/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:20:03,931] INFO: Step: 882255/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:20:17,654] INFO: Step: 882256/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:20:31,311] INFO: Step: 882257/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:20:45,107] INFO: Step: 882258/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:20:58,984] INFO: Step: 882259/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:21:12,825] INFO: Step: 882260/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:21:26,449] INFO: Step: 882261/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:21:40,298] INFO: Step: 882262/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:21:54,224] INFO: Step: 882263/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:22:07,913] INFO: Step: 882264/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:22:21,740] INFO: Step: 882265/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:22:35,552] INFO: Step: 882266/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:22:49,244] INFO: Step: 882267/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:23:03,066] INFO: Step: 882268/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:23:16,794] INFO: Step: 882269/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:23:30,553] INFO: Step: 882270/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:23:44,166] INFO: Step: 882271/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:23:57,944] INFO: Step: 882272/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:24:11,704] INFO: Step: 882273/1000000 Loss: 0.082 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 19:24:25,482] INFO: Step: 882274/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:24:39,202] INFO: Step: 882275/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:24:53,024] INFO: Step: 882276/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:25:06,746] INFO: Step: 882277/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:25:20,590] INFO: Step: 882278/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:25:34,298] INFO: Step: 882279/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:25:48,062] INFO: Step: 882280/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:26:01,660] INFO: Step: 882281/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:26:15,363] INFO: Step: 882282/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:26:29,273] INFO: Step: 882283/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:26:43,238] INFO: Step: 882284/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:26:57,189] INFO: Step: 882285/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:27:11,019] INFO: Step: 882286/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:27:24,787] INFO: Step: 882287/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:27:38,589] INFO: Step: 882288/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:27:52,305] INFO: Step: 882289/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:28:06,262] INFO: Step: 882290/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:28:19,939] INFO: Step: 882291/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:28:33,914] INFO: Step: 882292/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:28:47,784] INFO: Step: 882293/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:29:01,599] INFO: Step: 882294/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:29:15,382] INFO: Step: 882295/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:29:29,212] INFO: Step: 882296/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:29:43,007] INFO: Step: 882297/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:29:56,811] INFO: Step: 882298/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:30:10,693] INFO: Step: 882299/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:30:24,390] INFO: Step: 882300/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:30:38,153] INFO: Step: 882301/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:30:51,861] INFO: Step: 882302/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:31:05,624] INFO: Step: 882303/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:31:19,412] INFO: Step: 882304/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:31:33,077] INFO: Step: 882305/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:31:46,947] INFO: Step: 882306/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:32:00,646] INFO: Step: 882307/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:32:14,261] INFO: Step: 882308/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:32:28,046] INFO: Step: 882309/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:32:41,727] INFO: Step: 882310/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
19:32:55,286] INFO: Step: 882311/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:33:09,168] INFO: Step: 882312/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:33:22,868] INFO: Step: 882313/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:33:36,726] INFO: Step: 882314/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:33:50,368] INFO: Step: 882315/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:34:04,105] INFO: Step: 882316/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:34:17,838] INFO: Step: 882317/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:34:31,633] INFO: Step: 882318/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:34:45,396] INFO: Step: 882319/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:34:59,154] INFO: Step: 882320/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:35:12,998] INFO: Step: 882321/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:35:26,844] INFO: Step: 882322/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:35:40,422] INFO: Step: 882323/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:35:54,206] INFO: Step: 882324/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:36:08,020] INFO: Step: 882325/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:36:21,790] INFO: Step: 882326/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:36:35,661] INFO: Step: 882327/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:36:49,448] INFO: Step: 882328/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:37:03,484] INFO: Step: 882329/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:37:17,241] INFO: Step: 882330/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:37:31,010] INFO: Step: 882331/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:37:44,790] INFO: Step: 882332/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:37:58,644] INFO: Step: 882333/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:38:12,306] INFO: Step: 882334/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:38:26,102] INFO: Step: 882335/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:38:39,831] INFO: Step: 882336/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:38:53,671] INFO: Step: 882337/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:39:07,470] INFO: Step: 882338/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:39:21,180] INFO: Step: 882339/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:39:34,878] INFO: Step: 882340/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:39:48,641] INFO: Step: 882341/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:40:02,321] INFO: Step: 882342/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:40:16,063] INFO: Step: 882343/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:40:30,188] INFO: Step: 882344/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:40:44,042] INFO: Step: 882345/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:40:57,954] INFO: Step: 882346/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:41:11,900] INFO: Step: 882347/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:41:25,788] INFO: Step: 
882348/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:41:39,469] INFO: Step: 882349/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:41:53,088] INFO: Step: 882350/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:42:07,093] INFO: Step: 882351/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:42:20,866] INFO: Step: 882352/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:42:34,638] INFO: Step: 882353/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:42:48,374] INFO: Step: 882354/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:43:02,089] INFO: Step: 882355/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:43:15,866] INFO: Step: 882356/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:43:29,648] INFO: Step: 882357/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:43:43,403] INFO: Step: 882358/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:43:57,286] INFO: Step: 882359/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:44:11,300] INFO: Step: 882360/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:44:24,973] INFO: Step: 882361/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:44:38,646] INFO: Step: 882362/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:44:52,418] INFO: Step: 882363/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:45:06,470] INFO: Step: 882364/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:45:20,378] INFO: Step: 882365/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:45:34,311] INFO: Step: 882366/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:45:48,010] INFO: Step: 882367/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:46:01,838] INFO: Step: 882368/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:46:15,631] INFO: Step: 882369/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:46:29,459] INFO: Step: 882370/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:46:43,167] INFO: Step: 882371/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:46:56,750] INFO: Step: 882372/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:47:10,567] INFO: Step: 882373/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:47:24,239] INFO: Step: 882374/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:47:38,127] INFO: Step: 882375/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:47:51,829] INFO: Step: 882376/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:48:06,003] INFO: Step: 882377/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:48:19,692] INFO: Step: 882378/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:48:33,327] INFO: Step: 882379/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:48:47,224] INFO: Step: 882380/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:49:01,305] INFO: Step: 882381/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:49:15,047] INFO: Step: 882382/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:49:28,894] INFO: Step: 882383/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:49:42,847] INFO: Step: 882384/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:49:56,696] INFO: Step: 882385/1000000 Loss: 0.083 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 19:50:10,520] INFO: Step: 882386/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:50:24,295] INFO: Step: 882387/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:50:38,058] INFO: Step: 882388/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:50:51,806] INFO: Step: 882389/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:51:05,569] INFO: Step: 882390/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:51:19,428] INFO: Step: 882391/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:51:33,210] INFO: Step: 882392/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:51:47,088] INFO: Step: 882393/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:52:00,731] INFO: Step: 882394/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:52:14,524] INFO: Step: 882395/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:52:28,270] INFO: Step: 882396/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:52:42,181] INFO: Step: 882397/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:52:55,967] INFO: Step: 882398/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:53:09,864] INFO: Step: 882399/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:53:23,786] INFO: Step: 882400/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:53:37,641] INFO: Step: 882401/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:53:51,417] INFO: Step: 882402/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:54:05,296] INFO: Step: 882403/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:54:19,063] INFO: Step: 882404/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:54:32,740] INFO: Step: 882405/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:54:46,581] INFO: Step: 882406/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:55:00,306] INFO: Step: 882407/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:55:14,184] INFO: Step: 882408/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:55:28,085] INFO: Step: 882409/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:55:41,935] INFO: Step: 882410/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:55:55,782] INFO: Step: 882411/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:56:09,504] INFO: Step: 882412/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:56:23,417] INFO: Step: 882413/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:56:37,290] INFO: Step: 882414/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:56:51,089] INFO: Step: 882415/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:57:04,845] INFO: Step: 882416/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:57:18,535] INFO: Step: 882417/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:57:32,376] INFO: Step: 882418/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:57:46,240] INFO: Step: 882419/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:58:00,069] INFO: Step: 882420/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:58:13,769] INFO: Step: 882421/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:58:27,652] INFO: Step: 882422/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
19:58:41,477] INFO: Step: 882423/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:58:55,410] INFO: Step: 882424/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:59:09,081] INFO: Step: 882425/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:59:22,728] INFO: Step: 882426/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:59:36,475] INFO: Step: 882427/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 19:59:50,240] INFO: Step: 882428/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:00:03,970] INFO: Step: 882429/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:00:17,744] INFO: Step: 882430/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:00:31,519] INFO: Step: 882431/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:00:45,377] INFO: Step: 882432/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:00:59,310] INFO: Step: 882433/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:01:13,108] INFO: Step: 882434/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:01:26,807] INFO: Step: 882435/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:01:40,526] INFO: Step: 882436/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:01:54,320] INFO: Step: 882437/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:02:08,430] INFO: Step: 882438/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:02:22,474] INFO: Step: 882439/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:02:36,357] INFO: Step: 882440/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:02:50,175] INFO: Step: 882441/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:03:03,827] INFO: Step: 882442/1000000 Loss: 0.102 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:03:17,585] INFO: Step: 882443/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:03:31,554] INFO: Step: 882444/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:03:45,502] INFO: Step: 882445/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:03:59,363] INFO: Step: 882446/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:04:13,233] INFO: Step: 882447/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:04:27,017] INFO: Step: 882448/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:04:40,815] INFO: Step: 882449/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:04:54,771] INFO: Step: 882450/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:05:08,595] INFO: Step: 882451/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:05:22,422] INFO: Step: 882452/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:05:36,391] INFO: Step: 882453/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:05:50,213] INFO: Step: 882454/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:06:04,069] INFO: Step: 882455/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:06:17,708] INFO: Step: 882456/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:06:31,646] INFO: Step: 882457/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:06:45,456] INFO: Step: 882458/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:06:59,128] INFO: Step: 882459/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:07:12,933] INFO: Step: 
882460/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:07:26,818] INFO: Step: 882461/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:07:40,536] INFO: Step: 882462/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:07:54,276] INFO: Step: 882463/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:08:08,096] INFO: Step: 882464/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:08:21,855] INFO: Step: 882465/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:08:35,697] INFO: Step: 882466/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:08:49,572] INFO: Step: 882467/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:09:03,557] INFO: Step: 882468/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:09:17,486] INFO: Step: 882469/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:09:31,316] INFO: Step: 882470/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:09:45,106] INFO: Step: 882471/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:09:58,949] INFO: Step: 882472/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:10:12,746] INFO: Step: 882473/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:10:26,565] INFO: Step: 882474/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:10:40,375] INFO: Step: 882475/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:10:54,231] INFO: Step: 882476/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:11:08,112] INFO: Step: 882477/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:11:21,862] INFO: Step: 882478/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:11:35,667] INFO: Step: 882479/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:11:49,532] INFO: Step: 882480/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:12:03,225] INFO: Step: 882481/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:12:17,089] INFO: Step: 882482/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:12:30,724] INFO: Step: 882483/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:12:44,509] INFO: Step: 882484/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:12:58,237] INFO: Step: 882485/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:13:12,029] INFO: Step: 882486/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:13:25,901] INFO: Step: 882487/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:13:39,538] INFO: Step: 882488/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:13:53,208] INFO: Step: 882489/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:14:07,029] INFO: Step: 882490/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:14:20,885] INFO: Step: 882491/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:14:34,679] INFO: Step: 882492/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:14:48,433] INFO: Step: 882493/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:15:02,357] INFO: Step: 882494/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:15:16,096] INFO: Step: 882495/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:15:29,801] INFO: Step: 882496/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:15:43,601] INFO: Step: 882497/1000000 Loss: 0.074 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 20:15:57,321] INFO: Step: 882498/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:16:11,290] INFO: Step: 882499/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:16:24,971] INFO: Step: 882500/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:16:24,972] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00882500.pth +[2024-06-08 20:16:31,922] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00882500.pth +[2024-06-08 20:16:44,064] INFO: Step: 882501/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:16:57,758] INFO: Step: 882502/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:17:11,585] INFO: Step: 882503/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:17:25,265] INFO: Step: 882504/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:17:38,923] INFO: Step: 882505/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:17:52,691] INFO: Step: 882506/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:18:06,436] INFO: Step: 882507/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:18:20,209] INFO: Step: 882508/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:18:34,087] INFO: Step: 882509/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:18:47,744] INFO: Step: 882510/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:19:01,612] INFO: Step: 882511/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:19:15,488] INFO: Step: 882512/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:19:29,557] INFO: Step: 882513/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:19:43,510] INFO: Step: 882514/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:19:57,390] INFO: Step: 882515/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:20:11,250] INFO: Step: 882516/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:20:25,092] INFO: Step: 882517/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:20:38,850] INFO: Step: 882518/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:20:52,663] INFO: Step: 882519/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:21:06,394] INFO: Step: 882520/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:21:20,253] INFO: Step: 882521/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:21:33,955] INFO: Step: 882522/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:21:47,642] INFO: Step: 882523/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:22:01,359] INFO: Step: 882524/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:22:15,273] INFO: Step: 882525/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:22:29,059] INFO: Step: 882526/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:22:42,741] INFO: Step: 882527/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:22:56,568] INFO: Step: 882528/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:23:10,510] INFO: Step: 882529/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:23:24,283] INFO: Step: 882530/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:23:38,035] INFO: Step: 882531/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:23:51,768] INFO: Step: 882532/1000000 
Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:24:05,711] INFO: Step: 882533/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:24:19,510] INFO: Step: 882534/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:24:33,338] INFO: Step: 882535/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:24:47,249] INFO: Step: 882536/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:25:01,076] INFO: Step: 882537/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:25:14,745] INFO: Step: 882538/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:25:28,489] INFO: Step: 882539/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:25:42,296] INFO: Step: 882540/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:25:56,142] INFO: Step: 882541/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:26:09,941] INFO: Step: 882542/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:26:23,713] INFO: Step: 882543/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:26:37,362] INFO: Step: 882544/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:26:51,206] INFO: Step: 882545/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:27:05,053] INFO: Step: 882546/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:27:18,745] INFO: Step: 882547/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:27:32,570] INFO: Step: 882548/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:27:46,380] INFO: Step: 882549/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:28:00,112] INFO: Step: 882550/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:28:13,852] INFO: Step: 882551/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:28:27,638] INFO: Step: 882552/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:28:41,463] INFO: Step: 882553/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:28:55,389] INFO: Step: 882554/1000000 Loss: 0.102 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:29:09,347] INFO: Step: 882555/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:29:23,048] INFO: Step: 882556/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:29:36,844] INFO: Step: 882557/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:29:50,590] INFO: Step: 882558/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:30:04,394] INFO: Step: 882559/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:30:18,027] INFO: Step: 882560/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:30:31,851] INFO: Step: 882561/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:30:45,700] INFO: Step: 882562/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:30:59,556] INFO: Step: 882563/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:31:13,281] INFO: Step: 882564/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:31:27,069] INFO: Step: 882565/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:31:40,833] INFO: Step: 882566/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:31:54,537] INFO: Step: 882567/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:32:08,243] INFO: Step: 882568/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:32:22,101] INFO: Step: 882569/1000000 Loss: 0.082 scale: 65536.0 LR: 
0.0000300 +[2024-06-08 20:32:35,810] INFO: Step: 882570/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:32:49,662] INFO: Step: 882571/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:33:03,393] INFO: Step: 882572/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:33:17,144] INFO: Step: 882573/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:33:30,971] INFO: Step: 882574/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:33:44,747] INFO: Step: 882575/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:33:58,482] INFO: Step: 882576/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:34:12,240] INFO: Step: 882577/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:34:26,073] INFO: Step: 882578/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:34:39,782] INFO: Step: 882579/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:34:53,544] INFO: Step: 882580/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:35:07,082] INFO: Step: 882581/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:35:20,845] INFO: Step: 882582/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:35:34,474] INFO: Step: 882583/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:35:48,118] INFO: Step: 882584/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:36:02,035] INFO: Step: 882585/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:36:15,827] INFO: Step: 882586/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:36:29,486] INFO: Step: 882587/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:36:43,193] INFO: Step: 882588/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:36:57,078] INFO: Step: 882589/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:37:11,233] INFO: Step: 882590/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:37:25,067] INFO: Step: 882591/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:37:39,146] INFO: Step: 882592/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:37:52,861] INFO: Step: 882593/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:38:06,779] INFO: Step: 882594/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:38:20,608] INFO: Step: 882595/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:38:34,406] INFO: Step: 882596/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:38:48,235] INFO: Step: 882597/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:39:01,982] INFO: Step: 882598/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:39:15,713] INFO: Step: 882599/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:39:29,379] INFO: Step: 882600/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:39:43,202] INFO: Step: 882601/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:39:56,961] INFO: Step: 882602/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:40:10,792] INFO: Step: 882603/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:40:24,742] INFO: Step: 882604/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:40:38,574] INFO: Step: 882605/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:40:52,407] INFO: Step: 882606/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:41:06,188] 
INFO: Step: 882607/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:41:19,841] INFO: Step: 882608/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:41:33,619] INFO: Step: 882609/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:41:47,396] INFO: Step: 882610/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:42:01,312] INFO: Step: 882611/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:42:15,136] INFO: Step: 882612/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:42:29,008] INFO: Step: 882613/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:42:42,697] INFO: Step: 882614/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:42:56,520] INFO: Step: 882615/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:43:10,345] INFO: Step: 882616/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:43:24,002] INFO: Step: 882617/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:43:37,891] INFO: Step: 882618/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:43:51,780] INFO: Step: 882619/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:44:05,705] INFO: Step: 882620/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:44:19,391] INFO: Step: 882621/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:44:33,450] INFO: Step: 882622/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:44:47,327] INFO: Step: 882623/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:45:01,070] INFO: Step: 882624/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:45:14,906] INFO: Step: 882625/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:45:28,706] INFO: Step: 882626/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:45:42,518] INFO: Step: 882627/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:45:56,319] INFO: Step: 882628/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:46:10,125] INFO: Step: 882629/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:46:23,974] INFO: Step: 882630/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:46:37,669] INFO: Step: 882631/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:46:51,372] INFO: Step: 882632/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:47:05,221] INFO: Step: 882633/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:47:18,976] INFO: Step: 882634/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:47:32,731] INFO: Step: 882635/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:47:46,625] INFO: Step: 882636/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:48:00,390] INFO: Step: 882637/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:48:14,094] INFO: Step: 882638/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:48:27,871] INFO: Step: 882639/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:48:41,624] INFO: Step: 882640/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:48:55,444] INFO: Step: 882641/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:49:09,225] INFO: Step: 882642/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:49:23,067] INFO: Step: 882643/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:49:36,848] INFO: Step: 882644/1000000 Loss: 0.086 
scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:49:50,598] INFO: Step: 882645/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:50:04,356] INFO: Step: 882646/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:50:18,224] INFO: Step: 882647/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:50:32,013] INFO: Step: 882648/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:50:45,725] INFO: Step: 882649/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:50:59,538] INFO: Step: 882650/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:51:13,295] INFO: Step: 882651/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:51:27,189] INFO: Step: 882652/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:51:41,020] INFO: Step: 882653/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:51:54,869] INFO: Step: 882654/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:52:08,590] INFO: Step: 882655/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:52:22,276] INFO: Step: 882656/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:52:35,980] INFO: Step: 882657/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:52:49,615] INFO: Step: 882658/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:53:03,519] INFO: Step: 882659/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:53:17,285] INFO: Step: 882660/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:53:31,015] INFO: Step: 882661/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:53:44,920] INFO: Step: 882662/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:53:58,626] INFO: Step: 882663/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:54:12,511] INFO: Step: 882664/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:54:26,070] INFO: Step: 882665/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:54:39,761] INFO: Step: 882666/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:54:53,552] INFO: Step: 882667/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:55:07,300] INFO: Step: 882668/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:55:21,283] INFO: Step: 882669/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:55:35,177] INFO: Step: 882670/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:55:48,875] INFO: Step: 882671/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:56:02,643] INFO: Step: 882672/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:56:16,543] INFO: Step: 882673/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:56:30,140] INFO: Step: 882674/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:56:44,040] INFO: Step: 882675/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:56:57,806] INFO: Step: 882676/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:57:11,616] INFO: Step: 882677/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:57:25,603] INFO: Step: 882678/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:57:39,354] INFO: Step: 882679/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:57:53,130] INFO: Step: 882680/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:58:06,778] INFO: Step: 882681/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 
+[2024-06-08 20:58:20,436] INFO: Step: 882682/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:58:34,004] INFO: Step: 882683/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:58:47,706] INFO: Step: 882684/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:59:01,329] INFO: Step: 882685/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:59:15,114] INFO: Step: 882686/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:59:28,823] INFO: Step: 882687/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:59:42,555] INFO: Step: 882688/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 20:59:56,338] INFO: Step: 882689/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:00:09,943] INFO: Step: 882690/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:00:23,657] INFO: Step: 882691/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:00:37,224] INFO: Step: 882692/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:00:51,036] INFO: Step: 882693/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:01:04,867] INFO: Step: 882694/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:01:18,601] INFO: Step: 882695/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:01:32,375] INFO: Step: 882696/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:01:46,163] INFO: Step: 882697/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:01:59,950] INFO: Step: 882698/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:02:13,580] INFO: Step: 882699/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:02:27,356] INFO: Step: 882700/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:02:41,241] INFO: Step: 882701/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:02:55,104] INFO: Step: 882702/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:03:08,750] INFO: Step: 882703/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:03:22,494] INFO: Step: 882704/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:03:36,162] INFO: Step: 882705/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:03:49,839] INFO: Step: 882706/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:04:03,515] INFO: Step: 882707/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:04:17,162] INFO: Step: 882708/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:04:30,962] INFO: Step: 882709/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:04:44,759] INFO: Step: 882710/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:04:58,442] INFO: Step: 882711/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:05:12,176] INFO: Step: 882712/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:05:26,011] INFO: Step: 882713/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:05:39,761] INFO: Step: 882714/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:05:53,319] INFO: Step: 882715/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:06:07,042] INFO: Step: 882716/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:06:20,716] INFO: Step: 882717/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:06:34,583] INFO: Step: 882718/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:06:48,281] INFO: Step: 
882719/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:07:01,829] INFO: Step: 882720/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:07:15,475] INFO: Step: 882721/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:07:29,155] INFO: Step: 882722/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:07:42,983] INFO: Step: 882723/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:07:56,760] INFO: Step: 882724/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:08:10,432] INFO: Step: 882725/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:08:24,156] INFO: Step: 882726/1000000 Loss: 0.103 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:08:38,118] INFO: Step: 882727/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:08:51,863] INFO: Step: 882728/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:09:05,608] INFO: Step: 882729/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:09:19,385] INFO: Step: 882730/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:09:33,205] INFO: Step: 882731/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:09:47,052] INFO: Step: 882732/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:10:00,982] INFO: Step: 882733/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:10:14,629] INFO: Step: 882734/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:10:28,474] INFO: Step: 882735/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:10:42,250] INFO: Step: 882736/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:10:56,015] INFO: Step: 882737/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:11:09,752] INFO: Step: 882738/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:11:23,667] INFO: Step: 882739/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:11:37,500] INFO: Step: 882740/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:11:51,287] INFO: Step: 882741/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:12:05,060] INFO: Step: 882742/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:12:18,931] INFO: Step: 882743/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:12:32,591] INFO: Step: 882744/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:12:46,223] INFO: Step: 882745/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:12:59,966] INFO: Step: 882746/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:13:13,687] INFO: Step: 882747/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:13:27,421] INFO: Step: 882748/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:13:41,213] INFO: Step: 882749/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:13:55,009] INFO: Step: 882750/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:14:08,729] INFO: Step: 882751/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:14:22,448] INFO: Step: 882752/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:14:36,442] INFO: Step: 882753/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:14:50,177] INFO: Step: 882754/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:15:03,975] INFO: Step: 882755/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:15:17,945] INFO: Step: 882756/1000000 Loss: 0.081 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 21:15:31,751] INFO: Step: 882757/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:15:45,515] INFO: Step: 882758/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:15:59,308] INFO: Step: 882759/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:16:13,003] INFO: Step: 882760/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:16:26,751] INFO: Step: 882761/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:16:40,488] INFO: Step: 882762/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:16:54,299] INFO: Step: 882763/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:17:08,057] INFO: Step: 882764/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:17:21,846] INFO: Step: 882765/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:17:35,526] INFO: Step: 882766/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:17:49,299] INFO: Step: 882767/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:18:03,126] INFO: Step: 882768/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:18:17,039] INFO: Step: 882769/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:18:30,668] INFO: Step: 882770/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:18:44,520] INFO: Step: 882771/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:18:58,128] INFO: Step: 882772/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:19:11,861] INFO: Step: 882773/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:19:25,761] INFO: Step: 882774/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:19:39,543] INFO: Step: 882775/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:19:53,420] INFO: Step: 882776/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:20:07,113] INFO: Step: 882777/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:20:21,068] INFO: Step: 882778/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:20:34,883] INFO: Step: 882779/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:20:48,678] INFO: Step: 882780/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:21:02,618] INFO: Step: 882781/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:21:16,562] INFO: Step: 882782/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:21:30,179] INFO: Step: 882783/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:21:44,031] INFO: Step: 882784/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:21:57,857] INFO: Step: 882785/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:22:11,669] INFO: Step: 882786/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:22:25,566] INFO: Step: 882787/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:22:39,308] INFO: Step: 882788/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:22:52,935] INFO: Step: 882789/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:23:06,771] INFO: Step: 882790/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:23:20,724] INFO: Step: 882791/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:23:34,425] INFO: Step: 882792/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:23:48,240] INFO: Step: 882793/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
21:24:02,048] INFO: Step: 882794/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:24:16,074] INFO: Step: 882795/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:24:29,860] INFO: Step: 882796/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:24:43,440] INFO: Step: 882797/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:24:57,064] INFO: Step: 882798/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:25:10,879] INFO: Step: 882799/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:25:24,789] INFO: Step: 882800/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:25:38,692] INFO: Step: 882801/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:25:52,452] INFO: Step: 882802/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:26:06,276] INFO: Step: 882803/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:26:19,948] INFO: Step: 882804/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:26:33,674] INFO: Step: 882805/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:26:47,402] INFO: Step: 882806/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:27:01,240] INFO: Step: 882807/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:27:14,939] INFO: Step: 882808/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:27:28,688] INFO: Step: 882809/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:27:42,395] INFO: Step: 882810/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:27:56,274] INFO: Step: 882811/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:28:09,967] INFO: Step: 882812/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:28:23,797] INFO: Step: 882813/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:28:37,553] INFO: Step: 882814/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:28:51,334] INFO: Step: 882815/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:29:05,018] INFO: Step: 882816/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:29:18,678] INFO: Step: 882817/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:29:32,436] INFO: Step: 882818/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:29:46,210] INFO: Step: 882819/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:29:59,905] INFO: Step: 882820/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:30:13,850] INFO: Step: 882821/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:30:27,509] INFO: Step: 882822/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:30:41,243] INFO: Step: 882823/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:30:55,000] INFO: Step: 882824/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:31:08,673] INFO: Step: 882825/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:31:22,288] INFO: Step: 882826/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:31:36,160] INFO: Step: 882827/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:31:50,011] INFO: Step: 882828/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:32:03,693] INFO: Step: 882829/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:32:17,423] INFO: Step: 882830/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:32:31,172] INFO: Step: 
882831/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:32:44,929] INFO: Step: 882832/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:32:58,666] INFO: Step: 882833/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:33:12,316] INFO: Step: 882834/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:33:26,070] INFO: Step: 882835/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:33:39,834] INFO: Step: 882836/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:33:53,698] INFO: Step: 882837/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:34:07,435] INFO: Step: 882838/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:34:21,113] INFO: Step: 882839/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:34:34,775] INFO: Step: 882840/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:34:48,709] INFO: Step: 882841/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:35:02,603] INFO: Step: 882842/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:35:16,481] INFO: Step: 882843/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:35:30,259] INFO: Step: 882844/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:35:43,999] INFO: Step: 882845/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:35:57,837] INFO: Step: 882846/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:36:11,437] INFO: Step: 882847/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:36:25,092] INFO: Step: 882848/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:36:38,781] INFO: Step: 882849/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:36:52,405] INFO: Step: 882850/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:37:06,169] INFO: Step: 882851/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:37:19,968] INFO: Step: 882852/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:37:33,686] INFO: Step: 882853/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:37:47,417] INFO: Step: 882854/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:38:01,304] INFO: Step: 882855/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:38:15,084] INFO: Step: 882856/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:38:28,895] INFO: Step: 882857/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:38:42,600] INFO: Step: 882858/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:38:56,220] INFO: Step: 882859/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:39:10,014] INFO: Step: 882860/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:39:23,813] INFO: Step: 882861/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:39:37,476] INFO: Step: 882862/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:39:51,322] INFO: Step: 882863/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:40:05,061] INFO: Step: 882864/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:40:18,757] INFO: Step: 882865/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:40:32,493] INFO: Step: 882866/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:40:46,362] INFO: Step: 882867/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:41:00,074] INFO: Step: 882868/1000000 Loss: 0.091 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 21:41:13,880] INFO: Step: 882869/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:41:27,600] INFO: Step: 882870/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:41:41,496] INFO: Step: 882871/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:41:55,312] INFO: Step: 882872/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:42:08,957] INFO: Step: 882873/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:42:22,849] INFO: Step: 882874/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:42:36,814] INFO: Step: 882875/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:42:50,654] INFO: Step: 882876/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:43:04,349] INFO: Step: 882877/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:43:18,010] INFO: Step: 882878/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:43:31,668] INFO: Step: 882879/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:43:45,505] INFO: Step: 882880/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:43:59,412] INFO: Step: 882881/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:44:13,091] INFO: Step: 882882/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:44:26,825] INFO: Step: 882883/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:44:40,504] INFO: Step: 882884/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:44:54,293] INFO: Step: 882885/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:45:08,125] INFO: Step: 882886/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:45:21,811] INFO: Step: 882887/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:45:35,853] INFO: Step: 882888/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:45:49,625] INFO: Step: 882889/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:46:03,374] INFO: Step: 882890/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:46:17,014] INFO: Step: 882891/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:46:30,746] INFO: Step: 882892/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:46:44,483] INFO: Step: 882893/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:46:58,305] INFO: Step: 882894/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:47:12,136] INFO: Step: 882895/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:47:26,002] INFO: Step: 882896/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:47:39,778] INFO: Step: 882897/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:47:53,489] INFO: Step: 882898/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:48:07,268] INFO: Step: 882899/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:48:20,871] INFO: Step: 882900/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:48:34,599] INFO: Step: 882901/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:48:48,411] INFO: Step: 882902/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:49:02,052] INFO: Step: 882903/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:49:15,966] INFO: Step: 882904/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:49:29,701] INFO: Step: 882905/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
21:49:43,718] INFO: Step: 882906/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:49:57,321] INFO: Step: 882907/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:50:11,247] INFO: Step: 882908/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:50:24,869] INFO: Step: 882909/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:50:38,732] INFO: Step: 882910/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:50:52,471] INFO: Step: 882911/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:51:06,306] INFO: Step: 882912/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:51:20,064] INFO: Step: 882913/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:51:33,973] INFO: Step: 882914/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:51:47,663] INFO: Step: 882915/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:52:01,552] INFO: Step: 882916/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:52:15,296] INFO: Step: 882917/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:52:28,934] INFO: Step: 882918/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:52:42,648] INFO: Step: 882919/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:52:56,413] INFO: Step: 882920/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:53:10,090] INFO: Step: 882921/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:53:23,866] INFO: Step: 882922/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:53:37,677] INFO: Step: 882923/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:53:51,347] INFO: Step: 882924/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:54:04,985] INFO: Step: 882925/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:54:18,605] INFO: Step: 882926/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:54:32,343] INFO: Step: 882927/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:54:46,017] INFO: Step: 882928/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:54:59,726] INFO: Step: 882929/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:55:13,451] INFO: Step: 882930/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:55:27,450] INFO: Step: 882931/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:55:41,233] INFO: Step: 882932/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:55:54,968] INFO: Step: 882933/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:56:08,825] INFO: Step: 882934/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:56:22,504] INFO: Step: 882935/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:56:36,353] INFO: Step: 882936/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:56:50,489] INFO: Step: 882937/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:57:04,288] INFO: Step: 882938/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:57:18,014] INFO: Step: 882939/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:57:31,725] INFO: Step: 882940/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:57:45,481] INFO: Step: 882941/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:57:59,256] INFO: Step: 882942/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:58:12,869] INFO: Step: 
882943/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:58:26,526] INFO: Step: 882944/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:58:40,179] INFO: Step: 882945/1000000 Loss: 0.109 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:58:53,833] INFO: Step: 882946/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:59:07,592] INFO: Step: 882947/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:59:21,343] INFO: Step: 882948/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:59:35,006] INFO: Step: 882949/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 21:59:48,653] INFO: Step: 882950/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:00:02,240] INFO: Step: 882951/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:00:15,888] INFO: Step: 882952/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:00:29,468] INFO: Step: 882953/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:00:43,181] INFO: Step: 882954/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:00:56,925] INFO: Step: 882955/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:01:10,509] INFO: Step: 882956/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:01:24,541] INFO: Step: 882957/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:01:38,298] INFO: Step: 882958/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:01:52,082] INFO: Step: 882959/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:02:05,888] INFO: Step: 882960/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:02:19,516] INFO: Step: 882961/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:02:33,210] INFO: Step: 882962/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:02:47,128] INFO: Step: 882963/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:03:00,980] INFO: Step: 882964/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:03:14,766] INFO: Step: 882965/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:03:28,428] INFO: Step: 882966/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:03:42,040] INFO: Step: 882967/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:03:55,880] INFO: Step: 882968/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:04:09,583] INFO: Step: 882969/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:04:23,286] INFO: Step: 882970/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:04:36,996] INFO: Step: 882971/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:04:50,913] INFO: Step: 882972/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:05:04,445] INFO: Step: 882973/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:05:18,356] INFO: Step: 882974/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:05:32,057] INFO: Step: 882975/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:05:45,888] INFO: Step: 882976/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:05:59,597] INFO: Step: 882977/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:06:13,419] INFO: Step: 882978/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:06:27,085] INFO: Step: 882979/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:06:40,958] INFO: Step: 882980/1000000 Loss: 0.081 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 22:06:54,745] INFO: Step: 882981/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:07:08,475] INFO: Step: 882982/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:07:22,063] INFO: Step: 882983/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:07:35,912] INFO: Step: 882984/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:07:49,502] INFO: Step: 882985/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:08:03,184] INFO: Step: 882986/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:08:16,874] INFO: Step: 882987/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:08:30,634] INFO: Step: 882988/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:08:44,491] INFO: Step: 882989/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:08:58,303] INFO: Step: 882990/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:09:12,038] INFO: Step: 882991/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:09:25,798] INFO: Step: 882992/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:09:39,710] INFO: Step: 882993/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:09:53,465] INFO: Step: 882994/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:10:07,142] INFO: Step: 882995/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:10:20,738] INFO: Step: 882996/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:10:34,549] INFO: Step: 882997/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:10:48,327] INFO: Step: 882998/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:11:02,180] INFO: Step: 882999/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:11:15,822] INFO: Step: 883000/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:11:15,822] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00883000.pth +[2024-06-08 22:11:23,505] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00883000.pth +[2024-06-08 22:11:35,697] INFO: Step: 883001/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:11:49,311] INFO: Step: 883002/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:12:02,997] INFO: Step: 883003/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:12:16,693] INFO: Step: 883004/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:12:30,421] INFO: Step: 883005/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:12:44,238] INFO: Step: 883006/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:12:58,131] INFO: Step: 883007/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:13:11,842] INFO: Step: 883008/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:13:25,564] INFO: Step: 883009/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:13:39,402] INFO: Step: 883010/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:13:53,276] INFO: Step: 883011/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:14:07,044] INFO: Step: 883012/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:14:20,842] INFO: Step: 883013/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:14:34,770] INFO: Step: 883014/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:14:48,580] INFO: Step: 883015/1000000 
Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:15:02,328] INFO: Step: 883016/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:15:16,145] INFO: Step: 883017/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:15:30,009] INFO: Step: 883018/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:15:43,784] INFO: Step: 883019/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:15:57,526] INFO: Step: 883020/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:16:11,350] INFO: Step: 883021/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:16:25,192] INFO: Step: 883022/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:16:39,018] INFO: Step: 883023/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:16:52,811] INFO: Step: 883024/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:17:06,443] INFO: Step: 883025/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:17:20,241] INFO: Step: 883026/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:17:33,988] INFO: Step: 883027/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:17:47,978] INFO: Step: 883028/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:18:01,731] INFO: Step: 883029/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:18:15,408] INFO: Step: 883030/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:18:29,320] INFO: Step: 883031/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:18:43,136] INFO: Step: 883032/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:18:56,819] INFO: Step: 883033/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:19:10,837] INFO: Step: 883034/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:19:24,454] INFO: Step: 883035/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:19:38,188] INFO: Step: 883036/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:19:52,112] INFO: Step: 883037/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:20:06,004] INFO: Step: 883038/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:20:19,783] INFO: Step: 883039/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:20:33,682] INFO: Step: 883040/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:20:47,456] INFO: Step: 883041/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:21:01,294] INFO: Step: 883042/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:21:15,125] INFO: Step: 883043/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:21:29,044] INFO: Step: 883044/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:21:42,995] INFO: Step: 883045/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:21:56,853] INFO: Step: 883046/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:22:10,683] INFO: Step: 883047/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:22:24,419] INFO: Step: 883048/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:22:38,135] INFO: Step: 883049/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:22:51,801] INFO: Step: 883050/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:23:05,695] INFO: Step: 883051/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:23:19,408] INFO: Step: 883052/1000000 Loss: 0.081 scale: 65536.0 LR: 
0.0000300 +[2024-06-08 22:23:33,213] INFO: Step: 883053/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:23:46,934] INFO: Step: 883054/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:24:00,644] INFO: Step: 883055/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:24:14,650] INFO: Step: 883056/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:24:28,572] INFO: Step: 883057/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:24:42,486] INFO: Step: 883058/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:24:56,266] INFO: Step: 883059/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:25:10,228] INFO: Step: 883060/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:25:24,117] INFO: Step: 883061/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:25:38,080] INFO: Step: 883062/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:25:52,061] INFO: Step: 883063/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:26:05,858] INFO: Step: 883064/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:26:19,676] INFO: Step: 883065/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:26:33,506] INFO: Step: 883066/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:26:47,253] INFO: Step: 883067/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:27:00,932] INFO: Step: 883068/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:27:14,668] INFO: Step: 883069/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:27:28,389] INFO: Step: 883070/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:27:42,120] INFO: Step: 883071/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:27:55,800] INFO: Step: 883072/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:28:09,529] INFO: Step: 883073/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:28:23,323] INFO: Step: 883074/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:28:37,189] INFO: Step: 883075/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:28:50,802] INFO: Step: 883076/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:29:04,804] INFO: Step: 883077/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:29:18,489] INFO: Step: 883078/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:29:32,322] INFO: Step: 883079/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:29:46,123] INFO: Step: 883080/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:29:59,912] INFO: Step: 883081/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:30:13,670] INFO: Step: 883082/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:30:27,458] INFO: Step: 883083/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:30:41,238] INFO: Step: 883084/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:30:55,014] INFO: Step: 883085/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:31:08,759] INFO: Step: 883086/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:31:22,528] INFO: Step: 883087/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:31:36,301] INFO: Step: 883088/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:31:50,138] INFO: Step: 883089/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:32:04,014] 
INFO: Step: 883090/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:32:17,667] INFO: Step: 883091/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:32:31,555] INFO: Step: 883092/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:32:45,569] INFO: Step: 883093/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:32:59,320] INFO: Step: 883094/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:33:13,076] INFO: Step: 883095/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:33:26,890] INFO: Step: 883096/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:33:40,772] INFO: Step: 883097/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:33:54,487] INFO: Step: 883098/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:34:08,187] INFO: Step: 883099/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:34:21,965] INFO: Step: 883100/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:34:35,790] INFO: Step: 883101/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:34:49,543] INFO: Step: 883102/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:35:03,386] INFO: Step: 883103/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:35:17,318] INFO: Step: 883104/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:35:31,000] INFO: Step: 883105/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:35:44,638] INFO: Step: 883106/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:35:58,353] INFO: Step: 883107/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:36:12,281] INFO: Step: 883108/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:36:26,049] INFO: Step: 883109/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:36:39,816] INFO: Step: 883110/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:36:53,747] INFO: Step: 883111/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:37:07,689] INFO: Step: 883112/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:37:21,524] INFO: Step: 883113/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:37:35,251] INFO: Step: 883114/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:37:48,999] INFO: Step: 883115/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:38:02,731] INFO: Step: 883116/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:38:16,560] INFO: Step: 883117/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:38:30,278] INFO: Step: 883118/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:38:44,169] INFO: Step: 883119/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:38:58,006] INFO: Step: 883120/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:39:11,784] INFO: Step: 883121/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:39:25,640] INFO: Step: 883122/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:39:39,365] INFO: Step: 883123/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:39:52,990] INFO: Step: 883124/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:40:06,666] INFO: Step: 883125/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:40:20,489] INFO: Step: 883126/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:40:34,338] INFO: Step: 883127/1000000 Loss: 0.076 
scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:40:48,255] INFO: Step: 883128/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:41:02,193] INFO: Step: 883129/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:41:16,003] INFO: Step: 883130/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:41:30,000] INFO: Step: 883131/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:41:43,707] INFO: Step: 883132/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:41:57,600] INFO: Step: 883133/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:42:11,409] INFO: Step: 883134/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:42:25,309] INFO: Step: 883135/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:42:39,134] INFO: Step: 883136/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:42:52,900] INFO: Step: 883137/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:43:06,910] INFO: Step: 883138/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:43:20,520] INFO: Step: 883139/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:43:34,250] INFO: Step: 883140/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:43:48,056] INFO: Step: 883141/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:44:02,002] INFO: Step: 883142/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:44:15,793] INFO: Step: 883143/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:44:29,578] INFO: Step: 883144/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:44:43,505] INFO: Step: 883145/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:44:57,379] INFO: Step: 883146/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:45:11,067] INFO: Step: 883147/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:45:24,900] INFO: Step: 883148/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:45:38,631] INFO: Step: 883149/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:45:52,371] INFO: Step: 883150/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:46:06,066] INFO: Step: 883151/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:46:19,603] INFO: Step: 883152/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:46:33,400] INFO: Step: 883153/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:46:47,299] INFO: Step: 883154/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:47:00,916] INFO: Step: 883155/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:47:14,759] INFO: Step: 883156/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:47:28,541] INFO: Step: 883157/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:47:42,206] INFO: Step: 883158/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:47:55,952] INFO: Step: 883159/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:48:09,734] INFO: Step: 883160/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:48:23,455] INFO: Step: 883161/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:48:37,355] INFO: Step: 883162/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:48:51,173] INFO: Step: 883163/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:49:05,030] INFO: Step: 883164/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 
+[2024-06-08 22:49:18,774] INFO: Step: 883165/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:49:32,450] INFO: Step: 883166/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:49:46,123] INFO: Step: 883167/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:49:59,805] INFO: Step: 883168/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:50:13,582] INFO: Step: 883169/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:50:27,239] INFO: Step: 883170/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:50:41,112] INFO: Step: 883171/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:50:54,809] INFO: Step: 883172/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:51:08,568] INFO: Step: 883173/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:51:22,131] INFO: Step: 883174/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:51:35,850] INFO: Step: 883175/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:51:49,816] INFO: Step: 883176/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:52:03,645] INFO: Step: 883177/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:52:17,368] INFO: Step: 883178/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:52:31,150] INFO: Step: 883179/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:52:44,962] INFO: Step: 883180/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:52:58,702] INFO: Step: 883181/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:53:12,431] INFO: Step: 883182/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:53:26,221] INFO: Step: 883183/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:53:40,014] INFO: Step: 883184/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:53:53,749] INFO: Step: 883185/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:54:07,528] INFO: Step: 883186/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:54:21,339] INFO: Step: 883187/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:54:35,178] INFO: Step: 883188/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:54:49,042] INFO: Step: 883189/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:55:02,713] INFO: Step: 883190/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:55:16,679] INFO: Step: 883191/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:55:30,556] INFO: Step: 883192/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:55:44,366] INFO: Step: 883193/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:55:58,181] INFO: Step: 883194/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:56:12,107] INFO: Step: 883195/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:56:25,907] INFO: Step: 883196/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:56:39,819] INFO: Step: 883197/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:56:53,626] INFO: Step: 883198/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:57:07,344] INFO: Step: 883199/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:57:21,179] INFO: Step: 883200/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:57:34,831] INFO: Step: 883201/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:57:48,613] INFO: Step: 
883202/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:58:02,411] INFO: Step: 883203/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:58:16,239] INFO: Step: 883204/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:58:30,082] INFO: Step: 883205/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:58:43,708] INFO: Step: 883206/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:58:57,510] INFO: Step: 883207/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:59:11,348] INFO: Step: 883208/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:59:25,124] INFO: Step: 883209/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:59:38,876] INFO: Step: 883210/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 22:59:52,779] INFO: Step: 883211/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:00:06,662] INFO: Step: 883212/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:00:20,444] INFO: Step: 883213/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:00:34,154] INFO: Step: 883214/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:00:47,923] INFO: Step: 883215/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:01:01,728] INFO: Step: 883216/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:01:15,477] INFO: Step: 883217/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:01:29,164] INFO: Step: 883218/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:01:42,773] INFO: Step: 883219/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:01:56,526] INFO: Step: 883220/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:02:10,470] INFO: Step: 883221/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:02:24,239] INFO: Step: 883222/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:02:37,922] INFO: Step: 883223/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:02:51,790] INFO: Step: 883224/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:03:05,538] INFO: Step: 883225/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:03:19,485] INFO: Step: 883226/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:03:33,153] INFO: Step: 883227/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:03:47,151] INFO: Step: 883228/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:04:01,096] INFO: Step: 883229/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:04:14,998] INFO: Step: 883230/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:04:28,763] INFO: Step: 883231/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:04:42,431] INFO: Step: 883232/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:04:56,160] INFO: Step: 883233/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:05:09,953] INFO: Step: 883234/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:05:23,829] INFO: Step: 883235/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:05:37,781] INFO: Step: 883236/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:05:51,545] INFO: Step: 883237/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:06:05,236] INFO: Step: 883238/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:06:18,959] INFO: Step: 883239/1000000 Loss: 0.078 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 23:06:32,888] INFO: Step: 883240/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:06:46,885] INFO: Step: 883241/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:07:00,689] INFO: Step: 883242/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:07:14,525] INFO: Step: 883243/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:07:28,280] INFO: Step: 883244/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:07:42,032] INFO: Step: 883245/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:07:55,955] INFO: Step: 883246/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:08:09,719] INFO: Step: 883247/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:08:23,429] INFO: Step: 883248/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:08:37,394] INFO: Step: 883249/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:08:51,308] INFO: Step: 883250/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:09:05,083] INFO: Step: 883251/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:09:19,045] INFO: Step: 883252/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:09:32,773] INFO: Step: 883253/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:09:46,704] INFO: Step: 883254/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:10:00,490] INFO: Step: 883255/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:10:14,109] INFO: Step: 883256/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:10:28,074] INFO: Step: 883257/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:10:41,854] INFO: Step: 883258/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:10:55,713] INFO: Step: 883259/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:11:09,658] INFO: Step: 883260/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:11:23,496] INFO: Step: 883261/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:11:37,477] INFO: Step: 883262/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:11:51,395] INFO: Step: 883263/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:12:05,275] INFO: Step: 883264/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:12:19,046] INFO: Step: 883265/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:12:32,947] INFO: Step: 883266/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:12:46,780] INFO: Step: 883267/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:13:00,682] INFO: Step: 883268/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:13:14,479] INFO: Step: 883269/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:13:28,323] INFO: Step: 883270/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:13:42,114] INFO: Step: 883271/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:13:55,896] INFO: Step: 883272/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:14:09,915] INFO: Step: 883273/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:14:23,811] INFO: Step: 883274/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:14:37,627] INFO: Step: 883275/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:14:51,441] INFO: Step: 883276/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
23:15:05,231] INFO: Step: 883277/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:15:18,987] INFO: Step: 883278/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:15:32,728] INFO: Step: 883279/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:15:46,622] INFO: Step: 883280/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:16:00,332] INFO: Step: 883281/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:16:14,136] INFO: Step: 883282/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:16:28,105] INFO: Step: 883283/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:16:42,018] INFO: Step: 883284/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:16:55,756] INFO: Step: 883285/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:17:09,426] INFO: Step: 883286/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:17:23,138] INFO: Step: 883287/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:17:37,076] INFO: Step: 883288/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:17:50,906] INFO: Step: 883289/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:18:04,545] INFO: Step: 883290/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:18:18,258] INFO: Step: 883291/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:18:32,112] INFO: Step: 883292/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:18:45,849] INFO: Step: 883293/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:18:59,757] INFO: Step: 883294/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:19:13,556] INFO: Step: 883295/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:19:27,328] INFO: Step: 883296/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:19:41,086] INFO: Step: 883297/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:19:54,890] INFO: Step: 883298/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:20:08,714] INFO: Step: 883299/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:20:22,544] INFO: Step: 883300/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:20:36,284] INFO: Step: 883301/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:20:50,084] INFO: Step: 883302/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:21:03,934] INFO: Step: 883303/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:21:17,732] INFO: Step: 883304/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:21:31,444] INFO: Step: 883305/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:21:45,283] INFO: Step: 883306/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:21:59,075] INFO: Step: 883307/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:22:12,827] INFO: Step: 883308/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:22:26,623] INFO: Step: 883309/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:22:40,526] INFO: Step: 883310/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:22:54,285] INFO: Step: 883311/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:23:08,251] INFO: Step: 883312/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:23:21,989] INFO: Step: 883313/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:23:35,919] INFO: Step: 
883314/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:23:49,738] INFO: Step: 883315/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:24:03,563] INFO: Step: 883316/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:24:17,310] INFO: Step: 883317/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:24:31,157] INFO: Step: 883318/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:24:44,849] INFO: Step: 883319/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:24:58,449] INFO: Step: 883320/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:25:12,339] INFO: Step: 883321/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:25:26,288] INFO: Step: 883322/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:25:40,145] INFO: Step: 883323/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:25:54,108] INFO: Step: 883324/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:26:07,975] INFO: Step: 883325/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:26:21,889] INFO: Step: 883326/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:26:35,628] INFO: Step: 883327/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:26:49,660] INFO: Step: 883328/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:27:03,730] INFO: Step: 883329/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:27:17,607] INFO: Step: 883330/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:27:31,422] INFO: Step: 883331/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:27:45,120] INFO: Step: 883332/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:27:58,956] INFO: Step: 883333/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:28:12,801] INFO: Step: 883334/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:28:26,668] INFO: Step: 883335/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:28:40,437] INFO: Step: 883336/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:28:54,320] INFO: Step: 883337/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:29:08,256] INFO: Step: 883338/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:29:22,282] INFO: Step: 883339/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:29:36,008] INFO: Step: 883340/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:29:49,835] INFO: Step: 883341/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:30:03,730] INFO: Step: 883342/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:30:17,587] INFO: Step: 883343/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:30:31,381] INFO: Step: 883344/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:30:45,099] INFO: Step: 883345/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:30:58,937] INFO: Step: 883346/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:31:12,695] INFO: Step: 883347/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:31:26,453] INFO: Step: 883348/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:31:40,042] INFO: Step: 883349/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:31:53,949] INFO: Step: 883350/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:32:07,791] INFO: Step: 883351/1000000 Loss: 0.081 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 23:32:21,757] INFO: Step: 883352/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:32:35,661] INFO: Step: 883353/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:32:49,535] INFO: Step: 883354/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:33:03,181] INFO: Step: 883355/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:33:17,104] INFO: Step: 883356/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:33:30,949] INFO: Step: 883357/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:33:44,799] INFO: Step: 883358/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:33:58,574] INFO: Step: 883359/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:34:12,291] INFO: Step: 883360/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:34:26,163] INFO: Step: 883361/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:34:39,929] INFO: Step: 883362/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:34:53,819] INFO: Step: 883363/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:35:07,529] INFO: Step: 883364/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:35:21,296] INFO: Step: 883365/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:35:35,256] INFO: Step: 883366/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:35:49,139] INFO: Step: 883367/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:36:03,011] INFO: Step: 883368/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:36:16,717] INFO: Step: 883369/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:36:30,625] INFO: Step: 883370/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:36:44,508] INFO: Step: 883371/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:36:58,347] INFO: Step: 883372/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:37:12,089] INFO: Step: 883373/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:37:25,801] INFO: Step: 883374/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:37:39,690] INFO: Step: 883375/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:37:53,508] INFO: Step: 883376/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:38:07,311] INFO: Step: 883377/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:38:21,102] INFO: Step: 883378/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:38:34,871] INFO: Step: 883379/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:38:48,654] INFO: Step: 883380/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:39:02,463] INFO: Step: 883381/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:39:16,524] INFO: Step: 883382/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:39:30,315] INFO: Step: 883383/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:39:44,132] INFO: Step: 883384/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:39:57,830] INFO: Step: 883385/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:40:11,609] INFO: Step: 883386/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:40:25,452] INFO: Step: 883387/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:40:39,214] INFO: Step: 883388/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 
23:40:53,022] INFO: Step: 883389/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:41:06,899] INFO: Step: 883390/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:41:20,545] INFO: Step: 883391/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:41:34,420] INFO: Step: 883392/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:41:48,345] INFO: Step: 883393/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:42:02,047] INFO: Step: 883394/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:42:15,705] INFO: Step: 883395/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:42:29,601] INFO: Step: 883396/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:42:43,328] INFO: Step: 883397/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:42:57,430] INFO: Step: 883398/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:43:11,251] INFO: Step: 883399/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:43:25,133] INFO: Step: 883400/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:43:38,816] INFO: Step: 883401/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:43:52,738] INFO: Step: 883402/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:44:06,500] INFO: Step: 883403/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:44:20,453] INFO: Step: 883404/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:44:34,410] INFO: Step: 883405/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:44:48,210] INFO: Step: 883406/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:45:02,136] INFO: Step: 883407/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:45:15,895] INFO: Step: 883408/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:45:29,876] INFO: Step: 883409/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:45:43,808] INFO: Step: 883410/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:45:57,633] INFO: Step: 883411/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:46:11,493] INFO: Step: 883412/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:46:25,174] INFO: Step: 883413/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:46:38,926] INFO: Step: 883414/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:46:52,653] INFO: Step: 883415/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:47:06,560] INFO: Step: 883416/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:47:20,432] INFO: Step: 883417/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:47:34,268] INFO: Step: 883418/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:47:48,036] INFO: Step: 883419/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:48:01,942] INFO: Step: 883420/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:48:15,683] INFO: Step: 883421/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:48:29,844] INFO: Step: 883422/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:48:43,756] INFO: Step: 883423/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:48:57,699] INFO: Step: 883424/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:49:11,489] INFO: Step: 883425/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:49:25,267] INFO: Step: 
883426/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:49:39,203] INFO: Step: 883427/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:49:53,036] INFO: Step: 883428/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:50:06,831] INFO: Step: 883429/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:50:20,793] INFO: Step: 883430/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:50:34,783] INFO: Step: 883431/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:50:48,518] INFO: Step: 883432/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:51:02,272] INFO: Step: 883433/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:51:16,027] INFO: Step: 883434/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:51:29,758] INFO: Step: 883435/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:51:43,459] INFO: Step: 883436/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:51:57,572] INFO: Step: 883437/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:52:11,416] INFO: Step: 883438/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:52:25,244] INFO: Step: 883439/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:52:39,002] INFO: Step: 883440/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:52:52,796] INFO: Step: 883441/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:53:06,531] INFO: Step: 883442/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:53:20,206] INFO: Step: 883443/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:53:33,881] INFO: Step: 883444/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:53:47,737] INFO: Step: 883445/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:54:01,597] INFO: Step: 883446/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:54:15,326] INFO: Step: 883447/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:54:29,259] INFO: Step: 883448/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:54:43,108] INFO: Step: 883449/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:54:56,799] INFO: Step: 883450/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:55:10,741] INFO: Step: 883451/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:55:24,811] INFO: Step: 883452/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:55:38,850] INFO: Step: 883453/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:55:52,719] INFO: Step: 883454/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:56:06,653] INFO: Step: 883455/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:56:20,405] INFO: Step: 883456/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:56:34,412] INFO: Step: 883457/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:56:48,039] INFO: Step: 883458/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:57:01,717] INFO: Step: 883459/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:57:15,547] INFO: Step: 883460/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:57:29,323] INFO: Step: 883461/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:57:42,949] INFO: Step: 883462/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:57:56,738] INFO: Step: 883463/1000000 Loss: 0.076 scale: 
65536.0 LR: 0.0000300 +[2024-06-08 23:58:10,535] INFO: Step: 883464/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:58:24,281] INFO: Step: 883465/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:58:38,220] INFO: Step: 883466/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:58:51,974] INFO: Step: 883467/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:59:05,732] INFO: Step: 883468/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:59:19,457] INFO: Step: 883469/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:59:33,269] INFO: Step: 883470/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-08 23:59:47,153] INFO: Step: 883471/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:00:01,081] INFO: Step: 883472/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:00:15,086] INFO: Step: 883473/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:00:28,943] INFO: Step: 883474/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:00:42,766] INFO: Step: 883475/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:00:56,487] INFO: Step: 883476/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:01:10,312] INFO: Step: 883477/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:01:24,035] INFO: Step: 883478/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:01:37,788] INFO: Step: 883479/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:01:51,474] INFO: Step: 883480/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:02:05,532] INFO: Step: 883481/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:02:19,280] INFO: Step: 883482/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:02:32,942] INFO: Step: 883483/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:02:46,655] INFO: Step: 883484/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:03:00,407] INFO: Step: 883485/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:03:14,416] INFO: Step: 883486/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:03:28,105] INFO: Step: 883487/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:03:42,073] INFO: Step: 883488/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:03:55,977] INFO: Step: 883489/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:04:09,828] INFO: Step: 883490/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:04:23,604] INFO: Step: 883491/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:04:37,420] INFO: Step: 883492/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:04:51,308] INFO: Step: 883493/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:05:05,168] INFO: Step: 883494/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:05:19,141] INFO: Step: 883495/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:05:33,048] INFO: Step: 883496/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:05:46,835] INFO: Step: 883497/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:06:00,541] INFO: Step: 883498/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:06:14,396] INFO: Step: 883499/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:06:28,240] INFO: Step: 883500/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
00:06:28,241] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00883500.pth +[2024-06-09 00:06:35,293] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00883500.pth +[2024-06-09 00:06:47,516] INFO: Step: 883501/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:07:01,397] INFO: Step: 883502/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:07:15,012] INFO: Step: 883503/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:07:28,986] INFO: Step: 883504/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:07:42,756] INFO: Step: 883505/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:07:56,411] INFO: Step: 883506/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:08:10,136] INFO: Step: 883507/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:08:24,055] INFO: Step: 883508/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:08:38,008] INFO: Step: 883509/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:08:51,822] INFO: Step: 883510/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:09:05,715] INFO: Step: 883511/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:09:19,409] INFO: Step: 883512/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:09:33,257] INFO: Step: 883513/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:09:47,343] INFO: Step: 883514/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:10:01,180] INFO: Step: 883515/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:10:15,225] INFO: Step: 883516/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:10:28,906] INFO: Step: 883517/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:10:42,777] INFO: Step: 883518/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:10:56,612] INFO: Step: 883519/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:11:10,535] INFO: Step: 883520/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:11:24,444] INFO: Step: 883521/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:11:38,279] INFO: Step: 883522/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:11:51,868] INFO: Step: 883523/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:12:05,663] INFO: Step: 883524/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:12:19,402] INFO: Step: 883525/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:12:33,187] INFO: Step: 883526/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:12:47,146] INFO: Step: 883527/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:13:00,993] INFO: Step: 883528/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:13:14,829] INFO: Step: 883529/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:13:28,505] INFO: Step: 883530/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:13:42,288] INFO: Step: 883531/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:13:55,987] INFO: Step: 883532/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:14:09,773] INFO: Step: 883533/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:14:23,409] INFO: Step: 883534/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:14:37,079] INFO: Step: 883535/1000000 Loss: 0.071 scale: 65536.0 LR: 
0.0000300 +[2024-06-09 00:14:51,100] INFO: Step: 883536/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:15:04,851] INFO: Step: 883537/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:15:18,632] INFO: Step: 883538/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:15:32,404] INFO: Step: 883539/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:15:46,370] INFO: Step: 883540/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:16:00,145] INFO: Step: 883541/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:16:14,045] INFO: Step: 883542/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:16:27,933] INFO: Step: 883543/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:16:41,741] INFO: Step: 883544/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:16:55,671] INFO: Step: 883545/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:17:09,448] INFO: Step: 883546/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:17:23,322] INFO: Step: 883547/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:17:37,032] INFO: Step: 883548/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:17:50,650] INFO: Step: 883549/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:18:04,532] INFO: Step: 883550/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:18:18,296] INFO: Step: 883551/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:18:32,065] INFO: Step: 883552/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:18:46,009] INFO: Step: 883553/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:18:59,817] INFO: Step: 883554/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:19:13,613] INFO: Step: 883555/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:19:27,346] INFO: Step: 883556/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:19:41,248] INFO: Step: 883557/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:19:55,061] INFO: Step: 883558/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:20:09,239] INFO: Step: 883559/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:20:22,916] INFO: Step: 883560/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:20:36,742] INFO: Step: 883561/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:20:50,562] INFO: Step: 883562/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:21:04,364] INFO: Step: 883563/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:21:18,253] INFO: Step: 883564/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:21:32,151] INFO: Step: 883565/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:21:46,160] INFO: Step: 883566/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:21:59,915] INFO: Step: 883567/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:22:13,578] INFO: Step: 883568/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:22:27,488] INFO: Step: 883569/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:22:41,300] INFO: Step: 883570/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:22:55,198] INFO: Step: 883571/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:23:08,912] INFO: Step: 883572/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:23:22,767] 
INFO: Step: 883573/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:23:36,458] INFO: Step: 883574/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:23:50,478] INFO: Step: 883575/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:24:04,271] INFO: Step: 883576/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:24:17,924] INFO: Step: 883577/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:24:31,715] INFO: Step: 883578/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:24:45,671] INFO: Step: 883579/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:24:59,468] INFO: Step: 883580/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:25:13,210] INFO: Step: 883581/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:25:27,014] INFO: Step: 883582/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:25:40,942] INFO: Step: 883583/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:25:54,608] INFO: Step: 883584/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:26:08,424] INFO: Step: 883585/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:26:22,370] INFO: Step: 883586/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:26:36,368] INFO: Step: 883587/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:26:50,207] INFO: Step: 883588/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:27:04,053] INFO: Step: 883589/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:27:17,953] INFO: Step: 883590/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:27:31,829] INFO: Step: 883591/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:27:45,556] INFO: Step: 883592/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:27:59,215] INFO: Step: 883593/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:28:12,930] INFO: Step: 883594/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:28:26,648] INFO: Step: 883595/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:28:40,514] INFO: Step: 883596/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:28:54,473] INFO: Step: 883597/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:29:08,448] INFO: Step: 883598/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:29:22,198] INFO: Step: 883599/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:29:35,907] INFO: Step: 883600/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:29:49,776] INFO: Step: 883601/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:30:03,499] INFO: Step: 883602/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:30:17,344] INFO: Step: 883603/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:30:31,409] INFO: Step: 883604/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:30:45,335] INFO: Step: 883605/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:30:59,125] INFO: Step: 883606/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:31:13,016] INFO: Step: 883607/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:31:26,722] INFO: Step: 883608/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:31:40,625] INFO: Step: 883609/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:31:54,483] INFO: Step: 883610/1000000 Loss: 0.084 
scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:32:08,059] INFO: Step: 883611/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:32:21,872] INFO: Step: 883612/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:32:35,811] INFO: Step: 883613/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:32:49,498] INFO: Step: 883614/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:33:03,286] INFO: Step: 883615/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:33:17,304] INFO: Step: 883616/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:33:31,087] INFO: Step: 883617/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:33:44,863] INFO: Step: 883618/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:33:58,672] INFO: Step: 883619/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:34:12,519] INFO: Step: 883620/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:34:26,425] INFO: Step: 883621/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:34:40,261] INFO: Step: 883622/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:34:54,363] INFO: Step: 883623/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:35:08,241] INFO: Step: 883624/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:35:22,055] INFO: Step: 883625/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:35:35,975] INFO: Step: 883626/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:35:49,926] INFO: Step: 883627/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:36:03,937] INFO: Step: 883628/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:36:17,719] INFO: Step: 883629/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:36:31,547] INFO: Step: 883630/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:36:45,185] INFO: Step: 883631/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:36:59,084] INFO: Step: 883632/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:37:12,831] INFO: Step: 883633/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:37:26,855] INFO: Step: 883634/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:37:40,595] INFO: Step: 883635/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:37:54,357] INFO: Step: 883636/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:38:08,063] INFO: Step: 883637/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:38:21,836] INFO: Step: 883638/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:38:35,742] INFO: Step: 883639/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:38:49,422] INFO: Step: 883640/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:39:03,455] INFO: Step: 883641/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:39:17,441] INFO: Step: 883642/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:39:31,255] INFO: Step: 883643/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:39:45,027] INFO: Step: 883644/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:39:58,874] INFO: Step: 883645/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:40:12,563] INFO: Step: 883646/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:40:26,314] INFO: Step: 883647/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 
+[2024-06-09 00:40:40,182] INFO: Step: 883648/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:40:53,911] INFO: Step: 883649/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:41:07,649] INFO: Step: 883650/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:41:21,649] INFO: Step: 883651/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:41:35,723] INFO: Step: 883652/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:41:49,385] INFO: Step: 883653/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:42:03,187] INFO: Step: 883654/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:42:16,784] INFO: Step: 883655/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:42:30,676] INFO: Step: 883656/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:42:44,531] INFO: Step: 883657/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:42:58,479] INFO: Step: 883658/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:43:12,380] INFO: Step: 883659/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:43:26,160] INFO: Step: 883660/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:43:39,932] INFO: Step: 883661/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:43:53,726] INFO: Step: 883662/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:44:07,696] INFO: Step: 883663/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:44:21,444] INFO: Step: 883664/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:44:35,162] INFO: Step: 883665/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:44:48,833] INFO: Step: 883666/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:45:02,767] INFO: Step: 883667/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:45:16,687] INFO: Step: 883668/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:45:30,562] INFO: Step: 883669/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:45:44,541] INFO: Step: 883670/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:45:58,257] INFO: Step: 883671/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:46:12,089] INFO: Step: 883672/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:46:25,674] INFO: Step: 883673/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:46:39,334] INFO: Step: 883674/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:46:53,072] INFO: Step: 883675/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:47:06,738] INFO: Step: 883676/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:47:20,640] INFO: Step: 883677/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:47:34,383] INFO: Step: 883678/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:47:48,273] INFO: Step: 883679/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:48:01,919] INFO: Step: 883680/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:48:15,717] INFO: Step: 883681/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:48:29,549] INFO: Step: 883682/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:48:43,440] INFO: Step: 883683/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:48:57,265] INFO: Step: 883684/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:49:11,070] INFO: Step: 
883685/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:49:24,784] INFO: Step: 883686/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:49:38,644] INFO: Step: 883687/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:49:52,441] INFO: Step: 883688/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:50:06,215] INFO: Step: 883689/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:50:20,004] INFO: Step: 883690/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:50:33,859] INFO: Step: 883691/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:50:47,680] INFO: Step: 883692/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:51:01,603] INFO: Step: 883693/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:51:15,270] INFO: Step: 883694/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:51:29,024] INFO: Step: 883695/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:51:43,006] INFO: Step: 883696/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:51:56,918] INFO: Step: 883697/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:52:10,807] INFO: Step: 883698/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:52:24,821] INFO: Step: 883699/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:52:38,744] INFO: Step: 883700/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:52:52,433] INFO: Step: 883701/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:53:06,294] INFO: Step: 883702/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:53:19,952] INFO: Step: 883703/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:53:33,613] INFO: Step: 883704/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:53:47,436] INFO: Step: 883705/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:54:01,406] INFO: Step: 883706/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:54:15,245] INFO: Step: 883707/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:54:28,958] INFO: Step: 883708/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:54:42,821] INFO: Step: 883709/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:54:56,540] INFO: Step: 883710/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:55:10,454] INFO: Step: 883711/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:55:24,232] INFO: Step: 883712/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:55:38,002] INFO: Step: 883713/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:55:51,766] INFO: Step: 883714/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:56:05,813] INFO: Step: 883715/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:56:19,718] INFO: Step: 883716/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:56:33,802] INFO: Step: 883717/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:56:47,719] INFO: Step: 883718/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:57:01,486] INFO: Step: 883719/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:57:15,245] INFO: Step: 883720/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:57:29,158] INFO: Step: 883721/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:57:43,162] INFO: Step: 883722/1000000 Loss: 0.077 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 00:57:56,917] INFO: Step: 883723/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:58:10,839] INFO: Step: 883724/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:58:24,580] INFO: Step: 883725/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:58:38,430] INFO: Step: 883726/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:58:52,420] INFO: Step: 883727/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:59:06,173] INFO: Step: 883728/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:59:19,971] INFO: Step: 883729/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:59:33,704] INFO: Step: 883730/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 00:59:47,709] INFO: Step: 883731/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:00:01,729] INFO: Step: 883732/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:00:15,638] INFO: Step: 883733/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:00:29,368] INFO: Step: 883734/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:00:43,499] INFO: Step: 883735/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:00:57,267] INFO: Step: 883736/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:01:11,043] INFO: Step: 883737/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:01:24,768] INFO: Step: 883738/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:01:38,562] INFO: Step: 883739/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:01:52,360] INFO: Step: 883740/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:02:06,152] INFO: Step: 883741/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:02:19,852] INFO: Step: 883742/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:02:33,859] INFO: Step: 883743/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:02:47,686] INFO: Step: 883744/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:03:01,441] INFO: Step: 883745/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:03:15,176] INFO: Step: 883746/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:03:29,013] INFO: Step: 883747/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:03:42,986] INFO: Step: 883748/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:03:56,934] INFO: Step: 883749/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:04:10,715] INFO: Step: 883750/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:04:24,551] INFO: Step: 883751/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:04:38,571] INFO: Step: 883752/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:04:52,404] INFO: Step: 883753/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:05:06,275] INFO: Step: 883754/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:05:20,102] INFO: Step: 883755/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:05:33,981] INFO: Step: 883756/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:05:47,763] INFO: Step: 883757/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:06:01,577] INFO: Step: 883758/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:06:15,295] INFO: Step: 883759/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
01:06:29,160] INFO: Step: 883760/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:06:42,841] INFO: Step: 883761/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:06:56,762] INFO: Step: 883762/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:07:10,738] INFO: Step: 883763/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:07:24,579] INFO: Step: 883764/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:07:38,518] INFO: Step: 883765/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:07:52,249] INFO: Step: 883766/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:08:05,972] INFO: Step: 883767/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:08:19,724] INFO: Step: 883768/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:08:33,532] INFO: Step: 883769/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:08:47,236] INFO: Step: 883770/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:09:01,029] INFO: Step: 883771/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:09:14,838] INFO: Step: 883772/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:09:28,655] INFO: Step: 883773/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:09:42,587] INFO: Step: 883774/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:09:56,426] INFO: Step: 883775/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:10:10,379] INFO: Step: 883776/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:10:24,078] INFO: Step: 883777/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:10:37,841] INFO: Step: 883778/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:10:51,594] INFO: Step: 883779/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:11:05,389] INFO: Step: 883780/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:11:19,279] INFO: Step: 883781/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:11:32,967] INFO: Step: 883782/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:11:46,697] INFO: Step: 883783/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:12:00,415] INFO: Step: 883784/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:12:14,325] INFO: Step: 883785/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:12:28,201] INFO: Step: 883786/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:12:41,981] INFO: Step: 883787/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:12:55,666] INFO: Step: 883788/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:13:09,473] INFO: Step: 883789/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:13:23,361] INFO: Step: 883790/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:13:37,094] INFO: Step: 883791/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:13:51,237] INFO: Step: 883792/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:14:04,736] INFO: Step: 883793/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:14:18,496] INFO: Step: 883794/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:14:32,328] INFO: Step: 883795/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:14:46,202] INFO: Step: 883796/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:15:00,086] INFO: Step: 
883797/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:15:13,796] INFO: Step: 883798/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:15:28,047] INFO: Step: 883799/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:15:41,859] INFO: Step: 883800/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:15:55,637] INFO: Step: 883801/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:16:09,532] INFO: Step: 883802/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:16:23,400] INFO: Step: 883803/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:16:37,330] INFO: Step: 883804/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:16:51,091] INFO: Step: 883805/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:17:04,992] INFO: Step: 883806/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:17:18,789] INFO: Step: 883807/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:17:32,510] INFO: Step: 883808/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:17:46,359] INFO: Step: 883809/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:18:00,176] INFO: Step: 883810/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:18:14,065] INFO: Step: 883811/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:18:27,792] INFO: Step: 883812/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:18:41,536] INFO: Step: 883813/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:18:55,515] INFO: Step: 883814/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:19:09,507] INFO: Step: 883815/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:19:23,378] INFO: Step: 883816/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:19:37,191] INFO: Step: 883817/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:19:50,878] INFO: Step: 883818/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:20:04,789] INFO: Step: 883819/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:20:18,575] INFO: Step: 883820/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:20:32,574] INFO: Step: 883821/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:20:46,549] INFO: Step: 883822/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:21:00,384] INFO: Step: 883823/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:21:14,238] INFO: Step: 883824/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:21:28,089] INFO: Step: 883825/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:21:41,828] INFO: Step: 883826/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:21:55,486] INFO: Step: 883827/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:22:09,285] INFO: Step: 883828/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:22:23,084] INFO: Step: 883829/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:22:36,765] INFO: Step: 883830/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:22:50,918] INFO: Step: 883831/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:23:04,754] INFO: Step: 883832/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:23:18,634] INFO: Step: 883833/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:23:32,344] INFO: Step: 883834/1000000 Loss: 0.083 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 01:23:46,140] INFO: Step: 883835/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:24:00,060] INFO: Step: 883836/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:24:13,753] INFO: Step: 883837/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:24:27,449] INFO: Step: 883838/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:24:41,347] INFO: Step: 883839/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:24:55,232] INFO: Step: 883840/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:25:08,950] INFO: Step: 883841/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:25:22,741] INFO: Step: 883842/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:25:36,548] INFO: Step: 883843/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:25:50,240] INFO: Step: 883844/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:26:03,975] INFO: Step: 883845/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:26:17,775] INFO: Step: 883846/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:26:31,595] INFO: Step: 883847/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:26:45,323] INFO: Step: 883848/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:26:59,239] INFO: Step: 883849/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:27:12,947] INFO: Step: 883850/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:27:26,742] INFO: Step: 883851/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:27:40,461] INFO: Step: 883852/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:27:54,242] INFO: Step: 883853/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:28:07,954] INFO: Step: 883854/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:28:21,770] INFO: Step: 883855/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:28:35,709] INFO: Step: 883856/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:28:49,577] INFO: Step: 883857/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:29:03,409] INFO: Step: 883858/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:29:17,303] INFO: Step: 883859/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:29:30,973] INFO: Step: 883860/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:29:44,718] INFO: Step: 883861/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:29:58,443] INFO: Step: 883862/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:30:12,191] INFO: Step: 883863/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:30:26,191] INFO: Step: 883864/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:30:39,999] INFO: Step: 883865/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:30:53,891] INFO: Step: 883866/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:31:07,607] INFO: Step: 883867/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:31:21,388] INFO: Step: 883868/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:31:35,205] INFO: Step: 883869/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:31:49,151] INFO: Step: 883870/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:32:02,797] INFO: Step: 883871/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
01:32:16,566] INFO: Step: 883872/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:32:30,498] INFO: Step: 883873/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:32:44,348] INFO: Step: 883874/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:32:58,182] INFO: Step: 883875/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:33:11,947] INFO: Step: 883876/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:33:25,781] INFO: Step: 883877/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:33:39,646] INFO: Step: 883878/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:33:53,426] INFO: Step: 883879/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:34:07,323] INFO: Step: 883880/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:34:21,147] INFO: Step: 883881/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:34:35,080] INFO: Step: 883882/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:34:48,883] INFO: Step: 883883/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:35:02,564] INFO: Step: 883884/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:35:16,388] INFO: Step: 883885/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:35:30,193] INFO: Step: 883886/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:35:44,086] INFO: Step: 883887/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:35:58,100] INFO: Step: 883888/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:36:11,938] INFO: Step: 883889/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:36:25,586] INFO: Step: 883890/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:36:39,436] INFO: Step: 883891/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:36:53,146] INFO: Step: 883892/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:37:06,847] INFO: Step: 883893/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:37:20,759] INFO: Step: 883894/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:37:34,541] INFO: Step: 883895/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:37:48,381] INFO: Step: 883896/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:38:02,246] INFO: Step: 883897/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:38:16,027] INFO: Step: 883898/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:38:29,851] INFO: Step: 883899/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:38:43,625] INFO: Step: 883900/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:38:57,370] INFO: Step: 883901/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:39:11,285] INFO: Step: 883902/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:39:25,044] INFO: Step: 883903/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:39:38,753] INFO: Step: 883904/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:39:52,553] INFO: Step: 883905/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:40:06,351] INFO: Step: 883906/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:40:20,241] INFO: Step: 883907/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:40:33,942] INFO: Step: 883908/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:40:47,766] INFO: Step: 
883909/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:41:01,605] INFO: Step: 883910/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:41:15,430] INFO: Step: 883911/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:41:29,283] INFO: Step: 883912/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:41:42,973] INFO: Step: 883913/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:41:56,646] INFO: Step: 883914/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:42:10,532] INFO: Step: 883915/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:42:24,269] INFO: Step: 883916/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:42:37,923] INFO: Step: 883917/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:42:51,734] INFO: Step: 883918/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:43:05,474] INFO: Step: 883919/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:43:19,401] INFO: Step: 883920/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:43:33,282] INFO: Step: 883921/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:43:47,152] INFO: Step: 883922/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:44:00,861] INFO: Step: 883923/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:44:14,445] INFO: Step: 883924/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:44:28,464] INFO: Step: 883925/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:44:42,308] INFO: Step: 883926/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:44:56,155] INFO: Step: 883927/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:45:10,058] INFO: Step: 883928/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:45:24,040] INFO: Step: 883929/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:45:37,648] INFO: Step: 883930/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:45:51,491] INFO: Step: 883931/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:46:05,123] INFO: Step: 883932/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:46:18,763] INFO: Step: 883933/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:46:32,387] INFO: Step: 883934/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:46:46,122] INFO: Step: 883935/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:47:00,006] INFO: Step: 883936/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:47:13,740] INFO: Step: 883937/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:47:27,718] INFO: Step: 883938/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:47:41,556] INFO: Step: 883939/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:47:55,295] INFO: Step: 883940/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:48:09,119] INFO: Step: 883941/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:48:22,853] INFO: Step: 883942/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:48:36,799] INFO: Step: 883943/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:48:50,549] INFO: Step: 883944/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:49:04,315] INFO: Step: 883945/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:49:18,020] INFO: Step: 883946/1000000 Loss: 0.079 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 01:49:31,756] INFO: Step: 883947/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:49:45,567] INFO: Step: 883948/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:49:59,181] INFO: Step: 883949/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:50:13,057] INFO: Step: 883950/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:50:26,782] INFO: Step: 883951/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:50:40,676] INFO: Step: 883952/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:50:54,396] INFO: Step: 883953/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:51:08,217] INFO: Step: 883954/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:51:22,033] INFO: Step: 883955/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:51:35,903] INFO: Step: 883956/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:51:49,924] INFO: Step: 883957/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:52:03,821] INFO: Step: 883958/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:52:17,828] INFO: Step: 883959/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:52:31,715] INFO: Step: 883960/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:52:45,665] INFO: Step: 883961/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:52:59,610] INFO: Step: 883962/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:53:13,596] INFO: Step: 883963/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:53:27,404] INFO: Step: 883964/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:53:41,141] INFO: Step: 883965/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:53:55,144] INFO: Step: 883966/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:54:08,887] INFO: Step: 883967/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:54:22,667] INFO: Step: 883968/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:54:36,409] INFO: Step: 883969/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:54:50,195] INFO: Step: 883970/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:55:03,851] INFO: Step: 883971/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:55:17,671] INFO: Step: 883972/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:55:31,460] INFO: Step: 883973/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:55:45,269] INFO: Step: 883974/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:55:59,035] INFO: Step: 883975/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:56:12,959] INFO: Step: 883976/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:56:26,912] INFO: Step: 883977/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:56:40,626] INFO: Step: 883978/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:56:54,348] INFO: Step: 883979/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:57:08,167] INFO: Step: 883980/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:57:21,927] INFO: Step: 883981/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:57:35,722] INFO: Step: 883982/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 01:57:49,417] INFO: Step: 883983/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
01:58:03,277] INFO: Step: 883984/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:58:16,931] INFO: Step: 883985/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:58:30,617] INFO: Step: 883986/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:58:44,381] INFO: Step: 883987/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:58:58,299] INFO: Step: 883988/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:59:12,115] INFO: Step: 883989/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:59:25,927] INFO: Step: 883990/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:59:39,683] INFO: Step: 883991/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300
+[2024-06-09 01:59:53,373] INFO: Step: 883992/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:00:07,097] INFO: Step: 883993/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:00:20,746] INFO: Step: 883994/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:00:34,632] INFO: Step: 883995/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:00:48,439] INFO: Step: 883996/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:01:02,377] INFO: Step: 883997/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:01:16,205] INFO: Step: 883998/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300
+[2024-06-09 02:01:30,032] INFO: Step: 883999/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:01:43,901] INFO: Step: 884000/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:01:43,901] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884000.pth
+[2024-06-09 02:01:50,962] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884000.pth
+[2024-06-09 02:02:03,107] INFO: Step: 884001/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:02:16,945] INFO: Step: 884002/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:02:30,864] INFO: Step: 884003/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:02:44,562] INFO: Step: 884004/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:02:58,337] INFO: Step: 884005/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:03:12,173] INFO: Step: 884006/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:03:26,052] INFO: Step: 884007/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:03:39,804] INFO: Step: 884008/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:03:53,554] INFO: Step: 884009/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:04:07,315] INFO: Step: 884010/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:04:21,005] INFO: Step: 884011/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:04:34,997] INFO: Step: 884012/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:04:48,803] INFO: Step: 884013/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:05:02,740] INFO: Step: 884014/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:05:16,322] INFO: Step: 884015/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:05:30,113] INFO: Step: 884016/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:05:43,913] INFO: Step: 884017/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300
+[2024-06-09 02:05:57,620] INFO: Step: 884018/1000000 Loss: 0.096 scale:
131072.0 LR: 0.0000300 +[2024-06-09 02:06:11,526] INFO: Step: 884019/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:06:25,239] INFO: Step: 884020/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:06:38,845] INFO: Step: 884021/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:06:52,518] INFO: Step: 884022/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:07:06,270] INFO: Step: 884023/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:07:20,112] INFO: Step: 884024/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:07:33,795] INFO: Step: 884025/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:07:47,575] INFO: Step: 884026/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:08:01,658] INFO: Step: 884027/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:08:15,526] INFO: Step: 884028/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:08:29,413] INFO: Step: 884029/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:08:43,249] INFO: Step: 884030/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:08:57,097] INFO: Step: 884031/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:09:11,121] INFO: Step: 884032/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:09:24,915] INFO: Step: 884033/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:09:38,686] INFO: Step: 884034/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:09:52,632] INFO: Step: 884035/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:10:06,427] INFO: Step: 884036/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:10:20,183] INFO: Step: 884037/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:10:34,168] INFO: Step: 884038/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:10:48,022] INFO: Step: 884039/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:11:01,935] INFO: Step: 884040/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:11:15,834] INFO: Step: 884041/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:11:29,725] INFO: Step: 884042/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:11:43,399] INFO: Step: 884043/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:11:57,220] INFO: Step: 884044/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:12:10,972] INFO: Step: 884045/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:12:24,901] INFO: Step: 884046/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:12:38,755] INFO: Step: 884047/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:12:52,552] INFO: Step: 884048/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:13:06,280] INFO: Step: 884049/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:13:20,025] INFO: Step: 884050/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:13:33,741] INFO: Step: 884051/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:13:47,386] INFO: Step: 884052/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:14:01,183] INFO: Step: 884053/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:14:14,981] INFO: Step: 884054/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:14:28,694] INFO: Step: 884055/1000000 Loss: 0.077 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:14:42,484] INFO: Step: 884056/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:14:56,426] INFO: Step: 884057/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:15:10,161] INFO: Step: 884058/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:15:24,254] INFO: Step: 884059/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:15:38,027] INFO: Step: 884060/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:15:51,834] INFO: Step: 884061/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:16:05,665] INFO: Step: 884062/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:16:19,469] INFO: Step: 884063/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:16:33,094] INFO: Step: 884064/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:16:46,882] INFO: Step: 884065/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:17:01,062] INFO: Step: 884066/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:17:14,800] INFO: Step: 884067/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:17:28,443] INFO: Step: 884068/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:17:42,361] INFO: Step: 884069/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:17:56,279] INFO: Step: 884070/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:18:10,179] INFO: Step: 884071/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:18:23,938] INFO: Step: 884072/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:18:37,840] INFO: Step: 884073/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:18:51,643] INFO: Step: 884074/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:19:05,648] INFO: Step: 884075/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:19:19,692] INFO: Step: 884076/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:19:33,346] INFO: Step: 884077/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:19:47,262] INFO: Step: 884078/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:20:00,957] INFO: Step: 884079/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:20:14,932] INFO: Step: 884080/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:20:29,012] INFO: Step: 884081/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:20:42,784] INFO: Step: 884082/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:20:56,707] INFO: Step: 884083/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:21:10,489] INFO: Step: 884084/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:21:24,311] INFO: Step: 884085/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:21:38,215] INFO: Step: 884086/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:21:52,068] INFO: Step: 884087/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:22:05,914] INFO: Step: 884088/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:22:19,739] INFO: Step: 884089/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:22:33,649] INFO: Step: 884090/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:22:47,543] INFO: Step: 884091/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:23:01,625] INFO: Step: 884092/1000000 Loss: 0.083 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:23:15,437] INFO: Step: 884093/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:23:29,238] INFO: Step: 884094/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:23:43,087] INFO: Step: 884095/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:23:57,158] INFO: Step: 884096/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:24:10,874] INFO: Step: 884097/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:24:24,885] INFO: Step: 884098/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:24:38,856] INFO: Step: 884099/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:24:52,643] INFO: Step: 884100/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:25:06,487] INFO: Step: 884101/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:25:20,533] INFO: Step: 884102/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:25:34,338] INFO: Step: 884103/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:25:48,259] INFO: Step: 884104/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:26:02,061] INFO: Step: 884105/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:26:16,054] INFO: Step: 884106/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:26:29,958] INFO: Step: 884107/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:26:43,795] INFO: Step: 884108/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:26:57,650] INFO: Step: 884109/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:27:11,609] INFO: Step: 884110/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:27:25,388] INFO: Step: 884111/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:27:39,276] INFO: Step: 884112/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:27:53,154] INFO: Step: 884113/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:28:06,900] INFO: Step: 884114/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:28:20,646] INFO: Step: 884115/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:28:34,452] INFO: Step: 884116/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:28:48,246] INFO: Step: 884117/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:29:02,188] INFO: Step: 884118/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:29:15,978] INFO: Step: 884119/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:29:29,832] INFO: Step: 884120/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:29:43,472] INFO: Step: 884121/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:29:57,238] INFO: Step: 884122/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:30:10,987] INFO: Step: 884123/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:30:24,928] INFO: Step: 884124/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:30:38,600] INFO: Step: 884125/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:30:52,351] INFO: Step: 884126/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:31:06,322] INFO: Step: 884127/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:31:20,230] INFO: Step: 884128/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:31:34,077] INFO: Step: 884129/1000000 Loss: 0.083 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:31:47,996] INFO: Step: 884130/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:32:01,862] INFO: Step: 884131/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:32:15,658] INFO: Step: 884132/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:32:29,661] INFO: Step: 884133/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:32:43,431] INFO: Step: 884134/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:32:57,304] INFO: Step: 884135/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:33:11,074] INFO: Step: 884136/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:33:24,870] INFO: Step: 884137/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:33:38,640] INFO: Step: 884138/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:33:52,509] INFO: Step: 884139/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:34:06,338] INFO: Step: 884140/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:34:20,110] INFO: Step: 884141/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:34:33,914] INFO: Step: 884142/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:34:47,691] INFO: Step: 884143/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:35:01,771] INFO: Step: 884144/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:35:15,610] INFO: Step: 884145/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:35:29,502] INFO: Step: 884146/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:35:43,397] INFO: Step: 884147/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:35:57,138] INFO: Step: 884148/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:36:10,866] INFO: Step: 884149/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:36:24,558] INFO: Step: 884150/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:36:38,385] INFO: Step: 884151/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:36:52,200] INFO: Step: 884152/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:37:05,953] INFO: Step: 884153/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:37:19,823] INFO: Step: 884154/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:37:33,671] INFO: Step: 884155/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:37:47,511] INFO: Step: 884156/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:38:01,079] INFO: Step: 884157/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:38:14,861] INFO: Step: 884158/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:38:28,704] INFO: Step: 884159/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:38:42,680] INFO: Step: 884160/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:38:56,452] INFO: Step: 884161/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:39:10,293] INFO: Step: 884162/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:39:24,053] INFO: Step: 884163/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:39:37,679] INFO: Step: 884164/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:39:51,693] INFO: Step: 884165/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:40:05,492] INFO: Step: 884166/1000000 Loss: 0.089 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:40:19,254] INFO: Step: 884167/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:40:33,122] INFO: Step: 884168/1000000 Loss: 0.097 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:40:46,951] INFO: Step: 884169/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:41:00,643] INFO: Step: 884170/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:41:14,433] INFO: Step: 884171/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:41:28,154] INFO: Step: 884172/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:41:42,074] INFO: Step: 884173/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:41:55,906] INFO: Step: 884174/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:42:09,687] INFO: Step: 884175/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:42:23,565] INFO: Step: 884176/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:42:37,269] INFO: Step: 884177/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:42:50,965] INFO: Step: 884178/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:43:04,894] INFO: Step: 884179/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:43:18,522] INFO: Step: 884180/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:43:32,271] INFO: Step: 884181/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:43:46,070] INFO: Step: 884182/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:43:59,876] INFO: Step: 884183/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:44:13,585] INFO: Step: 884184/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:44:27,512] INFO: Step: 884185/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:44:41,318] INFO: Step: 884186/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:44:55,091] INFO: Step: 884187/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:45:08,898] INFO: Step: 884188/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:45:22,701] INFO: Step: 884189/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:45:36,530] INFO: Step: 884190/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:45:50,441] INFO: Step: 884191/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:46:04,276] INFO: Step: 884192/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:46:18,006] INFO: Step: 884193/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:46:31,982] INFO: Step: 884194/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:46:45,875] INFO: Step: 884195/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:46:59,816] INFO: Step: 884196/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:47:13,783] INFO: Step: 884197/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:47:27,718] INFO: Step: 884198/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:47:41,521] INFO: Step: 884199/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:47:55,465] INFO: Step: 884200/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:48:09,359] INFO: Step: 884201/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:48:23,157] INFO: Step: 884202/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:48:37,131] INFO: Step: 884203/1000000 Loss: 0.079 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:48:50,929] INFO: Step: 884204/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:49:04,652] INFO: Step: 884205/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:49:18,424] INFO: Step: 884206/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:49:32,470] INFO: Step: 884207/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:49:46,244] INFO: Step: 884208/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:50:00,147] INFO: Step: 884209/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:50:14,009] INFO: Step: 884210/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:50:27,794] INFO: Step: 884211/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:50:41,713] INFO: Step: 884212/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:50:55,553] INFO: Step: 884213/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:51:09,528] INFO: Step: 884214/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:51:23,355] INFO: Step: 884215/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:51:37,170] INFO: Step: 884216/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:51:51,100] INFO: Step: 884217/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:52:04,938] INFO: Step: 884218/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:52:18,741] INFO: Step: 884219/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:52:32,494] INFO: Step: 884220/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:52:46,243] INFO: Step: 884221/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:53:00,469] INFO: Step: 884222/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:53:14,304] INFO: Step: 884223/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:53:28,295] INFO: Step: 884224/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:53:42,292] INFO: Step: 884225/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:53:56,044] INFO: Step: 884226/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:54:09,894] INFO: Step: 884227/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:54:23,850] INFO: Step: 884228/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:54:37,823] INFO: Step: 884229/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:54:51,806] INFO: Step: 884230/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:55:05,474] INFO: Step: 884231/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:55:19,429] INFO: Step: 884232/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:55:33,246] INFO: Step: 884233/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:55:46,921] INFO: Step: 884234/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:56:00,663] INFO: Step: 884235/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:56:14,419] INFO: Step: 884236/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:56:28,462] INFO: Step: 884237/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:56:42,317] INFO: Step: 884238/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:56:55,895] INFO: Step: 884239/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:57:09,736] INFO: Step: 884240/1000000 Loss: 0.078 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 02:57:23,742] INFO: Step: 884241/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:57:37,637] INFO: Step: 884242/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:57:51,572] INFO: Step: 884243/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:58:05,343] INFO: Step: 884244/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:58:19,114] INFO: Step: 884245/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:58:32,885] INFO: Step: 884246/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:58:46,642] INFO: Step: 884247/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:59:00,467] INFO: Step: 884248/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:59:14,208] INFO: Step: 884249/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:59:28,052] INFO: Step: 884250/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:59:41,840] INFO: Step: 884251/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 02:59:55,468] INFO: Step: 884252/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:00:09,345] INFO: Step: 884253/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:00:23,178] INFO: Step: 884254/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:00:37,141] INFO: Step: 884255/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:00:50,882] INFO: Step: 884256/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:01:04,846] INFO: Step: 884257/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:01:18,613] INFO: Step: 884258/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:01:32,297] INFO: Step: 884259/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:01:46,096] INFO: Step: 884260/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:01:59,796] INFO: Step: 884261/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:02:13,765] INFO: Step: 884262/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:02:27,478] INFO: Step: 884263/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:02:41,307] INFO: Step: 884264/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:02:54,954] INFO: Step: 884265/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:03:08,492] INFO: Step: 884266/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:03:22,258] INFO: Step: 884267/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:03:35,993] INFO: Step: 884268/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:03:49,750] INFO: Step: 884269/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:04:03,588] INFO: Step: 884270/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:04:17,387] INFO: Step: 884271/1000000 Loss: 0.098 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:04:31,088] INFO: Step: 884272/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:04:44,900] INFO: Step: 884273/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:04:58,842] INFO: Step: 884274/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:05:12,525] INFO: Step: 884275/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:05:26,477] INFO: Step: 884276/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:05:40,287] INFO: Step: 884277/1000000 Loss: 0.088 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:05:53,996] INFO: Step: 884278/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:06:07,851] INFO: Step: 884279/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:06:21,706] INFO: Step: 884280/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:06:35,478] INFO: Step: 884281/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:06:49,423] INFO: Step: 884282/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:07:03,182] INFO: Step: 884283/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:07:16,978] INFO: Step: 884284/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:07:30,848] INFO: Step: 884285/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:07:44,591] INFO: Step: 884286/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:07:58,361] INFO: Step: 884287/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:08:12,011] INFO: Step: 884288/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:08:25,943] INFO: Step: 884289/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:08:39,825] INFO: Step: 884290/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:08:53,671] INFO: Step: 884291/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:09:07,567] INFO: Step: 884292/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:09:21,479] INFO: Step: 884293/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:09:35,155] INFO: Step: 884294/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:09:48,806] INFO: Step: 884295/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:10:02,527] INFO: Step: 884296/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:10:16,476] INFO: Step: 884297/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:10:30,003] INFO: Step: 884298/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:10:43,912] INFO: Step: 884299/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:10:57,645] INFO: Step: 884300/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:11:11,313] INFO: Step: 884301/1000000 Loss: 0.099 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:11:25,130] INFO: Step: 884302/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:11:39,011] INFO: Step: 884303/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:11:52,849] INFO: Step: 884304/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:12:06,544] INFO: Step: 884305/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:12:20,290] INFO: Step: 884306/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:12:34,206] INFO: Step: 884307/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:12:48,033] INFO: Step: 884308/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:13:01,893] INFO: Step: 884309/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:13:15,598] INFO: Step: 884310/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:13:29,405] INFO: Step: 884311/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:13:43,288] INFO: Step: 884312/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:13:57,182] INFO: Step: 884313/1000000 Loss: 0.098 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:14:10,945] INFO: Step: 884314/1000000 Loss: 0.087 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:14:24,698] INFO: Step: 884315/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:14:38,650] INFO: Step: 884316/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:14:52,578] INFO: Step: 884317/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:15:06,345] INFO: Step: 884318/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:15:20,303] INFO: Step: 884319/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:15:34,099] INFO: Step: 884320/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:15:48,137] INFO: Step: 884321/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:16:02,048] INFO: Step: 884322/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:16:15,960] INFO: Step: 884323/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:16:29,779] INFO: Step: 884324/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:16:43,808] INFO: Step: 884325/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:16:57,717] INFO: Step: 884326/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:17:11,540] INFO: Step: 884327/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:17:25,218] INFO: Step: 884328/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:17:38,885] INFO: Step: 884329/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:17:52,500] INFO: Step: 884330/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:18:06,390] INFO: Step: 884331/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:18:20,309] INFO: Step: 884332/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:18:34,088] INFO: Step: 884333/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:18:47,829] INFO: Step: 884334/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:19:01,760] INFO: Step: 884335/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:19:15,739] INFO: Step: 884336/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:19:29,436] INFO: Step: 884337/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:19:43,154] INFO: Step: 884338/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:19:57,112] INFO: Step: 884339/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:20:10,886] INFO: Step: 884340/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:20:24,712] INFO: Step: 884341/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:20:38,544] INFO: Step: 884342/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:20:52,370] INFO: Step: 884343/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:21:06,299] INFO: Step: 884344/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:21:20,008] INFO: Step: 884345/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:21:33,787] INFO: Step: 884346/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:21:47,605] INFO: Step: 884347/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:22:01,386] INFO: Step: 884348/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:22:15,116] INFO: Step: 884349/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:22:28,912] INFO: Step: 884350/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:22:42,591] INFO: Step: 884351/1000000 Loss: 0.069 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:22:56,269] INFO: Step: 884352/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:23:09,955] INFO: Step: 884353/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:23:23,762] INFO: Step: 884354/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:23:37,503] INFO: Step: 884355/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:23:51,271] INFO: Step: 884356/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:24:05,228] INFO: Step: 884357/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:24:19,117] INFO: Step: 884358/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:24:33,058] INFO: Step: 884359/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:24:46,830] INFO: Step: 884360/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:25:00,649] INFO: Step: 884361/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:25:14,282] INFO: Step: 884362/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:25:28,163] INFO: Step: 884363/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:25:42,067] INFO: Step: 884364/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:25:56,016] INFO: Step: 884365/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:26:09,653] INFO: Step: 884366/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:26:23,562] INFO: Step: 884367/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:26:37,419] INFO: Step: 884368/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:26:51,184] INFO: Step: 884369/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:27:04,938] INFO: Step: 884370/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:27:18,663] INFO: Step: 884371/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:27:32,496] INFO: Step: 884372/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:27:46,368] INFO: Step: 884373/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:28:00,115] INFO: Step: 884374/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:28:14,122] INFO: Step: 884375/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:28:27,896] INFO: Step: 884376/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:28:41,674] INFO: Step: 884377/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:28:55,710] INFO: Step: 884378/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:29:09,572] INFO: Step: 884379/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:29:23,328] INFO: Step: 884380/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:29:37,155] INFO: Step: 884381/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:29:50,924] INFO: Step: 884382/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:30:04,663] INFO: Step: 884383/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:30:18,403] INFO: Step: 884384/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:30:32,047] INFO: Step: 884385/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:30:45,810] INFO: Step: 884386/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:30:59,481] INFO: Step: 884387/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:31:13,201] INFO: Step: 884388/1000000 Loss: 0.079 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:31:26,987] INFO: Step: 884389/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:31:40,913] INFO: Step: 884390/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:31:54,647] INFO: Step: 884391/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:32:08,348] INFO: Step: 884392/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:32:22,280] INFO: Step: 884393/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:32:36,172] INFO: Step: 884394/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:32:49,921] INFO: Step: 884395/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:33:03,612] INFO: Step: 884396/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:33:17,470] INFO: Step: 884397/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:33:31,311] INFO: Step: 884398/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:33:45,096] INFO: Step: 884399/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:33:58,822] INFO: Step: 884400/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:34:12,458] INFO: Step: 884401/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:34:26,175] INFO: Step: 884402/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:34:40,173] INFO: Step: 884403/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:34:53,996] INFO: Step: 884404/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:35:07,737] INFO: Step: 884405/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:35:21,501] INFO: Step: 884406/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:35:35,377] INFO: Step: 884407/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:35:49,196] INFO: Step: 884408/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:36:02,962] INFO: Step: 884409/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:36:16,715] INFO: Step: 884410/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:36:30,596] INFO: Step: 884411/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:36:44,199] INFO: Step: 884412/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:36:58,010] INFO: Step: 884413/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:37:11,969] INFO: Step: 884414/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:37:25,908] INFO: Step: 884415/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:37:39,695] INFO: Step: 884416/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:37:53,381] INFO: Step: 884417/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:38:07,074] INFO: Step: 884418/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:38:21,037] INFO: Step: 884419/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:38:34,967] INFO: Step: 884420/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:38:48,710] INFO: Step: 884421/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:39:02,353] INFO: Step: 884422/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:39:16,072] INFO: Step: 884423/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:39:29,882] INFO: Step: 884424/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:39:43,737] INFO: Step: 884425/1000000 Loss: 0.081 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:39:57,612] INFO: Step: 884426/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:40:11,496] INFO: Step: 884427/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:40:25,256] INFO: Step: 884428/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:40:39,065] INFO: Step: 884429/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:40:52,870] INFO: Step: 884430/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:41:06,822] INFO: Step: 884431/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:41:20,489] INFO: Step: 884432/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:41:34,329] INFO: Step: 884433/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:41:48,091] INFO: Step: 884434/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:42:01,903] INFO: Step: 884435/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:42:15,623] INFO: Step: 884436/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:42:29,544] INFO: Step: 884437/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:42:43,265] INFO: Step: 884438/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:42:56,952] INFO: Step: 884439/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:43:10,725] INFO: Step: 884440/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:43:24,454] INFO: Step: 884441/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:43:38,182] INFO: Step: 884442/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:43:52,096] INFO: Step: 884443/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:44:06,092] INFO: Step: 884444/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:44:19,801] INFO: Step: 884445/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:44:33,500] INFO: Step: 884446/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:44:47,338] INFO: Step: 884447/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:45:01,183] INFO: Step: 884448/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:45:15,205] INFO: Step: 884449/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:45:28,996] INFO: Step: 884450/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:45:43,003] INFO: Step: 884451/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:45:56,759] INFO: Step: 884452/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:46:10,597] INFO: Step: 884453/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:46:24,298] INFO: Step: 884454/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:46:38,244] INFO: Step: 884455/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:46:52,089] INFO: Step: 884456/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:47:05,872] INFO: Step: 884457/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:47:19,607] INFO: Step: 884458/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:47:33,377] INFO: Step: 884459/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:47:47,089] INFO: Step: 884460/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:48:01,003] INFO: Step: 884461/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:48:14,685] INFO: Step: 884462/1000000 Loss: 0.083 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:48:28,554] INFO: Step: 884463/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:48:42,338] INFO: Step: 884464/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:48:56,182] INFO: Step: 884465/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:49:09,964] INFO: Step: 884466/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:49:23,772] INFO: Step: 884467/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:49:37,481] INFO: Step: 884468/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:49:51,360] INFO: Step: 884469/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:50:05,106] INFO: Step: 884470/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:50:18,938] INFO: Step: 884471/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:50:32,669] INFO: Step: 884472/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:50:46,584] INFO: Step: 884473/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:51:00,449] INFO: Step: 884474/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:51:14,298] INFO: Step: 884475/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:51:28,116] INFO: Step: 884476/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:51:41,955] INFO: Step: 884477/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:51:55,557] INFO: Step: 884478/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:52:09,523] INFO: Step: 884479/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:52:23,345] INFO: Step: 884480/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:52:37,057] INFO: Step: 884481/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:52:50,908] INFO: Step: 884482/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:53:04,692] INFO: Step: 884483/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:53:18,454] INFO: Step: 884484/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:53:32,221] INFO: Step: 884485/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:53:45,952] INFO: Step: 884486/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:53:59,720] INFO: Step: 884487/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:54:13,534] INFO: Step: 884488/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:54:27,284] INFO: Step: 884489/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:54:41,036] INFO: Step: 884490/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:54:54,912] INFO: Step: 884491/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:55:08,680] INFO: Step: 884492/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:55:22,473] INFO: Step: 884493/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:55:36,265] INFO: Step: 884494/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:55:50,051] INFO: Step: 884495/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:56:03,876] INFO: Step: 884496/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:56:17,739] INFO: Step: 884497/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:56:31,607] INFO: Step: 884498/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:56:45,227] INFO: Step: 884499/1000000 Loss: 0.064 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 03:56:58,989] INFO: Step: 884500/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:56:58,990] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth +[2024-06-09 03:57:06,140] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth +[2024-06-09 03:57:18,296] INFO: Step: 884501/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:57:32,020] INFO: Step: 884502/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:57:45,703] INFO: Step: 884503/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:57:59,398] INFO: Step: 884504/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:58:13,215] INFO: Step: 884505/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:58:26,830] INFO: Step: 884506/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:58:40,482] INFO: Step: 884507/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:58:54,216] INFO: Step: 884508/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:59:08,015] INFO: Step: 884509/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:59:21,733] INFO: Step: 884510/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:59:35,578] INFO: Step: 884511/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 03:59:49,356] INFO: Step: 884512/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:00:03,011] INFO: Step: 884513/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:00:16,916] INFO: Step: 884514/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:00:30,953] INFO: Step: 884515/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:00:44,633] INFO: Step: 884516/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:00:58,468] INFO: Step: 884517/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:01:12,315] INFO: Step: 884518/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:01:26,347] INFO: Step: 884519/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:01:40,249] INFO: Step: 884520/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:01:53,949] INFO: Step: 884521/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:02:07,640] INFO: Step: 884522/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:02:21,342] INFO: Step: 884523/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:02:35,053] INFO: Step: 884524/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:02:48,775] INFO: Step: 884525/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:03:02,619] INFO: Step: 884526/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:03:16,454] INFO: Step: 884527/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:03:30,028] INFO: Step: 884528/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:03:43,865] INFO: Step: 884529/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:03:57,726] INFO: Step: 884530/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:04:11,766] INFO: Step: 884531/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:04:25,513] INFO: Step: 884532/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:04:39,424] INFO: Step: 884533/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 
04:04:53,188] INFO: Step: 884534/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:05:06,903] INFO: Step: 884535/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:05:20,596] INFO: Step: 884536/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:05:34,440] INFO: Step: 884537/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:05:48,211] INFO: Step: 884538/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:06:01,929] INFO: Step: 884539/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:06:15,773] INFO: Step: 884540/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:06:29,519] INFO: Step: 884541/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:06:43,272] INFO: Step: 884542/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:06:57,155] INFO: Step: 884543/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:07:10,840] INFO: Step: 884544/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:07:24,713] INFO: Step: 884545/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:07:38,497] INFO: Step: 884546/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:07:52,199] INFO: Step: 884547/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:08:05,980] INFO: Step: 884548/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:08:19,999] INFO: Step: 884549/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:08:33,827] INFO: Step: 884550/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:08:47,585] INFO: Step: 884551/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:09:01,544] INFO: Step: 884552/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:09:15,287] INFO: Step: 884553/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:09:28,928] INFO: Step: 884554/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:09:42,584] INFO: Step: 884555/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:09:56,209] INFO: Step: 884556/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:10:10,069] INFO: Step: 884557/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:10:23,721] INFO: Step: 884558/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:10:37,534] INFO: Step: 884559/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:10:51,266] INFO: Step: 884560/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:11:05,007] INFO: Step: 884561/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:11:18,860] INFO: Step: 884562/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:11:32,493] INFO: Step: 884563/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:11:46,112] INFO: Step: 884564/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:11:59,921] INFO: Step: 884565/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:12:13,603] INFO: Step: 884566/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:12:27,587] INFO: Step: 884567/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:12:41,371] INFO: Step: 884568/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:12:55,209] INFO: Step: 884569/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:13:08,995] INFO: Step: 884570/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 
04:13:22,726] INFO: Step: 884571/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:13:36,444] INFO: Step: 884572/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:13:50,301] INFO: Step: 884573/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:14:04,115] INFO: Step: 884574/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:14:17,797] INFO: Step: 884575/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:14:31,823] INFO: Step: 884576/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:14:45,697] INFO: Step: 884577/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:14:59,433] INFO: Step: 884578/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:15:13,418] INFO: Step: 884579/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:15:27,252] INFO: Step: 884580/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:15:41,092] INFO: Step: 884581/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:15:54,858] INFO: Step: 884582/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:16:08,836] INFO: Step: 884583/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:16:22,665] INFO: Step: 884584/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:16:36,330] INFO: Step: 884585/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:16:50,204] INFO: Step: 884586/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:17:03,955] INFO: Step: 884587/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:17:17,675] INFO: Step: 884588/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:17:31,651] INFO: Step: 884589/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:17:45,532] INFO: Step: 884590/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:17:59,363] INFO: Step: 884591/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:18:13,659] INFO: Step: 884592/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:18:27,404] INFO: Step: 884593/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:18:41,177] INFO: Step: 884594/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:18:55,012] INFO: Step: 884595/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:19:08,800] INFO: Step: 884596/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:19:22,728] INFO: Step: 884597/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:19:36,478] INFO: Step: 884598/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:19:50,368] INFO: Step: 884599/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:20:04,230] INFO: Step: 884600/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:20:18,141] INFO: Step: 884601/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:20:31,955] INFO: Step: 884602/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:20:45,917] INFO: Step: 884603/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:20:59,823] INFO: Step: 884604/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:21:13,542] INFO: Step: 884605/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:21:27,369] INFO: Step: 884606/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:21:41,229] INFO: Step: 884607/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 
04:21:55,174] INFO: Step: 884608/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:22:08,948] INFO: Step: 884609/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:22:22,823] INFO: Step: 884610/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:22:36,666] INFO: Step: 884611/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:22:50,487] INFO: Step: 884612/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:23:04,394] INFO: Step: 884613/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:23:18,256] INFO: Step: 884614/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:23:32,109] INFO: Step: 884615/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:23:45,870] INFO: Step: 884616/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:23:59,649] INFO: Step: 884617/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:24:13,413] INFO: Step: 884618/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:24:27,437] INFO: Step: 884619/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:24:41,328] INFO: Step: 884620/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:24:55,227] INFO: Step: 884621/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:25:08,899] INFO: Step: 884622/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:25:22,724] INFO: Step: 884623/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:25:36,678] INFO: Step: 884624/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:25:50,343] INFO: Step: 884625/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:26:03,933] INFO: Step: 884626/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:26:17,459] INFO: Step: 884627/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:26:31,008] INFO: Step: 884628/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:26:44,756] INFO: Step: 884629/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:26:58,598] INFO: Step: 884630/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:27:12,509] INFO: Step: 884631/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:27:26,121] INFO: Step: 884632/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:27:39,853] INFO: Step: 884633/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:27:53,464] INFO: Step: 884634/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:28:07,279] INFO: Step: 884635/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:28:20,984] INFO: Step: 884636/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:28:34,560] INFO: Step: 884637/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:28:48,230] INFO: Step: 884638/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:29:01,983] INFO: Step: 884639/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:29:15,292] INFO: Step: 884640/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:29:29,011] INFO: Step: 884641/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:29:42,605] INFO: Step: 884642/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:29:56,138] INFO: Step: 884643/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:30:09,810] INFO: Step: 884644/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 
04:30:23,525] INFO: Step: 884645/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:30:37,189] INFO: Step: 884646/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:30:51,014] INFO: Step: 884647/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:31:04,938] INFO: Step: 884648/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:31:18,583] INFO: Step: 884649/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:31:32,259] INFO: Step: 884650/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:31:45,884] INFO: Step: 884651/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:31:59,443] INFO: Step: 884652/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:32:13,275] INFO: Step: 884653/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:32:26,871] INFO: Step: 884654/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:32:40,436] INFO: Step: 884655/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:32:53,972] INFO: Step: 884656/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 04:33:35,685] INFO: Step: 884657/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 08:37:03,310] INFO: {'__name__': 'Config: VideoLDM Decoder', 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'max_words': 1000, 'num_workers': 16, 'prefetch_factor': 2, 'resolution': [256, 256], 'vit_out_dim': 1024, 'vit_resolution': [224, 224], 'depth_clamp': 10.0, 'misc_size': 384, 'depth_std': 20.0, 'frame_lens': [24], 'sample_fps': [8], 'vid_dataset': {'type': 'ObjDataset', 'data_list': './Obj/data/6.8_with_motion/train.txt', 'max_words': 1000, 'resolution': [256, 256], 'vit_resolution': [224, 224], 'get_first_frame': True}, 'img_dataset': {'type': 'ImageBaseDataset', 'data_list': ['laion_400m'], 'max_words': 1000, 'resolution': [448, 256]}, 'batch_sizes': {'1': 256, '4': 4, '8': 4, '16': 4, '24': 20}, 'Diffusion': {'type': 'DiffusionDDIM', 'schedule': 'cosine', 'schedule_param': {'num_timesteps': 1000, 'cosine_s': 0.008, 'zero_terminal_snr': True}, 'mean_type': 'v', 'loss_type': 'mse', 'var_type': 'fixed_small', 'rescale_timesteps': False, 'noise_strength': 0.1, 'ddim_timesteps': 50, 'with_latent_motion_loss': False}, 'ddim_timesteps': 50, 'use_div_loss': False, 'p_zero': 0.0, 'guide_scale': 3.0, 'vit_mean': [0.48145466, 0.4578275, 0.40821073], 'vit_std': [0.26862954, 0.26130258, 0.27577711], 'scale_factor': 0.18215, 'use_checkpoint': True, 'use_sharded_ddp': False, 'use_fsdp': False, 'use_fp16': True, 'temporal_attention': True, 'UNet': {'type': 'UNetSD_I2VGen', 'in_dim': 4, 'dim': 320, 'y_dim': 1024, 'context_dim': 1024, 'out_dim': 4, 'dim_mult': [1, 2, 4, 4], 'num_heads': 8, 'head_dim': 64, 'num_res_blocks': 2, 'attn_scales': [1.0, 0.5, 0.25], 'dropout': 0.1, 'temporal_attention': True, 'temporal_attn_times': 1, 'use_checkpoint': True, 'use_fps_condition': False, 'use_sim_mask': False, 'upper_len': 128, 'concat_dim': 4, 'default_fps': 8, 'use_camera_condition': False, 'use_lgm_refine': False, 'with_motion': True, 'with_latent_motion': False}, 'guidances': [], 'auto_encoder': {'type': 'AutoencoderKL', 'ddconfig': {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0, 'video_kernel_size': [3, 1, 1]}, 'embed_dim': 4, 'pretrained': './pretrained_models/modelscope_t2v/VQGAN_autoencoder.pth'}, 'embedder': {'type': 
'FrozenOpenCLIPTtxtVisualEmbedder', 'layer': 'penultimate', 'pretrained': 'pretrained_models/modelscope_i2v/I2VGen-XL/open_clip_pytorch_model.bin', 'vit_resolution': [224, 224]}, 'ema_decay': 0.9999, 'num_steps': 1000000, 'lr': 3e-05, 'weight_decay': 0.0, 'betas': (0.9, 0.999), 'eps': 1e-08, 'chunk_size': 4, 'decoder_bs': 4, 'alpha': 0.7, 'save_ckp_interval': 500, 'warmup_steps': 10, 'decay_mode': 'cosine', 'use_ema': True, 'load_from': None, 'Pretrain': {'type': 'pretrain_specific_strategies', 'fix_weight': False, 'grad_scale': 0.5, 'resume_checkpoint': './workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth', 'sd_keys_path': 'pretrained_models/modelscope_i2v/I2VGen-XL/stable_diffusion_image_key_temporal_attention_x1.json'}, 'viz_interval': 500, 'visual_train': {'type': 'VisualTrainTextImageToVideo', 'partial_keys': [['y', 'image', 'local_image', 'fps', 'camera_data', 'gs_data']], 'use_offset_noise': True, 'guide_scale': 6.0}, 'visual_inference': {'type': 'VisualGeneratedVideos'}, 'inference_list_path': '', 'log_interval': 1, 'log_dir': 'workspace/i2v_motion/obj_train_motion', 'seed': 6666, 'negative_prompt': 'static, 3d asset, Distorted, discontinuous, Ugly, blurry, low resolution, disfigured, disconnected limbs, Ugly faces, incomplete arms', 'with_motion': True, 'motion_magnitude': None, 'with_latent_motion': False, 'latent_motion_magnitude': None, 'ENABLE': True, 'DATASET': 'webvid10m', 'TASK_TYPE': 'train_i2v_entrance', 'noise_strength': 0.1, 'use_zero_infer': True, 'cfg_file': './configs/obj_train_motion.yaml', 'init_method': 'tcp://localhost:9999', 'debug': False, 'opts': [], 'pmi_rank': 0, 'pmi_world_size': 1, 'gpus_per_machine': 3, 'world_size': 3, 'gpu': 0, 'rank': 0, 'log_file': 'workspace/i2v_motion/obj_train_motion/log.txt'} +[2024-06-09 08:37:03,310] INFO: Save all the file in to dir workspace/i2v_motion/obj_train_motion +[2024-06-09 08:37:03,311] INFO: Going into i2v_img_fullid_vidcom function on 0 gpu +[2024-06-09 08:37:03,334] INFO: Current worker with max_frames=24, batch_size=20, sample_fps=8 +[2024-06-09 08:38:10,929] INFO: {'__name__': 'Config: VideoLDM Decoder', 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'max_words': 1000, 'num_workers': 16, 'prefetch_factor': 2, 'resolution': [256, 256], 'vit_out_dim': 1024, 'vit_resolution': [224, 224], 'depth_clamp': 10.0, 'misc_size': 384, 'depth_std': 20.0, 'frame_lens': [24], 'sample_fps': [8], 'vid_dataset': {'type': 'ObjDataset', 'data_list': './Obj/data/6.8_with_motion/train.txt', 'max_words': 1000, 'resolution': [256, 256], 'vit_resolution': [224, 224], 'get_first_frame': True}, 'img_dataset': {'type': 'ImageBaseDataset', 'data_list': ['laion_400m'], 'max_words': 1000, 'resolution': [448, 256]}, 'batch_sizes': {'1': 256, '4': 4, '8': 4, '16': 4, '24': 20}, 'Diffusion': {'type': 'DiffusionDDIM', 'schedule': 'cosine', 'schedule_param': {'num_timesteps': 1000, 'cosine_s': 0.008, 'zero_terminal_snr': True}, 'mean_type': 'v', 'loss_type': 'mse', 'var_type': 'fixed_small', 'rescale_timesteps': False, 'noise_strength': 0.1, 'ddim_timesteps': 50, 'with_latent_motion_loss': False}, 'ddim_timesteps': 50, 'use_div_loss': False, 'p_zero': 0.0, 'guide_scale': 3.0, 'vit_mean': [0.48145466, 0.4578275, 0.40821073], 'vit_std': [0.26862954, 0.26130258, 0.27577711], 'scale_factor': 0.18215, 'use_checkpoint': True, 'use_sharded_ddp': False, 'use_fsdp': False, 'use_fp16': True, 'temporal_attention': True, 'UNet': {'type': 'UNetSD_I2VGen', 'in_dim': 4, 'dim': 320, 'y_dim': 1024, 'context_dim': 1024, 'out_dim': 4, 
'dim_mult': [1, 2, 4, 4], 'num_heads': 8, 'head_dim': 64, 'num_res_blocks': 2, 'attn_scales': [1.0, 0.5, 0.25], 'dropout': 0.1, 'temporal_attention': True, 'temporal_attn_times': 1, 'use_checkpoint': True, 'use_fps_condition': False, 'use_sim_mask': False, 'upper_len': 128, 'concat_dim': 4, 'default_fps': 8, 'use_camera_condition': False, 'use_lgm_refine': False, 'with_motion': True, 'with_latent_motion': False}, 'guidances': [], 'auto_encoder': {'type': 'AutoencoderKL', 'ddconfig': {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0, 'video_kernel_size': [3, 1, 1]}, 'embed_dim': 4, 'pretrained': './pretrained_models/modelscope_t2v/VQGAN_autoencoder.pth'}, 'embedder': {'type': 'FrozenOpenCLIPTtxtVisualEmbedder', 'layer': 'penultimate', 'pretrained': 'pretrained_models/modelscope_i2v/I2VGen-XL/open_clip_pytorch_model.bin', 'vit_resolution': [224, 224]}, 'ema_decay': 0.9999, 'num_steps': 1000000, 'lr': 3e-05, 'weight_decay': 0.0, 'betas': (0.9, 0.999), 'eps': 1e-08, 'chunk_size': 4, 'decoder_bs': 4, 'alpha': 0.7, 'save_ckp_interval': 500, 'warmup_steps': 10, 'decay_mode': 'cosine', 'use_ema': True, 'load_from': None, 'Pretrain': {'type': 'pretrain_specific_strategies', 'fix_weight': False, 'grad_scale': 0.5, 'resume_checkpoint': './workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth', 'sd_keys_path': 'pretrained_models/modelscope_i2v/I2VGen-XL/stable_diffusion_image_key_temporal_attention_x1.json'}, 'viz_interval': 500, 'visual_train': {'type': 'VisualTrainTextImageToVideo', 'partial_keys': [['y', 'image', 'local_image', 'fps', 'camera_data', 'gs_data']], 'use_offset_noise': True, 'guide_scale': 6.0}, 'visual_inference': {'type': 'VisualGeneratedVideos'}, 'inference_list_path': '', 'log_interval': 1, 'log_dir': 'workspace/i2v_motion/obj_train_motion', 'seed': 6666, 'negative_prompt': 'static, 3d asset, Distorted, discontinuous, Ugly, blurry, low resolution, disfigured, disconnected limbs, Ugly faces, incomplete arms', 'with_motion': True, 'motion_magnitude': None, 'with_latent_motion': False, 'latent_motion_magnitude': None, 'ENABLE': True, 'DATASET': 'webvid10m', 'TASK_TYPE': 'train_i2v_entrance', 'noise_strength': 0.1, 'use_zero_infer': True, 'cfg_file': './configs/obj_train_motion.yaml', 'init_method': 'tcp://localhost:9999', 'debug': False, 'opts': [], 'pmi_rank': 0, 'pmi_world_size': 1, 'gpus_per_machine': 3, 'world_size': 3, 'gpu': 0, 'rank': 0, 'log_file': 'workspace/i2v_motion/obj_train_motion/log.txt'} +[2024-06-09 08:38:10,929] INFO: Save all the file in to dir workspace/i2v_motion/obj_train_motion +[2024-06-09 08:38:10,929] INFO: Going into i2v_img_fullid_vidcom function on 0 gpu +[2024-06-09 08:38:10,940] INFO: Current worker with max_frames=24, batch_size=20, sample_fps=8 +[2024-06-09 08:39:15,378] INFO: Loaded ViT-H-14 model config. +[2024-06-09 08:39:38,483] INFO: Loading pretrained ViT-H-14 weights (pretrained_models/modelscope_i2v/I2VGen-XL/open_clip_pytorch_model.bin). 
+[2024-06-09 08:40:13,364] INFO: Restored from ./pretrained_models/modelscope_t2v/VQGAN_autoencoder.pth +[2024-06-09 08:40:27,019] INFO: load a fixed model with +[2024-06-09 08:40:27,128] INFO: Successfully load step 884500 model from ./workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth +[2024-06-09 08:40:27,128] INFO: load a fixed model with 823M parameters +[2024-06-09 08:41:42,211] INFO: Step: 884500/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000030 +[2024-06-09 08:41:42,211] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth +[2024-06-09 08:41:50,132] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00884500.pth +[2024-06-09 08:42:03,167] INFO: Step: 884501/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000060 +[2024-06-09 08:42:16,296] INFO: Step: 884502/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000090 +[2024-06-09 08:42:29,432] INFO: Step: 884503/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000120 +[2024-06-09 08:42:42,447] INFO: Step: 884504/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000150 +[2024-06-09 08:42:55,578] INFO: Step: 884505/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000180 +[2024-06-09 08:43:08,728] INFO: Step: 884506/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000210 +[2024-06-09 08:43:21,875] INFO: Step: 884507/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000240 +[2024-06-09 08:43:35,035] INFO: Step: 884508/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000270 +[2024-06-09 08:43:48,459] INFO: Step: 884509/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:44:01,763] INFO: Step: 884510/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:44:14,985] INFO: Step: 884511/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:44:28,327] INFO: Step: 884512/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:44:41,474] INFO: Step: 884513/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:44:54,874] INFO: Step: 884514/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:45:08,232] INFO: Step: 884515/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:45:21,355] INFO: Step: 884516/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:45:34,601] INFO: Step: 884517/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:45:47,896] INFO: Step: 884518/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:46:01,108] INFO: Step: 884519/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:46:14,291] INFO: Step: 884520/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:46:27,626] INFO: Step: 884521/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:46:41,018] INFO: Step: 884522/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:46:54,204] INFO: Step: 884523/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:47:07,382] INFO: Step: 884524/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:47:20,644] INFO: Step: 884525/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:47:33,634] INFO: Step: 884526/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:47:46,944] INFO: Step: 884527/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:48:00,270] INFO: Step: 884528/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:48:13,455] INFO: Step: 884529/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:48:26,839] INFO: Step: 884530/1000000 Loss: 0.088 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 08:48:40,124] INFO: Step: 884531/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:48:53,204] INFO: Step: 884532/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:49:06,487] INFO: Step: 884533/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:49:19,765] INFO: Step: 884534/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:49:33,000] INFO: Step: 884535/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:49:46,339] INFO: Step: 884536/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:49:59,383] INFO: Step: 884537/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:50:12,630] INFO: Step: 884538/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:50:25,695] INFO: Step: 884539/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:50:38,917] INFO: Step: 884540/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:50:52,196] INFO: Step: 884541/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:51:05,389] INFO: Step: 884542/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:51:18,760] INFO: Step: 884543/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:51:32,025] INFO: Step: 884544/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:51:45,260] INFO: Step: 884545/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:51:58,449] INFO: Step: 884546/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:52:11,705] INFO: Step: 884547/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:52:25,039] INFO: Step: 884548/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:52:38,115] INFO: Step: 884549/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:52:51,262] INFO: Step: 884550/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:53:04,501] INFO: Step: 884551/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:53:17,863] INFO: Step: 884552/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:53:31,157] INFO: Step: 884553/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:53:44,341] INFO: Step: 884554/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:53:57,482] INFO: Step: 884555/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:54:10,708] INFO: Step: 884556/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:54:23,937] INFO: Step: 884557/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:54:37,431] INFO: Step: 884558/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:54:50,605] INFO: Step: 884559/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:55:03,909] INFO: Step: 884560/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:55:17,216] INFO: Step: 884561/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:55:30,532] INFO: Step: 884562/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:55:43,948] INFO: Step: 884563/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:55:57,017] INFO: Step: 884564/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:56:10,142] INFO: Step: 884565/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:56:23,384] INFO: Step: 884566/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:56:36,603] INFO: Step: 884567/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
08:56:49,733] INFO: Step: 884568/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:57:02,970] INFO: Step: 884569/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:57:16,215] INFO: Step: 884570/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:57:29,568] INFO: Step: 884571/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:57:42,688] INFO: Step: 884572/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:57:56,058] INFO: Step: 884573/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:58:09,097] INFO: Step: 884574/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:58:22,434] INFO: Step: 884575/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:58:35,631] INFO: Step: 884576/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:58:48,860] INFO: Step: 884577/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:59:02,124] INFO: Step: 884578/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:59:15,496] INFO: Step: 884579/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:59:28,566] INFO: Step: 884580/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:59:41,732] INFO: Step: 884581/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 08:59:54,914] INFO: Step: 884582/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:00:08,129] INFO: Step: 884583/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:00:21,403] INFO: Step: 884584/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:00:34,802] INFO: Step: 884585/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:00:47,966] INFO: Step: 884586/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:01:01,412] INFO: Step: 884587/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:01:14,626] INFO: Step: 884588/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:01:27,868] INFO: Step: 884589/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:01:40,982] INFO: Step: 884590/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:01:54,157] INFO: Step: 884591/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:02:07,399] INFO: Step: 884592/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:02:20,580] INFO: Step: 884593/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:02:34,014] INFO: Step: 884594/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:02:47,232] INFO: Step: 884595/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:03:00,339] INFO: Step: 884596/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:03:13,593] INFO: Step: 884597/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:03:26,810] INFO: Step: 884598/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:03:40,041] INFO: Step: 884599/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:03:53,177] INFO: Step: 884600/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:04:06,365] INFO: Step: 884601/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:04:19,633] INFO: Step: 884602/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:04:32,848] INFO: Step: 884603/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:04:46,072] INFO: Step: 884604/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:04:59,245] INFO: Step: 
884605/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:05:12,342] INFO: Step: 884606/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:05:25,531] INFO: Step: 884607/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:05:38,791] INFO: Step: 884608/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:05:51,979] INFO: Step: 884609/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:06:05,058] INFO: Step: 884610/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:06:18,236] INFO: Step: 884611/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:06:31,318] INFO: Step: 884612/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:06:44,765] INFO: Step: 884613/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:06:57,910] INFO: Step: 884614/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:07:11,144] INFO: Step: 884615/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:07:24,298] INFO: Step: 884616/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:07:37,571] INFO: Step: 884617/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:07:50,802] INFO: Step: 884618/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:08:04,098] INFO: Step: 884619/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:08:17,314] INFO: Step: 884620/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:08:30,672] INFO: Step: 884621/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:08:43,700] INFO: Step: 884622/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:08:56,936] INFO: Step: 884623/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:09:09,973] INFO: Step: 884624/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:09:22,965] INFO: Step: 884625/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:09:36,238] INFO: Step: 884626/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:09:49,402] INFO: Step: 884627/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:10:02,517] INFO: Step: 884628/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:10:15,723] INFO: Step: 884629/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:10:29,094] INFO: Step: 884630/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:10:42,332] INFO: Step: 884631/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:10:55,513] INFO: Step: 884632/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:11:08,556] INFO: Step: 884633/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:11:21,970] INFO: Step: 884634/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:11:35,233] INFO: Step: 884635/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:11:48,389] INFO: Step: 884636/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:12:01,605] INFO: Step: 884637/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:12:14,607] INFO: Step: 884638/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:12:27,803] INFO: Step: 884639/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:12:41,064] INFO: Step: 884640/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:12:54,324] INFO: Step: 884641/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:13:07,477] INFO: Step: 884642/1000000 Loss: 0.072 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 09:13:20,920] INFO: Step: 884643/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:13:34,156] INFO: Step: 884644/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:13:47,452] INFO: Step: 884645/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:14:00,755] INFO: Step: 884646/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:14:13,965] INFO: Step: 884647/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:14:27,129] INFO: Step: 884648/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:14:40,393] INFO: Step: 884649/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:14:53,640] INFO: Step: 884650/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:15:06,852] INFO: Step: 884651/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:15:20,006] INFO: Step: 884652/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:15:33,252] INFO: Step: 884653/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:15:46,366] INFO: Step: 884654/1000000 Loss: 0.106 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:15:59,556] INFO: Step: 884655/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:16:12,747] INFO: Step: 884656/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:16:26,012] INFO: Step: 884657/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:16:39,016] INFO: Step: 884658/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:16:52,253] INFO: Step: 884659/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:17:05,447] INFO: Step: 884660/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:17:18,530] INFO: Step: 884661/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:17:31,714] INFO: Step: 884662/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:17:44,944] INFO: Step: 884663/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:17:58,195] INFO: Step: 884664/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:18:11,363] INFO: Step: 884665/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:18:24,641] INFO: Step: 884666/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:18:37,722] INFO: Step: 884667/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:18:50,959] INFO: Step: 884668/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:19:04,124] INFO: Step: 884669/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:19:17,307] INFO: Step: 884670/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:19:30,493] INFO: Step: 884671/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:19:43,746] INFO: Step: 884672/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:19:56,333] INFO: Step: 884673/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:20:09,580] INFO: Step: 884674/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:20:22,693] INFO: Step: 884675/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:20:36,132] INFO: Step: 884676/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:20:49,295] INFO: Step: 884677/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:21:02,579] INFO: Step: 884678/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:21:15,858] INFO: Step: 884679/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
09:21:29,092] INFO: Step: 884680/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:21:42,333] INFO: Step: 884681/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:21:55,513] INFO: Step: 884682/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:22:08,672] INFO: Step: 884683/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:22:21,897] INFO: Step: 884684/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:22:35,084] INFO: Step: 884685/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:22:48,251] INFO: Step: 884686/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:23:01,473] INFO: Step: 884687/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:23:14,760] INFO: Step: 884688/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:23:27,967] INFO: Step: 884689/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:23:41,162] INFO: Step: 884690/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:23:54,575] INFO: Step: 884691/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:24:07,878] INFO: Step: 884692/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:24:20,953] INFO: Step: 884693/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:24:34,162] INFO: Step: 884694/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:24:47,403] INFO: Step: 884695/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:25:00,629] INFO: Step: 884696/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:25:13,869] INFO: Step: 884697/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:25:27,097] INFO: Step: 884698/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:25:40,223] INFO: Step: 884699/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:25:53,368] INFO: Step: 884700/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:26:06,589] INFO: Step: 884701/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:26:19,600] INFO: Step: 884702/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:26:32,635] INFO: Step: 884703/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:26:45,867] INFO: Step: 884704/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:26:59,269] INFO: Step: 884705/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:27:12,424] INFO: Step: 884706/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:27:25,768] INFO: Step: 884707/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:27:39,066] INFO: Step: 884708/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:27:52,253] INFO: Step: 884709/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:28:05,393] INFO: Step: 884710/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:28:18,635] INFO: Step: 884711/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:28:31,940] INFO: Step: 884712/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:28:45,108] INFO: Step: 884713/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:28:58,145] INFO: Step: 884714/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:29:11,350] INFO: Step: 884715/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:29:24,685] INFO: Step: 884716/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:29:37,884] INFO: Step: 
884717/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:29:51,091] INFO: Step: 884718/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:30:04,343] INFO: Step: 884719/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:30:17,654] INFO: Step: 884720/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:30:30,771] INFO: Step: 884721/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:30:43,967] INFO: Step: 884722/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:30:57,269] INFO: Step: 884723/1000000 Loss: 0.101 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:31:10,596] INFO: Step: 884724/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:31:23,850] INFO: Step: 884725/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:31:37,057] INFO: Step: 884726/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:31:50,211] INFO: Step: 884727/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:32:03,385] INFO: Step: 884728/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:32:16,471] INFO: Step: 884729/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:32:29,790] INFO: Step: 884730/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:32:42,816] INFO: Step: 884731/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:32:56,014] INFO: Step: 884732/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:33:09,156] INFO: Step: 884733/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:33:22,407] INFO: Step: 884734/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:33:35,743] INFO: Step: 884735/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:33:49,007] INFO: Step: 884736/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:34:02,198] INFO: Step: 884737/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:34:15,313] INFO: Step: 884738/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:34:28,608] INFO: Step: 884739/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:34:41,883] INFO: Step: 884740/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:34:55,096] INFO: Step: 884741/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:35:08,249] INFO: Step: 884742/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:35:21,421] INFO: Step: 884743/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:35:34,595] INFO: Step: 884744/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:35:47,850] INFO: Step: 884745/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:36:01,114] INFO: Step: 884746/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:36:14,286] INFO: Step: 884747/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:36:27,603] INFO: Step: 884748/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:36:40,758] INFO: Step: 884749/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:36:53,945] INFO: Step: 884750/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:37:07,011] INFO: Step: 884751/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:37:20,173] INFO: Step: 884752/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:37:33,452] INFO: Step: 884753/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:37:46,600] INFO: Step: 884754/1000000 Loss: 0.069 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 09:37:59,865] INFO: Step: 884755/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:38:13,172] INFO: Step: 884756/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:38:26,525] INFO: Step: 884757/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:38:39,700] INFO: Step: 884758/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:38:52,963] INFO: Step: 884759/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:39:06,126] INFO: Step: 884760/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:39:19,341] INFO: Step: 884761/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:39:32,488] INFO: Step: 884762/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:39:45,649] INFO: Step: 884763/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:39:59,041] INFO: Step: 884764/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:40:12,233] INFO: Step: 884765/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:40:25,334] INFO: Step: 884766/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:40:38,499] INFO: Step: 884767/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:40:51,719] INFO: Step: 884768/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:41:04,861] INFO: Step: 884769/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:41:18,129] INFO: Step: 884770/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:41:31,200] INFO: Step: 884771/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:41:44,485] INFO: Step: 884772/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:41:57,764] INFO: Step: 884773/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:42:10,875] INFO: Step: 884774/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:42:24,121] INFO: Step: 884775/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:42:37,238] INFO: Step: 884776/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:42:50,697] INFO: Step: 884777/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:43:03,971] INFO: Step: 884778/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:43:17,171] INFO: Step: 884779/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:43:30,539] INFO: Step: 884780/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:43:43,827] INFO: Step: 884781/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:43:56,951] INFO: Step: 884782/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:44:10,399] INFO: Step: 884783/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:44:23,684] INFO: Step: 884784/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:44:36,904] INFO: Step: 884785/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:44:50,115] INFO: Step: 884786/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:45:03,429] INFO: Step: 884787/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:45:16,698] INFO: Step: 884788/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:45:30,160] INFO: Step: 884789/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:45:43,330] INFO: Step: 884790/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:45:56,616] INFO: Step: 884791/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
09:46:09,706] INFO: Step: 884792/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:46:23,132] INFO: Step: 884793/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:46:36,372] INFO: Step: 884794/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:46:49,662] INFO: Step: 884795/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:47:02,896] INFO: Step: 884796/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:47:16,122] INFO: Step: 884797/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:47:29,367] INFO: Step: 884798/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:47:42,542] INFO: Step: 884799/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:47:55,580] INFO: Step: 884800/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:48:08,677] INFO: Step: 884801/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:48:21,761] INFO: Step: 884802/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:48:34,974] INFO: Step: 884803/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:48:48,292] INFO: Step: 884804/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:49:01,728] INFO: Step: 884805/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:49:14,877] INFO: Step: 884806/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:49:28,028] INFO: Step: 884807/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:49:41,213] INFO: Step: 884808/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:49:54,582] INFO: Step: 884809/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:50:07,841] INFO: Step: 884810/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:50:21,069] INFO: Step: 884811/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:50:34,419] INFO: Step: 884812/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:50:47,559] INFO: Step: 884813/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:51:00,675] INFO: Step: 884814/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:51:13,939] INFO: Step: 884815/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:51:27,218] INFO: Step: 884816/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:51:40,375] INFO: Step: 884817/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:51:53,679] INFO: Step: 884818/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:52:06,883] INFO: Step: 884819/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:52:20,089] INFO: Step: 884820/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:52:33,327] INFO: Step: 884821/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:52:46,513] INFO: Step: 884822/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:52:59,725] INFO: Step: 884823/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:53:12,839] INFO: Step: 884824/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:53:26,077] INFO: Step: 884825/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:53:39,299] INFO: Step: 884826/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:53:52,420] INFO: Step: 884827/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:54:05,547] INFO: Step: 884828/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:54:18,840] INFO: Step: 
884829/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:54:32,022] INFO: Step: 884830/1000000 Loss: 0.108 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:54:45,283] INFO: Step: 884831/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:54:58,394] INFO: Step: 884832/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:55:11,587] INFO: Step: 884833/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:55:24,704] INFO: Step: 884834/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:55:38,061] INFO: Step: 884835/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:55:51,265] INFO: Step: 884836/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:56:04,371] INFO: Step: 884837/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:56:17,755] INFO: Step: 884838/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:56:30,923] INFO: Step: 884839/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:56:44,263] INFO: Step: 884840/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:56:57,398] INFO: Step: 884841/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:57:10,603] INFO: Step: 884842/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:57:23,885] INFO: Step: 884843/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:57:37,099] INFO: Step: 884844/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:57:50,347] INFO: Step: 884845/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:58:03,494] INFO: Step: 884846/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:58:16,816] INFO: Step: 884847/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:58:30,008] INFO: Step: 884848/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:58:43,214] INFO: Step: 884849/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:58:56,522] INFO: Step: 884850/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:59:09,742] INFO: Step: 884851/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:59:23,069] INFO: Step: 884852/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:59:36,209] INFO: Step: 884853/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 09:59:49,330] INFO: Step: 884854/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:00:02,086] INFO: Step: 884855/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:00:15,405] INFO: Step: 884856/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:00:28,746] INFO: Step: 884857/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:00:42,069] INFO: Step: 884858/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:00:55,262] INFO: Step: 884859/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:01:08,531] INFO: Step: 884860/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:01:21,833] INFO: Step: 884861/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:01:35,086] INFO: Step: 884862/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:01:48,203] INFO: Step: 884863/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:02:01,191] INFO: Step: 884864/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:02:14,447] INFO: Step: 884865/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:02:27,720] INFO: Step: 884866/1000000 Loss: 0.086 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 10:02:40,918] INFO: Step: 884867/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:02:54,228] INFO: Step: 884868/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:03:07,512] INFO: Step: 884869/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:03:20,634] INFO: Step: 884870/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:03:33,823] INFO: Step: 884871/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:03:47,124] INFO: Step: 884872/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:04:00,199] INFO: Step: 884873/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:04:13,456] INFO: Step: 884874/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:04:26,612] INFO: Step: 884875/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:04:39,755] INFO: Step: 884876/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:04:52,896] INFO: Step: 884877/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:05:05,995] INFO: Step: 884878/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:05:19,188] INFO: Step: 884879/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:05:32,453] INFO: Step: 884880/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:05:45,616] INFO: Step: 884881/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:05:58,732] INFO: Step: 884882/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:06:11,983] INFO: Step: 884883/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:06:25,240] INFO: Step: 884884/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:06:38,388] INFO: Step: 884885/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:06:51,552] INFO: Step: 884886/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:07:04,659] INFO: Step: 884887/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:07:17,773] INFO: Step: 884888/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:07:31,043] INFO: Step: 884889/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:07:44,246] INFO: Step: 884890/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:07:57,347] INFO: Step: 884891/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:08:10,469] INFO: Step: 884892/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:08:23,673] INFO: Step: 884893/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:08:36,848] INFO: Step: 884894/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:08:50,072] INFO: Step: 884895/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:09:03,193] INFO: Step: 884896/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:09:16,540] INFO: Step: 884897/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:09:29,706] INFO: Step: 884898/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:09:42,861] INFO: Step: 884899/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:09:55,964] INFO: Step: 884900/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:10:09,176] INFO: Step: 884901/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:10:22,378] INFO: Step: 884902/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:10:35,548] INFO: Step: 884903/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
10:10:48,726] INFO: Step: 884904/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:11:01,838] INFO: Step: 884905/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:11:15,031] INFO: Step: 884906/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:11:28,283] INFO: Step: 884907/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:11:41,464] INFO: Step: 884908/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:11:54,689] INFO: Step: 884909/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:12:07,885] INFO: Step: 884910/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:12:21,018] INFO: Step: 884911/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:12:34,170] INFO: Step: 884912/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:12:47,478] INFO: Step: 884913/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:13:00,765] INFO: Step: 884914/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:13:13,881] INFO: Step: 884915/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:13:27,107] INFO: Step: 884916/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:13:40,230] INFO: Step: 884917/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:13:53,468] INFO: Step: 884918/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:14:06,751] INFO: Step: 884919/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:14:19,970] INFO: Step: 884920/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:14:33,362] INFO: Step: 884921/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:14:46,773] INFO: Step: 884922/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:14:59,927] INFO: Step: 884923/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:15:13,139] INFO: Step: 884924/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:15:26,337] INFO: Step: 884925/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:15:39,722] INFO: Step: 884926/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:15:52,978] INFO: Step: 884927/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:16:06,243] INFO: Step: 884928/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:16:19,315] INFO: Step: 884929/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:16:32,673] INFO: Step: 884930/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:16:46,017] INFO: Step: 884931/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:16:59,291] INFO: Step: 884932/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:17:12,522] INFO: Step: 884933/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:17:25,660] INFO: Step: 884934/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:17:38,749] INFO: Step: 884935/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:17:51,916] INFO: Step: 884936/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:18:05,010] INFO: Step: 884937/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:18:18,105] INFO: Step: 884938/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:18:31,236] INFO: Step: 884939/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:18:44,484] INFO: Step: 884940/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:18:57,696] INFO: Step: 
884941/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:19:10,949] INFO: Step: 884942/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:19:24,220] INFO: Step: 884943/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:19:37,474] INFO: Step: 884944/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:19:50,703] INFO: Step: 884945/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:20:03,927] INFO: Step: 884946/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:20:17,171] INFO: Step: 884947/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:20:30,285] INFO: Step: 884948/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:20:43,497] INFO: Step: 884949/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:20:56,589] INFO: Step: 884950/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:21:09,902] INFO: Step: 884951/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:21:23,071] INFO: Step: 884952/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:21:36,301] INFO: Step: 884953/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:21:49,490] INFO: Step: 884954/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:22:02,630] INFO: Step: 884955/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:22:15,929] INFO: Step: 884956/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:22:29,279] INFO: Step: 884957/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:22:42,480] INFO: Step: 884958/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:22:55,703] INFO: Step: 884959/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:23:09,000] INFO: Step: 884960/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:23:22,113] INFO: Step: 884961/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:23:35,445] INFO: Step: 884962/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:23:48,625] INFO: Step: 884963/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:24:01,874] INFO: Step: 884964/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:24:15,084] INFO: Step: 884965/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:24:28,278] INFO: Step: 884966/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:24:41,582] INFO: Step: 884967/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:24:54,775] INFO: Step: 884968/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:25:08,198] INFO: Step: 884969/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:25:21,393] INFO: Step: 884970/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:25:34,590] INFO: Step: 884971/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:25:47,870] INFO: Step: 884972/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:26:01,189] INFO: Step: 884973/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:26:14,392] INFO: Step: 884974/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:26:27,437] INFO: Step: 884975/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:26:40,599] INFO: Step: 884976/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:26:53,865] INFO: Step: 884977/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:27:07,160] INFO: Step: 884978/1000000 Loss: 0.078 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 10:27:20,392] INFO: Step: 884979/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:27:33,807] INFO: Step: 884980/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:27:47,244] INFO: Step: 884981/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:28:00,516] INFO: Step: 884982/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:28:13,648] INFO: Step: 884983/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:28:26,824] INFO: Step: 884984/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:28:40,084] INFO: Step: 884985/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:28:53,442] INFO: Step: 884986/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:29:06,517] INFO: Step: 884987/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:29:19,755] INFO: Step: 884988/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:29:33,143] INFO: Step: 884989/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:29:46,381] INFO: Step: 884990/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:29:59,742] INFO: Step: 884991/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:30:13,113] INFO: Step: 884992/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:30:26,215] INFO: Step: 884993/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:30:39,408] INFO: Step: 884994/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:30:52,475] INFO: Step: 884995/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:05,678] INFO: Step: 884996/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:18,972] INFO: Step: 884997/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:32,133] INFO: Step: 884998/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:45,413] INFO: Step: 884999/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:58,602] INFO: Step: 885000/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:31:58,603] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00885000.pth +[2024-06-09 10:32:04,453] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00885000.pth +[2024-06-09 10:32:16,601] INFO: Step: 885001/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:32:29,896] INFO: Step: 885002/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:32:43,106] INFO: Step: 885003/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:32:56,484] INFO: Step: 885004/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:33:09,745] INFO: Step: 885005/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:33:22,954] INFO: Step: 885006/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:33:36,132] INFO: Step: 885007/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:33:49,587] INFO: Step: 885008/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:34:02,813] INFO: Step: 885009/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:34:15,951] INFO: Step: 885010/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:34:29,270] INFO: Step: 885011/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:34:42,531] INFO: Step: 885012/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:34:55,808] INFO: Step: 885013/1000000 
Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:35:09,045] INFO: Step: 885014/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:35:22,266] INFO: Step: 885015/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:35:35,340] INFO: Step: 885016/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:35:48,611] INFO: Step: 885017/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:36:01,871] INFO: Step: 885018/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:36:15,074] INFO: Step: 885019/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:36:28,195] INFO: Step: 885020/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:36:41,417] INFO: Step: 885021/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:36:54,588] INFO: Step: 885022/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:37:07,845] INFO: Step: 885023/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:37:21,058] INFO: Step: 885024/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:37:34,329] INFO: Step: 885025/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:37:47,482] INFO: Step: 885026/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:38:00,975] INFO: Step: 885027/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:38:14,208] INFO: Step: 885028/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:38:27,432] INFO: Step: 885029/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:38:40,636] INFO: Step: 885030/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:38:53,890] INFO: Step: 885031/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:39:07,079] INFO: Step: 885032/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:39:20,504] INFO: Step: 885033/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:39:33,632] INFO: Step: 885034/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:39:46,782] INFO: Step: 885035/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:39:59,960] INFO: Step: 885036/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:40:13,133] INFO: Step: 885037/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:40:26,316] INFO: Step: 885038/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:40:39,718] INFO: Step: 885039/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:40:52,944] INFO: Step: 885040/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:41:06,147] INFO: Step: 885041/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:41:19,424] INFO: Step: 885042/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:41:32,524] INFO: Step: 885043/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:41:45,697] INFO: Step: 885044/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:41:58,991] INFO: Step: 885045/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:42:12,197] INFO: Step: 885046/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:42:25,528] INFO: Step: 885047/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:42:38,673] INFO: Step: 885048/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:42:51,977] INFO: Step: 885049/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:43:05,444] INFO: Step: 885050/1000000 Loss: 0.081 scale: 65536.0 LR: 
0.0000300 +[2024-06-09 10:43:18,750] INFO: Step: 885051/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:43:31,964] INFO: Step: 885052/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:43:45,123] INFO: Step: 885053/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:43:58,322] INFO: Step: 885054/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:44:11,614] INFO: Step: 885055/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:44:24,893] INFO: Step: 885056/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:44:37,994] INFO: Step: 885057/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:44:51,378] INFO: Step: 885058/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:45:04,704] INFO: Step: 885059/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:45:17,901] INFO: Step: 885060/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:45:31,055] INFO: Step: 885061/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:45:44,286] INFO: Step: 885062/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:45:57,485] INFO: Step: 885063/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:46:10,638] INFO: Step: 885064/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:46:23,702] INFO: Step: 885065/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:46:36,900] INFO: Step: 885066/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:46:50,150] INFO: Step: 885067/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:47:03,333] INFO: Step: 885068/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:47:16,615] INFO: Step: 885069/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:47:29,772] INFO: Step: 885070/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:47:42,950] INFO: Step: 885071/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:47:56,241] INFO: Step: 885072/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:48:09,443] INFO: Step: 885073/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:48:22,672] INFO: Step: 885074/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:48:36,124] INFO: Step: 885075/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:48:49,281] INFO: Step: 885076/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:49:02,503] INFO: Step: 885077/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:49:15,748] INFO: Step: 885078/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:49:28,971] INFO: Step: 885079/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:49:42,039] INFO: Step: 885080/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:49:55,406] INFO: Step: 885081/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:50:08,354] INFO: Step: 885082/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:50:21,535] INFO: Step: 885083/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:50:34,760] INFO: Step: 885084/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:50:47,933] INFO: Step: 885085/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:51:01,142] INFO: Step: 885086/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:51:14,466] INFO: Step: 885087/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:51:27,574] 
INFO: Step: 885088/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:51:40,771] INFO: Step: 885089/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:51:54,010] INFO: Step: 885090/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:52:07,253] INFO: Step: 885091/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:52:20,363] INFO: Step: 885092/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:52:33,541] INFO: Step: 885093/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:52:46,816] INFO: Step: 885094/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:53:00,057] INFO: Step: 885095/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:53:13,156] INFO: Step: 885096/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:53:26,290] INFO: Step: 885097/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:53:39,492] INFO: Step: 885098/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:53:52,768] INFO: Step: 885099/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:54:06,049] INFO: Step: 885100/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:54:19,184] INFO: Step: 885101/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:54:32,369] INFO: Step: 885102/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:54:45,591] INFO: Step: 885103/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:54:58,798] INFO: Step: 885104/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:55:12,067] INFO: Step: 885105/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:55:25,149] INFO: Step: 885106/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:55:38,332] INFO: Step: 885107/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:55:51,499] INFO: Step: 885108/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:56:04,630] INFO: Step: 885109/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:56:17,970] INFO: Step: 885110/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:56:31,096] INFO: Step: 885111/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:56:44,473] INFO: Step: 885112/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:56:57,629] INFO: Step: 885113/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:57:10,750] INFO: Step: 885114/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:57:23,766] INFO: Step: 885115/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:57:36,894] INFO: Step: 885116/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:57:50,069] INFO: Step: 885117/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:58:03,285] INFO: Step: 885118/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:58:16,592] INFO: Step: 885119/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:58:29,801] INFO: Step: 885120/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:58:43,006] INFO: Step: 885121/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:58:56,326] INFO: Step: 885122/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:59:09,712] INFO: Step: 885123/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:59:23,022] INFO: Step: 885124/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:59:36,260] INFO: Step: 885125/1000000 Loss: 0.088 
scale: 65536.0 LR: 0.0000300 +[2024-06-09 10:59:49,512] INFO: Step: 885126/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:00:02,665] INFO: Step: 885127/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:00:15,918] INFO: Step: 885128/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:00:29,008] INFO: Step: 885129/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:00:42,379] INFO: Step: 885130/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:00:55,577] INFO: Step: 885131/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:01:08,652] INFO: Step: 885132/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:01:21,748] INFO: Step: 885133/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:01:34,962] INFO: Step: 885134/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:01:48,206] INFO: Step: 885135/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:02:01,509] INFO: Step: 885136/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:02:14,626] INFO: Step: 885137/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:02:27,784] INFO: Step: 885138/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:02:40,963] INFO: Step: 885139/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:02:54,059] INFO: Step: 885140/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:03:07,279] INFO: Step: 885141/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:03:20,570] INFO: Step: 885142/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:03:33,576] INFO: Step: 885143/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:03:46,685] INFO: Step: 885144/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:03:59,889] INFO: Step: 885145/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:04:13,092] INFO: Step: 885146/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:04:26,189] INFO: Step: 885147/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:04:39,380] INFO: Step: 885148/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:04:52,796] INFO: Step: 885149/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:05:06,016] INFO: Step: 885150/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:05:19,332] INFO: Step: 885151/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:05:32,496] INFO: Step: 885152/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:05:45,641] INFO: Step: 885153/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:05:58,876] INFO: Step: 885154/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:06:12,084] INFO: Step: 885155/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:06:25,302] INFO: Step: 885156/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:06:38,614] INFO: Step: 885157/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:06:51,910] INFO: Step: 885158/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:07:05,081] INFO: Step: 885159/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:07:18,090] INFO: Step: 885160/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:07:31,411] INFO: Step: 885161/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:07:44,765] INFO: Step: 885162/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 
+[2024-06-09 11:07:58,129] INFO: Step: 885163/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:08:11,329] INFO: Step: 885164/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:08:24,483] INFO: Step: 885165/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:08:37,932] INFO: Step: 885166/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:08:51,195] INFO: Step: 885167/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:09:04,400] INFO: Step: 885168/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:09:17,687] INFO: Step: 885169/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:09:30,860] INFO: Step: 885170/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:09:43,944] INFO: Step: 885171/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:09:57,132] INFO: Step: 885172/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:10:10,371] INFO: Step: 885173/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:10:23,521] INFO: Step: 885174/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:10:36,690] INFO: Step: 885175/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:10:49,920] INFO: Step: 885176/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:11:03,052] INFO: Step: 885177/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:11:16,376] INFO: Step: 885178/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:11:29,662] INFO: Step: 885179/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:11:42,947] INFO: Step: 885180/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:11:56,240] INFO: Step: 885181/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:12:09,483] INFO: Step: 885182/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:12:22,799] INFO: Step: 885183/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:12:35,888] INFO: Step: 885184/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:12:49,056] INFO: Step: 885185/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:13:02,259] INFO: Step: 885186/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:13:15,379] INFO: Step: 885187/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:13:28,596] INFO: Step: 885188/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:13:41,648] INFO: Step: 885189/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:13:54,814] INFO: Step: 885190/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:14:08,353] INFO: Step: 885191/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:14:21,618] INFO: Step: 885192/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:14:34,869] INFO: Step: 885193/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:14:48,085] INFO: Step: 885194/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:15:01,300] INFO: Step: 885195/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:15:14,417] INFO: Step: 885196/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:15:27,711] INFO: Step: 885197/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:15:40,873] INFO: Step: 885198/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:15:54,204] INFO: Step: 885199/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:16:07,418] INFO: Step: 
885200/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:16:20,612] INFO: Step: 885201/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:16:34,029] INFO: Step: 885202/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:16:47,323] INFO: Step: 885203/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:17:00,536] INFO: Step: 885204/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:17:13,718] INFO: Step: 885205/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:17:26,896] INFO: Step: 885206/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:17:40,043] INFO: Step: 885207/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:17:53,135] INFO: Step: 885208/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:18:06,415] INFO: Step: 885209/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:18:19,633] INFO: Step: 885210/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:18:32,634] INFO: Step: 885211/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:18:45,769] INFO: Step: 885212/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:18:59,021] INFO: Step: 885213/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:19:12,180] INFO: Step: 885214/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:19:25,366] INFO: Step: 885215/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:19:38,740] INFO: Step: 885216/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:19:51,964] INFO: Step: 885217/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:20:05,127] INFO: Step: 885218/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:20:18,362] INFO: Step: 885219/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:20:31,584] INFO: Step: 885220/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:20:44,779] INFO: Step: 885221/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:20:58,100] INFO: Step: 885222/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:21:11,447] INFO: Step: 885223/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:21:24,683] INFO: Step: 885224/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:21:37,917] INFO: Step: 885225/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:21:51,344] INFO: Step: 885226/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:22:04,572] INFO: Step: 885227/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:22:17,779] INFO: Step: 885228/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:22:31,057] INFO: Step: 885229/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:22:44,153] INFO: Step: 885230/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:22:57,429] INFO: Step: 885231/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:23:10,601] INFO: Step: 885232/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:23:23,754] INFO: Step: 885233/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:23:37,037] INFO: Step: 885234/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:23:50,349] INFO: Step: 885235/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:24:03,640] INFO: Step: 885236/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:24:17,319] INFO: Step: 885237/1000000 Loss: 0.086 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 11:24:30,522] INFO: Step: 885238/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:24:43,705] INFO: Step: 885239/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:24:56,908] INFO: Step: 885240/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:25:10,179] INFO: Step: 885241/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:25:23,457] INFO: Step: 885242/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:25:36,825] INFO: Step: 885243/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:25:50,073] INFO: Step: 885244/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:26:03,250] INFO: Step: 885245/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:26:16,436] INFO: Step: 885246/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:26:29,638] INFO: Step: 885247/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:26:42,759] INFO: Step: 885248/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:26:56,052] INFO: Step: 885249/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:27:09,343] INFO: Step: 885250/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:27:22,578] INFO: Step: 885251/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:27:35,880] INFO: Step: 885252/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:27:49,200] INFO: Step: 885253/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:28:02,455] INFO: Step: 885254/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:28:15,883] INFO: Step: 885255/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:28:29,071] INFO: Step: 885256/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:28:42,348] INFO: Step: 885257/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:28:55,513] INFO: Step: 885258/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:29:08,735] INFO: Step: 885259/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:29:22,022] INFO: Step: 885260/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:29:35,209] INFO: Step: 885261/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:29:48,474] INFO: Step: 885262/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:30:01,735] INFO: Step: 885263/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:30:15,024] INFO: Step: 885264/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:30:28,318] INFO: Step: 885265/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:30:41,519] INFO: Step: 885266/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:30:54,914] INFO: Step: 885267/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:31:08,066] INFO: Step: 885268/1000000 Loss: 0.063 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:31:21,207] INFO: Step: 885269/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:31:34,761] INFO: Step: 885270/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:31:48,022] INFO: Step: 885271/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:32:01,386] INFO: Step: 885272/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:32:14,718] INFO: Step: 885273/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:32:28,169] INFO: Step: 885274/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
11:32:41,486] INFO: Step: 885275/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:32:54,910] INFO: Step: 885276/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:33:08,173] INFO: Step: 885277/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:33:21,259] INFO: Step: 885278/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:33:34,726] INFO: Step: 885279/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:33:47,924] INFO: Step: 885280/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:34:00,967] INFO: Step: 885281/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:34:14,054] INFO: Step: 885282/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:34:27,202] INFO: Step: 885283/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:34:40,322] INFO: Step: 885284/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:34:53,451] INFO: Step: 885285/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:35:06,749] INFO: Step: 885286/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:35:19,846] INFO: Step: 885287/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:35:32,978] INFO: Step: 885288/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:35:46,259] INFO: Step: 885289/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:35:59,539] INFO: Step: 885290/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:36:12,729] INFO: Step: 885291/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:36:26,038] INFO: Step: 885292/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:36:39,377] INFO: Step: 885293/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:36:52,703] INFO: Step: 885294/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:37:05,964] INFO: Step: 885295/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:37:19,162] INFO: Step: 885296/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:37:32,317] INFO: Step: 885297/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:37:45,485] INFO: Step: 885298/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:37:58,607] INFO: Step: 885299/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:38:11,868] INFO: Step: 885300/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:38:25,022] INFO: Step: 885301/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:38:38,227] INFO: Step: 885302/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:38:51,428] INFO: Step: 885303/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:39:04,550] INFO: Step: 885304/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:39:17,778] INFO: Step: 885305/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:39:30,990] INFO: Step: 885306/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:39:44,227] INFO: Step: 885307/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:39:57,465] INFO: Step: 885308/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:40:10,643] INFO: Step: 885309/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:40:23,845] INFO: Step: 885310/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:40:36,894] INFO: Step: 885311/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:40:50,160] INFO: Step: 
885312/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:41:03,328] INFO: Step: 885313/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:41:16,407] INFO: Step: 885314/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:41:29,612] INFO: Step: 885315/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:41:42,837] INFO: Step: 885316/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:41:56,224] INFO: Step: 885317/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:42:09,499] INFO: Step: 885318/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:42:22,667] INFO: Step: 885319/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:42:35,830] INFO: Step: 885320/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:42:48,941] INFO: Step: 885321/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:43:02,091] INFO: Step: 885322/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:43:15,322] INFO: Step: 885323/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:43:28,654] INFO: Step: 885324/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:43:41,826] INFO: Step: 885325/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:43:55,037] INFO: Step: 885326/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:44:08,228] INFO: Step: 885327/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:44:21,251] INFO: Step: 885328/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:44:34,527] INFO: Step: 885329/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:44:47,560] INFO: Step: 885330/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:45:00,788] INFO: Step: 885331/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:45:14,180] INFO: Step: 885332/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:45:27,462] INFO: Step: 885333/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:45:40,552] INFO: Step: 885334/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:45:53,880] INFO: Step: 885335/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:46:07,105] INFO: Step: 885336/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:46:20,417] INFO: Step: 885337/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:46:33,603] INFO: Step: 885338/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:46:46,951] INFO: Step: 885339/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:47:00,176] INFO: Step: 885340/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:47:13,440] INFO: Step: 885341/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:47:26,680] INFO: Step: 885342/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:47:39,874] INFO: Step: 885343/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:47:52,951] INFO: Step: 885344/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:48:06,205] INFO: Step: 885345/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:48:19,498] INFO: Step: 885346/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:48:32,609] INFO: Step: 885347/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:48:45,886] INFO: Step: 885348/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:48:59,172] INFO: Step: 885349/1000000 Loss: 0.076 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 11:49:12,308] INFO: Step: 885350/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:49:25,535] INFO: Step: 885351/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:49:38,792] INFO: Step: 885352/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:49:52,006] INFO: Step: 885353/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:50:05,400] INFO: Step: 885354/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:50:18,628] INFO: Step: 885355/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:50:31,802] INFO: Step: 885356/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:50:45,062] INFO: Step: 885357/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:50:58,332] INFO: Step: 885358/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:51:11,517] INFO: Step: 885359/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:51:24,741] INFO: Step: 885360/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:51:38,028] INFO: Step: 885361/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:51:51,390] INFO: Step: 885362/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:52:04,701] INFO: Step: 885363/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:52:17,988] INFO: Step: 885364/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:52:31,351] INFO: Step: 885365/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:52:44,488] INFO: Step: 885366/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:52:57,803] INFO: Step: 885367/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:53:11,036] INFO: Step: 885368/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:53:24,269] INFO: Step: 885369/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:53:37,537] INFO: Step: 885370/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:53:50,765] INFO: Step: 885371/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:54:03,646] INFO: Step: 885372/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:54:16,886] INFO: Step: 885373/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:54:30,247] INFO: Step: 885374/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:54:43,570] INFO: Step: 885375/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:54:57,020] INFO: Step: 885376/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:55:10,326] INFO: Step: 885377/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:55:23,565] INFO: Step: 885378/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:55:36,798] INFO: Step: 885379/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:55:50,041] INFO: Step: 885380/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:56:03,374] INFO: Step: 885381/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:56:16,431] INFO: Step: 885382/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:56:29,666] INFO: Step: 885383/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:56:42,859] INFO: Step: 885384/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:56:56,004] INFO: Step: 885385/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:57:09,176] INFO: Step: 885386/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
11:57:22,633] INFO: Step: 885387/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:57:35,975] INFO: Step: 885388/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:57:49,309] INFO: Step: 885389/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:58:02,697] INFO: Step: 885390/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:58:16,035] INFO: Step: 885391/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:58:29,456] INFO: Step: 885392/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:58:42,919] INFO: Step: 885393/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:58:56,199] INFO: Step: 885394/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:59:09,737] INFO: Step: 885395/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:59:23,105] INFO: Step: 885396/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:59:36,333] INFO: Step: 885397/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 11:59:49,455] INFO: Step: 885398/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:00:02,587] INFO: Step: 885399/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:00:16,080] INFO: Step: 885400/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:00:29,125] INFO: Step: 885401/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:00:42,418] INFO: Step: 885402/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:00:55,504] INFO: Step: 885403/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:01:08,606] INFO: Step: 885404/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:01:21,937] INFO: Step: 885405/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:01:35,049] INFO: Step: 885406/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:01:48,266] INFO: Step: 885407/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:02:01,496] INFO: Step: 885408/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:02:14,530] INFO: Step: 885409/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:02:27,712] INFO: Step: 885410/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:02:40,939] INFO: Step: 885411/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:02:54,192] INFO: Step: 885412/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:03:07,560] INFO: Step: 885413/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:03:20,731] INFO: Step: 885414/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:03:34,035] INFO: Step: 885415/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:03:47,344] INFO: Step: 885416/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:04:00,289] INFO: Step: 885417/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:04:13,541] INFO: Step: 885418/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:04:26,481] INFO: Step: 885419/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:04:39,872] INFO: Step: 885420/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:04:53,078] INFO: Step: 885421/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:05:06,267] INFO: Step: 885422/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:05:19,621] INFO: Step: 885423/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:05:32,660] INFO: Step: 
885424/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:05:45,874] INFO: Step: 885425/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:05:59,105] INFO: Step: 885426/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:06:12,668] INFO: Step: 885427/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:06:26,075] INFO: Step: 885428/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:06:39,371] INFO: Step: 885429/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:06:52,382] INFO: Step: 885430/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:07:05,571] INFO: Step: 885431/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:07:18,796] INFO: Step: 885432/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:07:32,137] INFO: Step: 885433/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:07:45,428] INFO: Step: 885434/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:07:58,727] INFO: Step: 885435/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:08:11,822] INFO: Step: 885436/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:08:25,259] INFO: Step: 885437/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:08:38,409] INFO: Step: 885438/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:08:51,521] INFO: Step: 885439/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:09:04,610] INFO: Step: 885440/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:09:18,115] INFO: Step: 885441/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:09:31,685] INFO: Step: 885442/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:09:44,789] INFO: Step: 885443/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:09:58,184] INFO: Step: 885444/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:10:11,491] INFO: Step: 885445/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:10:24,659] INFO: Step: 885446/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:10:38,000] INFO: Step: 885447/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:10:51,321] INFO: Step: 885448/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:11:04,558] INFO: Step: 885449/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:11:17,645] INFO: Step: 885450/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:11:30,830] INFO: Step: 885451/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:11:44,060] INFO: Step: 885452/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:11:57,119] INFO: Step: 885453/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:12:10,345] INFO: Step: 885454/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:12:23,569] INFO: Step: 885455/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:12:36,907] INFO: Step: 885456/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:12:50,017] INFO: Step: 885457/1000000 Loss: 0.097 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:13:03,256] INFO: Step: 885458/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:13:16,506] INFO: Step: 885459/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:13:29,805] INFO: Step: 885460/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:13:43,280] INFO: Step: 885461/1000000 Loss: 0.076 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 12:13:56,531] INFO: Step: 885462/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:14:09,556] INFO: Step: 885463/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:14:22,838] INFO: Step: 885464/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:14:36,136] INFO: Step: 885465/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:14:49,456] INFO: Step: 885466/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:15:02,658] INFO: Step: 885467/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:15:15,721] INFO: Step: 885468/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:15:28,944] INFO: Step: 885469/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:15:42,193] INFO: Step: 885470/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:15:55,583] INFO: Step: 885471/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:16:08,809] INFO: Step: 885472/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:16:22,070] INFO: Step: 885473/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:16:35,253] INFO: Step: 885474/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:16:48,471] INFO: Step: 885475/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:17:01,605] INFO: Step: 885476/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:17:14,812] INFO: Step: 885477/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:17:28,048] INFO: Step: 885478/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:17:41,344] INFO: Step: 885479/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:17:54,573] INFO: Step: 885480/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:18:07,896] INFO: Step: 885481/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:18:21,116] INFO: Step: 885482/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:18:34,340] INFO: Step: 885483/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:18:47,646] INFO: Step: 885484/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:19:00,796] INFO: Step: 885485/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:19:13,856] INFO: Step: 885486/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:19:26,983] INFO: Step: 885487/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:19:40,230] INFO: Step: 885488/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:19:53,294] INFO: Step: 885489/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:20:06,434] INFO: Step: 885490/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:20:19,471] INFO: Step: 885491/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:20:32,768] INFO: Step: 885492/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:20:46,037] INFO: Step: 885493/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:20:59,324] INFO: Step: 885494/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:21:12,528] INFO: Step: 885495/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:21:25,750] INFO: Step: 885496/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:21:39,056] INFO: Step: 885497/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:21:52,156] INFO: Step: 885498/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
12:22:05,493] INFO: Step: 885499/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:22:18,836] INFO: Step: 885500/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:22:18,836] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00885500.pth +[2024-06-09 12:22:24,738] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00885500.pth +[2024-06-09 12:22:36,860] INFO: Step: 885501/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:22:49,973] INFO: Step: 885502/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:23:03,286] INFO: Step: 885503/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:23:16,565] INFO: Step: 885504/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:23:29,783] INFO: Step: 885505/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:23:42,953] INFO: Step: 885506/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:23:56,244] INFO: Step: 885507/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:24:09,405] INFO: Step: 885508/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:24:22,663] INFO: Step: 885509/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:24:35,862] INFO: Step: 885510/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:24:49,058] INFO: Step: 885511/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:25:02,235] INFO: Step: 885512/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:25:15,401] INFO: Step: 885513/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:25:28,555] INFO: Step: 885514/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:25:41,872] INFO: Step: 885515/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:25:55,055] INFO: Step: 885516/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:26:08,131] INFO: Step: 885517/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:26:21,493] INFO: Step: 885518/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:26:34,714] INFO: Step: 885519/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:26:47,858] INFO: Step: 885520/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:27:01,065] INFO: Step: 885521/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:27:14,075] INFO: Step: 885522/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:27:27,271] INFO: Step: 885523/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:27:40,474] INFO: Step: 885524/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:27:53,785] INFO: Step: 885525/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:28:06,837] INFO: Step: 885526/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:28:20,057] INFO: Step: 885527/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:28:33,311] INFO: Step: 885528/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:28:46,476] INFO: Step: 885529/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:28:59,858] INFO: Step: 885530/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:29:13,287] INFO: Step: 885531/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:29:26,557] INFO: Step: 885532/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:29:39,693] INFO: Step: 885533/1000000 Loss: 0.083 scale: 65536.0 LR: 
0.0000300 +[2024-06-09 12:29:52,968] INFO: Step: 885534/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:30:06,223] INFO: Step: 885535/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:30:19,493] INFO: Step: 885536/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:30:32,695] INFO: Step: 885537/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:30:45,915] INFO: Step: 885538/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:30:59,161] INFO: Step: 885539/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:31:12,410] INFO: Step: 885540/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:31:25,722] INFO: Step: 885541/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:31:39,034] INFO: Step: 885542/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:31:52,171] INFO: Step: 885543/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:32:05,237] INFO: Step: 885544/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:32:18,492] INFO: Step: 885545/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:32:31,684] INFO: Step: 885546/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:32:44,994] INFO: Step: 885547/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:32:58,184] INFO: Step: 885548/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:33:11,477] INFO: Step: 885549/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:33:24,656] INFO: Step: 885550/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:33:37,958] INFO: Step: 885551/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:33:51,120] INFO: Step: 885552/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:34:04,323] INFO: Step: 885553/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:34:17,402] INFO: Step: 885554/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:34:30,706] INFO: Step: 885555/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:34:43,970] INFO: Step: 885556/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:34:57,109] INFO: Step: 885557/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:35:10,276] INFO: Step: 885558/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:35:23,371] INFO: Step: 885559/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:35:36,414] INFO: Step: 885560/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:35:49,642] INFO: Step: 885561/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:36:02,775] INFO: Step: 885562/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:36:16,020] INFO: Step: 885563/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:36:29,462] INFO: Step: 885564/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:36:42,724] INFO: Step: 885565/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:36:55,821] INFO: Step: 885566/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:37:09,084] INFO: Step: 885567/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:37:22,345] INFO: Step: 885568/1000000 Loss: 0.064 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:37:35,450] INFO: Step: 885569/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:37:48,737] INFO: Step: 885570/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:38:02,087] 
INFO: Step: 885571/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:38:15,552] INFO: Step: 885572/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:38:28,989] INFO: Step: 885573/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:38:42,229] INFO: Step: 885574/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:38:55,474] INFO: Step: 885575/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:39:08,640] INFO: Step: 885576/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:39:21,811] INFO: Step: 885577/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:39:35,013] INFO: Step: 885578/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:39:48,245] INFO: Step: 885579/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:40:01,396] INFO: Step: 885580/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:40:14,571] INFO: Step: 885581/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:40:27,693] INFO: Step: 885582/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:40:40,900] INFO: Step: 885583/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:40:54,151] INFO: Step: 885584/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:41:07,476] INFO: Step: 885585/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:41:20,713] INFO: Step: 885586/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:41:34,062] INFO: Step: 885587/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:41:47,202] INFO: Step: 885588/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:42:00,399] INFO: Step: 885589/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:42:13,570] INFO: Step: 885590/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:42:26,674] INFO: Step: 885591/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:42:39,672] INFO: Step: 885592/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:42:52,927] INFO: Step: 885593/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:43:06,207] INFO: Step: 885594/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:43:19,370] INFO: Step: 885595/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:43:32,749] INFO: Step: 885596/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:43:46,009] INFO: Step: 885597/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:43:59,251] INFO: Step: 885598/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:44:12,401] INFO: Step: 885599/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:44:25,894] INFO: Step: 885600/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:44:39,051] INFO: Step: 885601/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:44:52,331] INFO: Step: 885602/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:45:05,780] INFO: Step: 885603/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:45:18,912] INFO: Step: 885604/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:45:32,001] INFO: Step: 885605/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:45:45,163] INFO: Step: 885606/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:45:58,371] INFO: Step: 885607/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:46:11,606] INFO: Step: 885608/1000000 Loss: 0.084 
scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:46:24,870] INFO: Step: 885609/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:46:38,118] INFO: Step: 885610/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:46:51,201] INFO: Step: 885611/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:47:04,577] INFO: Step: 885612/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:47:17,703] INFO: Step: 885613/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:47:30,845] INFO: Step: 885614/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:47:43,909] INFO: Step: 885615/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:47:56,956] INFO: Step: 885616/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:48:10,090] INFO: Step: 885617/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:48:23,115] INFO: Step: 885618/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:48:36,338] INFO: Step: 885619/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:48:49,471] INFO: Step: 885620/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:49:02,681] INFO: Step: 885621/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:49:15,851] INFO: Step: 885622/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:49:29,055] INFO: Step: 885623/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:49:42,205] INFO: Step: 885624/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:49:55,416] INFO: Step: 885625/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:50:08,578] INFO: Step: 885626/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:50:21,987] INFO: Step: 885627/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:50:35,273] INFO: Step: 885628/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:50:48,508] INFO: Step: 885629/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:51:01,690] INFO: Step: 885630/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:51:14,806] INFO: Step: 885631/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:51:28,133] INFO: Step: 885632/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:51:41,189] INFO: Step: 885633/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:51:54,401] INFO: Step: 885634/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:52:07,534] INFO: Step: 885635/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:52:20,777] INFO: Step: 885636/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:52:33,925] INFO: Step: 885637/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:52:46,977] INFO: Step: 885638/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:53:00,204] INFO: Step: 885639/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:53:13,404] INFO: Step: 885640/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:53:26,687] INFO: Step: 885641/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:53:39,871] INFO: Step: 885642/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:53:53,185] INFO: Step: 885643/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:54:06,316] INFO: Step: 885644/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:54:19,510] INFO: Step: 885645/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 
+[2024-06-09 12:54:32,734] INFO: Step: 885646/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:54:46,038] INFO: Step: 885647/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:54:59,284] INFO: Step: 885648/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:55:12,466] INFO: Step: 885649/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:55:25,583] INFO: Step: 885650/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:55:38,992] INFO: Step: 885651/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:55:52,174] INFO: Step: 885652/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:56:05,471] INFO: Step: 885653/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:56:18,826] INFO: Step: 885654/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:56:32,096] INFO: Step: 885655/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:56:45,298] INFO: Step: 885656/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:56:58,591] INFO: Step: 885657/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:57:11,834] INFO: Step: 885658/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:57:24,954] INFO: Step: 885659/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:57:38,196] INFO: Step: 885660/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:57:51,431] INFO: Step: 885661/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:58:04,637] INFO: Step: 885662/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:58:17,875] INFO: Step: 885663/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:58:31,164] INFO: Step: 885664/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:58:44,390] INFO: Step: 885665/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:58:57,562] INFO: Step: 885666/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:59:10,802] INFO: Step: 885667/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:59:23,929] INFO: Step: 885668/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:59:37,246] INFO: Step: 885669/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 12:59:50,329] INFO: Step: 885670/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:00:03,321] INFO: Step: 885671/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:00:16,529] INFO: Step: 885672/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:00:29,712] INFO: Step: 885673/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:00:43,148] INFO: Step: 885674/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:00:56,317] INFO: Step: 885675/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:01:09,472] INFO: Step: 885676/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:01:22,659] INFO: Step: 885677/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:01:35,806] INFO: Step: 885678/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:01:49,066] INFO: Step: 885679/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:02:02,310] INFO: Step: 885680/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:02:15,560] INFO: Step: 885681/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:02:28,723] INFO: Step: 885682/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:02:41,870] INFO: Step: 
885683/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:02:54,899] INFO: Step: 885684/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:03:08,056] INFO: Step: 885685/1000000 Loss: 0.100 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:03:21,249] INFO: Step: 885686/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:03:34,485] INFO: Step: 885687/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:03:47,661] INFO: Step: 885688/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:04:00,807] INFO: Step: 885689/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:04:14,018] INFO: Step: 885690/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:04:27,059] INFO: Step: 885691/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:04:40,465] INFO: Step: 885692/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:04:53,666] INFO: Step: 885693/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:05:06,784] INFO: Step: 885694/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:05:20,001] INFO: Step: 885695/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:05:33,118] INFO: Step: 885696/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:05:46,258] INFO: Step: 885697/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:05:59,500] INFO: Step: 885698/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:06:12,905] INFO: Step: 885699/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:06:26,224] INFO: Step: 885700/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:06:39,260] INFO: Step: 885701/1000000 Loss: 0.104 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:06:52,474] INFO: Step: 885702/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:07:05,805] INFO: Step: 885703/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:07:18,996] INFO: Step: 885704/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:07:32,156] INFO: Step: 885705/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:07:45,352] INFO: Step: 885706/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:07:58,619] INFO: Step: 885707/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:08:11,836] INFO: Step: 885708/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:08:25,121] INFO: Step: 885709/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:08:38,305] INFO: Step: 885710/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:08:51,411] INFO: Step: 885711/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:09:04,662] INFO: Step: 885712/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:09:17,634] INFO: Step: 885713/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:09:30,746] INFO: Step: 885714/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:09:44,082] INFO: Step: 885715/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:09:57,249] INFO: Step: 885716/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:10:10,747] INFO: Step: 885717/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:10:23,936] INFO: Step: 885718/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:10:37,235] INFO: Step: 885719/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:10:50,589] INFO: Step: 885720/1000000 Loss: 0.083 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 13:11:03,883] INFO: Step: 885721/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:11:17,266] INFO: Step: 885722/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:11:30,607] INFO: Step: 885723/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:11:43,773] INFO: Step: 885724/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:11:57,072] INFO: Step: 885725/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:12:10,209] INFO: Step: 885726/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:12:23,483] INFO: Step: 885727/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:12:36,605] INFO: Step: 885728/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:12:49,857] INFO: Step: 885729/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:13:03,061] INFO: Step: 885730/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:13:16,425] INFO: Step: 885731/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:13:29,901] INFO: Step: 885732/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:13:43,070] INFO: Step: 885733/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:13:56,332] INFO: Step: 885734/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:14:09,613] INFO: Step: 885735/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:14:22,991] INFO: Step: 885736/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:14:36,157] INFO: Step: 885737/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:14:49,386] INFO: Step: 885738/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:15:02,592] INFO: Step: 885739/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:15:15,842] INFO: Step: 885740/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:15:29,132] INFO: Step: 885741/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:15:42,401] INFO: Step: 885742/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:15:55,709] INFO: Step: 885743/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:16:08,986] INFO: Step: 885744/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:16:22,257] INFO: Step: 885745/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:16:35,549] INFO: Step: 885746/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:16:48,865] INFO: Step: 885747/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:17:02,145] INFO: Step: 885748/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:17:15,393] INFO: Step: 885749/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:17:28,616] INFO: Step: 885750/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:17:41,976] INFO: Step: 885751/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:17:55,284] INFO: Step: 885752/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:18:08,343] INFO: Step: 885753/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:18:21,602] INFO: Step: 885754/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:18:34,723] INFO: Step: 885755/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:18:47,855] INFO: Step: 885756/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:19:01,047] INFO: Step: 885757/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
13:19:14,223] INFO: Step: 885758/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:19:27,460] INFO: Step: 885759/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:19:40,794] INFO: Step: 885760/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:19:54,172] INFO: Step: 885761/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:20:07,284] INFO: Step: 885762/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:20:20,564] INFO: Step: 885763/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:20:33,954] INFO: Step: 885764/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:20:47,470] INFO: Step: 885765/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:21:00,694] INFO: Step: 885766/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:21:14,070] INFO: Step: 885767/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:21:27,210] INFO: Step: 885768/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:21:40,535] INFO: Step: 885769/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:21:53,789] INFO: Step: 885770/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:22:07,058] INFO: Step: 885771/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:22:20,254] INFO: Step: 885772/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:22:33,602] INFO: Step: 885773/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:22:46,753] INFO: Step: 885774/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:23:00,044] INFO: Step: 885775/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:23:13,309] INFO: Step: 885776/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:23:26,436] INFO: Step: 885777/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:23:39,565] INFO: Step: 885778/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:23:52,751] INFO: Step: 885779/1000000 Loss: 0.098 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:24:06,085] INFO: Step: 885780/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:24:19,393] INFO: Step: 885781/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:24:32,581] INFO: Step: 885782/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:24:45,952] INFO: Step: 885783/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:24:59,286] INFO: Step: 885784/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:25:12,498] INFO: Step: 885785/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:25:25,770] INFO: Step: 885786/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:25:39,117] INFO: Step: 885787/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:25:52,450] INFO: Step: 885788/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:26:05,815] INFO: Step: 885789/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:26:19,143] INFO: Step: 885790/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:26:32,310] INFO: Step: 885791/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:26:45,597] INFO: Step: 885792/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:26:58,909] INFO: Step: 885793/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:27:12,184] INFO: Step: 885794/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:27:25,351] INFO: Step: 
885795/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:27:38,711] INFO: Step: 885796/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:27:51,817] INFO: Step: 885797/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:28:05,013] INFO: Step: 885798/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:28:18,276] INFO: Step: 885799/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:28:31,559] INFO: Step: 885800/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:28:44,810] INFO: Step: 885801/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:28:58,032] INFO: Step: 885802/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:29:11,135] INFO: Step: 885803/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:29:24,468] INFO: Step: 885804/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:29:37,658] INFO: Step: 885805/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:29:50,897] INFO: Step: 885806/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:30:04,250] INFO: Step: 885807/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:30:17,508] INFO: Step: 885808/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:30:30,702] INFO: Step: 885809/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:30:43,884] INFO: Step: 885810/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:30:57,116] INFO: Step: 885811/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:31:10,378] INFO: Step: 885812/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:31:23,578] INFO: Step: 885813/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:31:36,748] INFO: Step: 885814/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:31:49,804] INFO: Step: 885815/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:32:02,983] INFO: Step: 885816/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:32:16,266] INFO: Step: 885817/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:32:29,372] INFO: Step: 885818/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:32:42,666] INFO: Step: 885819/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:32:56,054] INFO: Step: 885820/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:33:09,343] INFO: Step: 885821/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:33:22,721] INFO: Step: 885822/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:33:35,974] INFO: Step: 885823/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:33:49,198] INFO: Step: 885824/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:34:02,365] INFO: Step: 885825/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:34:15,592] INFO: Step: 885826/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:34:28,885] INFO: Step: 885827/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:34:42,130] INFO: Step: 885828/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:34:55,458] INFO: Step: 885829/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:35:08,643] INFO: Step: 885830/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:35:21,897] INFO: Step: 885831/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:35:35,056] INFO: Step: 885832/1000000 Loss: 0.087 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 13:35:48,466] INFO: Step: 885833/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:36:01,716] INFO: Step: 885834/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:36:14,982] INFO: Step: 885835/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:36:28,181] INFO: Step: 885836/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:36:41,459] INFO: Step: 885837/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:36:54,667] INFO: Step: 885838/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:37:07,693] INFO: Step: 885839/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:37:20,909] INFO: Step: 885840/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:37:33,998] INFO: Step: 885841/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:37:47,166] INFO: Step: 885842/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:38:00,584] INFO: Step: 885843/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:38:14,029] INFO: Step: 885844/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:38:27,401] INFO: Step: 885845/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:38:40,599] INFO: Step: 885846/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:38:53,853] INFO: Step: 885847/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:39:07,159] INFO: Step: 885848/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:39:20,482] INFO: Step: 885849/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:39:33,538] INFO: Step: 885850/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:39:46,757] INFO: Step: 885851/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:40:00,164] INFO: Step: 885852/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:40:13,460] INFO: Step: 885853/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:40:26,771] INFO: Step: 885854/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:40:39,982] INFO: Step: 885855/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:40:53,161] INFO: Step: 885856/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:41:06,288] INFO: Step: 885857/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:41:19,523] INFO: Step: 885858/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:41:32,733] INFO: Step: 885859/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:41:45,849] INFO: Step: 885860/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:41:59,128] INFO: Step: 885861/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:42:12,470] INFO: Step: 885862/1000000 Loss: 0.106 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:42:25,785] INFO: Step: 885863/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:42:39,076] INFO: Step: 885864/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:42:52,226] INFO: Step: 885865/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:43:05,476] INFO: Step: 885866/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:43:18,633] INFO: Step: 885867/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:43:31,984] INFO: Step: 885868/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:43:44,925] INFO: Step: 885869/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
13:43:58,151] INFO: Step: 885870/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:44:11,424] INFO: Step: 885871/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:44:24,583] INFO: Step: 885872/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:44:37,767] INFO: Step: 885873/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:44:50,937] INFO: Step: 885874/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:45:04,147] INFO: Step: 885875/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:45:17,414] INFO: Step: 885876/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:45:30,665] INFO: Step: 885877/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:45:44,033] INFO: Step: 885878/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:45:57,392] INFO: Step: 885879/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:46:10,831] INFO: Step: 885880/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:46:24,111] INFO: Step: 885881/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:46:37,514] INFO: Step: 885882/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:46:50,611] INFO: Step: 885883/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:47:03,921] INFO: Step: 885884/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:47:17,155] INFO: Step: 885885/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:47:30,452] INFO: Step: 885886/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:47:43,690] INFO: Step: 885887/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:47:56,833] INFO: Step: 885888/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:48:10,352] INFO: Step: 885889/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:48:23,663] INFO: Step: 885890/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:48:37,164] INFO: Step: 885891/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:48:50,404] INFO: Step: 885892/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:49:03,583] INFO: Step: 885893/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:49:16,890] INFO: Step: 885894/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:49:30,092] INFO: Step: 885895/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:49:43,330] INFO: Step: 885896/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:49:56,548] INFO: Step: 885897/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:50:09,814] INFO: Step: 885898/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:50:23,042] INFO: Step: 885899/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:50:36,357] INFO: Step: 885900/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:50:49,688] INFO: Step: 885901/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:51:02,837] INFO: Step: 885902/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:51:16,027] INFO: Step: 885903/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:51:29,262] INFO: Step: 885904/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:51:42,499] INFO: Step: 885905/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:51:55,826] INFO: Step: 885906/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:52:09,007] INFO: Step: 
885907/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:52:22,247] INFO: Step: 885908/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:52:35,604] INFO: Step: 885909/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:52:48,859] INFO: Step: 885910/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:53:02,183] INFO: Step: 885911/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:53:15,433] INFO: Step: 885912/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:53:28,752] INFO: Step: 885913/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:53:41,927] INFO: Step: 885914/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:53:55,070] INFO: Step: 885915/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:54:08,505] INFO: Step: 885916/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:54:21,824] INFO: Step: 885917/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:54:35,280] INFO: Step: 885918/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:54:48,546] INFO: Step: 885919/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:55:01,847] INFO: Step: 885920/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:55:15,062] INFO: Step: 885921/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:55:28,264] INFO: Step: 885922/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:55:41,569] INFO: Step: 885923/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:55:54,883] INFO: Step: 885924/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:56:08,092] INFO: Step: 885925/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:56:21,312] INFO: Step: 885926/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:56:34,712] INFO: Step: 885927/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:56:47,918] INFO: Step: 885928/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:57:01,116] INFO: Step: 885929/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:57:14,358] INFO: Step: 885930/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:57:27,782] INFO: Step: 885931/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:57:40,897] INFO: Step: 885932/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:57:54,287] INFO: Step: 885933/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:58:07,488] INFO: Step: 885934/1000000 Loss: 0.108 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:58:20,611] INFO: Step: 885935/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:58:33,728] INFO: Step: 885936/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:58:46,938] INFO: Step: 885937/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:59:00,126] INFO: Step: 885938/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:59:13,285] INFO: Step: 885939/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:59:26,554] INFO: Step: 885940/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:59:39,868] INFO: Step: 885941/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 13:59:53,106] INFO: Step: 885942/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:00:06,402] INFO: Step: 885943/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:00:19,609] INFO: Step: 885944/1000000 Loss: 0.084 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 14:00:32,936] INFO: Step: 885945/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:00:46,287] INFO: Step: 885946/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:00:59,489] INFO: Step: 885947/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:01:12,680] INFO: Step: 885948/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:01:25,751] INFO: Step: 885949/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:01:39,140] INFO: Step: 885950/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:01:52,385] INFO: Step: 885951/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:02:05,539] INFO: Step: 885952/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:02:18,841] INFO: Step: 885953/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:02:32,105] INFO: Step: 885954/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:02:45,322] INFO: Step: 885955/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:02:58,717] INFO: Step: 885956/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:03:11,917] INFO: Step: 885957/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:03:25,209] INFO: Step: 885958/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:03:38,599] INFO: Step: 885959/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:03:51,841] INFO: Step: 885960/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:04:05,040] INFO: Step: 885961/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:04:18,251] INFO: Step: 885962/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:04:31,542] INFO: Step: 885963/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:04:44,871] INFO: Step: 885964/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:04:58,083] INFO: Step: 885965/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:05:11,290] INFO: Step: 885966/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:05:24,456] INFO: Step: 885967/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:05:37,902] INFO: Step: 885968/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:05:51,145] INFO: Step: 885969/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:06:04,368] INFO: Step: 885970/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:06:17,630] INFO: Step: 885971/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:06:30,916] INFO: Step: 885972/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:06:43,963] INFO: Step: 885973/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:06:57,294] INFO: Step: 885974/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:07:10,360] INFO: Step: 885975/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:07:23,571] INFO: Step: 885976/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:07:36,732] INFO: Step: 885977/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:07:49,946] INFO: Step: 885978/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:08:03,175] INFO: Step: 885979/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:08:16,181] INFO: Step: 885980/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:08:29,357] INFO: Step: 885981/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
14:08:42,545] INFO: Step: 885982/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:08:55,739] INFO: Step: 885983/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:09:09,025] INFO: Step: 885984/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:09:22,268] INFO: Step: 885985/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:09:35,526] INFO: Step: 885986/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:09:48,889] INFO: Step: 885987/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:10:02,215] INFO: Step: 885988/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:10:15,522] INFO: Step: 885989/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:10:28,789] INFO: Step: 885990/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:10:42,031] INFO: Step: 885991/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:10:55,274] INFO: Step: 885992/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:11:08,419] INFO: Step: 885993/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:11:21,627] INFO: Step: 885994/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:11:34,853] INFO: Step: 885995/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:11:48,045] INFO: Step: 885996/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:12:01,522] INFO: Step: 885997/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:12:14,881] INFO: Step: 885998/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:12:28,164] INFO: Step: 885999/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:12:41,485] INFO: Step: 886000/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:12:41,486] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00886000.pth +[2024-06-09 14:12:48,923] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00886000.pth +[2024-06-09 14:13:01,039] INFO: Step: 886001/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:13:14,334] INFO: Step: 886002/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:13:27,615] INFO: Step: 886003/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:13:40,972] INFO: Step: 886004/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:13:54,099] INFO: Step: 886005/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:14:07,237] INFO: Step: 886006/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:14:20,633] INFO: Step: 886007/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:14:33,855] INFO: Step: 886008/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:14:47,077] INFO: Step: 886009/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:15:00,341] INFO: Step: 886010/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:15:13,616] INFO: Step: 886011/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:15:26,800] INFO: Step: 886012/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:15:39,990] INFO: Step: 886013/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:15:53,289] INFO: Step: 886014/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:16:06,680] INFO: Step: 886015/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:16:19,826] INFO: Step: 886016/1000000 Loss: 0.079 scale: 65536.0 LR: 
0.0000300 +[2024-06-09 14:16:33,158] INFO: Step: 886017/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:16:46,341] INFO: Step: 886018/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:16:59,481] INFO: Step: 886019/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:17:12,868] INFO: Step: 886020/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:17:26,054] INFO: Step: 886021/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:17:39,241] INFO: Step: 886022/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:17:52,456] INFO: Step: 886023/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:18:05,669] INFO: Step: 886024/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:18:19,020] INFO: Step: 886025/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:18:32,436] INFO: Step: 886026/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:18:45,713] INFO: Step: 886027/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:18:59,004] INFO: Step: 886028/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:19:12,386] INFO: Step: 886029/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:19:25,587] INFO: Step: 886030/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:19:38,969] INFO: Step: 886031/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:19:52,170] INFO: Step: 886032/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:20:05,452] INFO: Step: 886033/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:20:18,677] INFO: Step: 886034/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:20:32,185] INFO: Step: 886035/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:20:45,489] INFO: Step: 886036/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:20:58,792] INFO: Step: 886037/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:21:12,010] INFO: Step: 886038/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:21:25,002] INFO: Step: 886039/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:21:38,388] INFO: Step: 886040/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:21:51,685] INFO: Step: 886041/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:22:04,993] INFO: Step: 886042/1000000 Loss: 0.095 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:22:18,293] INFO: Step: 886043/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:22:31,570] INFO: Step: 886044/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:22:44,772] INFO: Step: 886045/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:22:57,947] INFO: Step: 886046/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:23:11,315] INFO: Step: 886047/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:23:24,507] INFO: Step: 886048/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:23:37,754] INFO: Step: 886049/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:23:50,991] INFO: Step: 886050/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:24:04,083] INFO: Step: 886051/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:24:17,306] INFO: Step: 886052/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:24:30,547] INFO: Step: 886053/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:24:43,947] 
INFO: Step: 886054/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:24:57,169] INFO: Step: 886055/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:25:10,414] INFO: Step: 886056/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:25:23,623] INFO: Step: 886057/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:25:36,931] INFO: Step: 886058/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:25:50,444] INFO: Step: 886059/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:26:03,598] INFO: Step: 886060/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:26:16,786] INFO: Step: 886061/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:26:30,099] INFO: Step: 886062/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:26:43,261] INFO: Step: 886063/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:26:56,554] INFO: Step: 886064/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:27:09,874] INFO: Step: 886065/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:27:23,103] INFO: Step: 886066/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:27:36,330] INFO: Step: 886067/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:27:49,664] INFO: Step: 886068/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:28:02,799] INFO: Step: 886069/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:28:16,230] INFO: Step: 886070/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:28:29,542] INFO: Step: 886071/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:28:42,692] INFO: Step: 886072/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:28:55,844] INFO: Step: 886073/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:29:09,047] INFO: Step: 886074/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:29:22,226] INFO: Step: 886075/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:29:35,490] INFO: Step: 886076/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:29:48,683] INFO: Step: 886077/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:30:01,922] INFO: Step: 886078/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:30:15,164] INFO: Step: 886079/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:30:28,397] INFO: Step: 886080/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:30:41,795] INFO: Step: 886081/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:30:55,032] INFO: Step: 886082/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:31:08,523] INFO: Step: 886083/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:31:21,860] INFO: Step: 886084/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:31:35,249] INFO: Step: 886085/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:31:48,611] INFO: Step: 886086/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:32:01,826] INFO: Step: 886087/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:32:15,163] INFO: Step: 886088/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:32:28,387] INFO: Step: 886089/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:32:41,717] INFO: Step: 886090/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:32:55,029] INFO: Step: 886091/1000000 Loss: 0.075 
scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:33:08,369] INFO: Step: 886092/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:33:21,695] INFO: Step: 886093/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:33:34,976] INFO: Step: 886094/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:33:48,130] INFO: Step: 886095/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:34:01,428] INFO: Step: 886096/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:34:14,656] INFO: Step: 886097/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:34:27,950] INFO: Step: 886098/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:34:40,983] INFO: Step: 886099/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:34:54,351] INFO: Step: 886100/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:35:07,576] INFO: Step: 886101/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:35:20,856] INFO: Step: 886102/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:35:34,142] INFO: Step: 886103/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:35:47,697] INFO: Step: 886104/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:36:01,102] INFO: Step: 886105/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:36:14,488] INFO: Step: 886106/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:36:27,727] INFO: Step: 886107/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:36:40,898] INFO: Step: 886108/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:36:54,214] INFO: Step: 886109/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:37:07,453] INFO: Step: 886110/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:37:20,809] INFO: Step: 886111/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:37:34,114] INFO: Step: 886112/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:37:47,487] INFO: Step: 886113/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:38:00,686] INFO: Step: 886114/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:38:14,063] INFO: Step: 886115/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:38:27,203] INFO: Step: 886116/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:38:40,584] INFO: Step: 886117/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:38:53,905] INFO: Step: 886118/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:39:07,267] INFO: Step: 886119/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:39:20,412] INFO: Step: 886120/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:39:33,549] INFO: Step: 886121/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:39:46,826] INFO: Step: 886122/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:40:00,396] INFO: Step: 886123/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:40:13,756] INFO: Step: 886124/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:40:27,082] INFO: Step: 886125/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:40:40,406] INFO: Step: 886126/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:40:53,577] INFO: Step: 886127/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:41:06,922] INFO: Step: 886128/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 
+[2024-06-09 14:41:20,201] INFO: Step: 886129/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:41:33,529] INFO: Step: 886130/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:41:46,858] INFO: Step: 886131/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:42:00,070] INFO: Step: 886132/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:42:13,253] INFO: Step: 886133/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:42:26,493] INFO: Step: 886134/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:42:39,743] INFO: Step: 886135/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:42:53,046] INFO: Step: 886136/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:43:06,293] INFO: Step: 886137/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:43:19,715] INFO: Step: 886138/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:43:32,969] INFO: Step: 886139/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:43:46,239] INFO: Step: 886140/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:43:59,469] INFO: Step: 886141/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:44:12,809] INFO: Step: 886142/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:44:26,086] INFO: Step: 886143/1000000 Loss: 0.066 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:44:39,312] INFO: Step: 886144/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:44:52,623] INFO: Step: 886145/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:45:05,877] INFO: Step: 886146/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:45:19,117] INFO: Step: 886147/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:45:32,264] INFO: Step: 886148/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:45:45,433] INFO: Step: 886149/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:45:58,782] INFO: Step: 886150/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:46:12,158] INFO: Step: 886151/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:46:25,341] INFO: Step: 886152/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:46:38,494] INFO: Step: 886153/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:46:51,731] INFO: Step: 886154/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:47:05,043] INFO: Step: 886155/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:47:18,304] INFO: Step: 886156/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:47:31,614] INFO: Step: 886157/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:47:45,022] INFO: Step: 886158/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:47:58,210] INFO: Step: 886159/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:48:11,582] INFO: Step: 886160/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:48:24,932] INFO: Step: 886161/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:48:38,231] INFO: Step: 886162/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:48:51,407] INFO: Step: 886163/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:49:04,742] INFO: Step: 886164/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:49:17,927] INFO: Step: 886165/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:49:31,225] INFO: Step: 
886166/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:49:44,448] INFO: Step: 886167/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:49:57,658] INFO: Step: 886168/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:50:10,994] INFO: Step: 886169/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:50:24,404] INFO: Step: 886170/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:50:37,757] INFO: Step: 886171/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:50:51,167] INFO: Step: 886172/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:51:04,365] INFO: Step: 886173/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:51:17,666] INFO: Step: 886174/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:51:30,957] INFO: Step: 886175/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:51:44,155] INFO: Step: 886176/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:51:57,489] INFO: Step: 886177/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:52:10,628] INFO: Step: 886178/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:52:24,020] INFO: Step: 886179/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:52:37,554] INFO: Step: 886180/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:52:51,015] INFO: Step: 886181/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:53:04,397] INFO: Step: 886182/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:53:17,750] INFO: Step: 886183/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:53:31,136] INFO: Step: 886184/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:53:44,467] INFO: Step: 886185/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:53:57,748] INFO: Step: 886186/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:54:11,102] INFO: Step: 886187/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:54:24,251] INFO: Step: 886188/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:54:37,638] INFO: Step: 886189/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:54:50,937] INFO: Step: 886190/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:55:04,096] INFO: Step: 886191/1000000 Loss: 0.099 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:55:17,278] INFO: Step: 886192/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:55:30,582] INFO: Step: 886193/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:55:43,774] INFO: Step: 886194/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:55:57,016] INFO: Step: 886195/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:56:10,378] INFO: Step: 886196/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:56:23,614] INFO: Step: 886197/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:56:36,860] INFO: Step: 886198/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:56:50,174] INFO: Step: 886199/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:57:03,423] INFO: Step: 886200/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:57:16,718] INFO: Step: 886201/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:57:30,147] INFO: Step: 886202/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:57:43,555] INFO: Step: 886203/1000000 Loss: 0.089 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 14:57:56,761] INFO: Step: 886204/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:58:09,924] INFO: Step: 886205/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:58:23,270] INFO: Step: 886206/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:58:36,450] INFO: Step: 886207/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:58:49,685] INFO: Step: 886208/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:59:03,060] INFO: Step: 886209/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:59:16,309] INFO: Step: 886210/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:59:29,684] INFO: Step: 886211/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:59:43,050] INFO: Step: 886212/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 14:59:56,423] INFO: Step: 886213/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:00:09,719] INFO: Step: 886214/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:00:22,858] INFO: Step: 886215/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:00:36,220] INFO: Step: 886216/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:00:49,402] INFO: Step: 886217/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:01:02,712] INFO: Step: 886218/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:01:15,915] INFO: Step: 886219/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:01:29,205] INFO: Step: 886220/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:01:42,266] INFO: Step: 886221/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:01:55,605] INFO: Step: 886222/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:02:08,879] INFO: Step: 886223/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:02:22,135] INFO: Step: 886224/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:02:35,349] INFO: Step: 886225/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:02:48,664] INFO: Step: 886226/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:03:02,058] INFO: Step: 886227/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:03:15,300] INFO: Step: 886228/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:03:28,469] INFO: Step: 886229/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:03:41,802] INFO: Step: 886230/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:03:55,073] INFO: Step: 886231/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:04:08,408] INFO: Step: 886232/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:04:21,730] INFO: Step: 886233/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:04:34,855] INFO: Step: 886234/1000000 Loss: 0.063 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:04:48,055] INFO: Step: 886235/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:05:01,431] INFO: Step: 886236/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:05:14,669] INFO: Step: 886237/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:05:28,039] INFO: Step: 886238/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:05:41,308] INFO: Step: 886239/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:05:54,674] INFO: Step: 886240/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
15:06:08,002] INFO: Step: 886241/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:06:21,401] INFO: Step: 886242/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:06:34,695] INFO: Step: 886243/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:06:47,926] INFO: Step: 886244/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:07:01,048] INFO: Step: 886245/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:07:14,237] INFO: Step: 886246/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:07:27,550] INFO: Step: 886247/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:07:40,962] INFO: Step: 886248/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:07:54,071] INFO: Step: 886249/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:08:07,484] INFO: Step: 886250/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:08:20,821] INFO: Step: 886251/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:08:34,131] INFO: Step: 886252/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:08:47,408] INFO: Step: 886253/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:09:00,873] INFO: Step: 886254/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:09:14,027] INFO: Step: 886255/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:09:27,339] INFO: Step: 886256/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:09:40,625] INFO: Step: 886257/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:09:53,878] INFO: Step: 886258/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:10:07,116] INFO: Step: 886259/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:10:20,366] INFO: Step: 886260/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:10:33,652] INFO: Step: 886261/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:10:46,911] INFO: Step: 886262/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:11:00,128] INFO: Step: 886263/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:11:13,406] INFO: Step: 886264/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:11:26,689] INFO: Step: 886265/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:11:39,935] INFO: Step: 886266/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:11:53,276] INFO: Step: 886267/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:12:06,552] INFO: Step: 886268/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:12:19,678] INFO: Step: 886269/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:12:32,835] INFO: Step: 886270/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:12:46,035] INFO: Step: 886271/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:12:59,413] INFO: Step: 886272/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:13:12,772] INFO: Step: 886273/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:13:26,027] INFO: Step: 886274/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:13:39,464] INFO: Step: 886275/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:13:52,628] INFO: Step: 886276/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:14:05,866] INFO: Step: 886277/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:14:19,142] INFO: Step: 
886278/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:14:32,310] INFO: Step: 886279/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:14:45,491] INFO: Step: 886280/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:14:58,833] INFO: Step: 886281/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:15:12,041] INFO: Step: 886282/1000000 Loss: 0.064 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:15:25,331] INFO: Step: 886283/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:15:38,701] INFO: Step: 886284/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:15:52,042] INFO: Step: 886285/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:16:05,352] INFO: Step: 886286/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:16:18,590] INFO: Step: 886287/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:16:32,192] INFO: Step: 886288/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:16:45,493] INFO: Step: 886289/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:16:58,820] INFO: Step: 886290/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:17:12,024] INFO: Step: 886291/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:17:25,396] INFO: Step: 886292/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:17:38,636] INFO: Step: 886293/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:17:51,930] INFO: Step: 886294/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:18:05,203] INFO: Step: 886295/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:18:18,372] INFO: Step: 886296/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:18:31,484] INFO: Step: 886297/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:18:44,734] INFO: Step: 886298/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:18:57,899] INFO: Step: 886299/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:19:11,190] INFO: Step: 886300/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:19:24,420] INFO: Step: 886301/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:19:37,685] INFO: Step: 886302/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:19:51,055] INFO: Step: 886303/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:20:04,429] INFO: Step: 886304/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:20:17,681] INFO: Step: 886305/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:20:31,152] INFO: Step: 886306/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:20:44,494] INFO: Step: 886307/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:20:57,848] INFO: Step: 886308/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:21:11,063] INFO: Step: 886309/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:21:24,316] INFO: Step: 886310/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:21:37,506] INFO: Step: 886311/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:21:50,862] INFO: Step: 886312/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:22:04,259] INFO: Step: 886313/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:22:17,475] INFO: Step: 886314/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:22:30,772] INFO: Step: 886315/1000000 Loss: 0.080 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 15:22:43,981] INFO: Step: 886316/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:22:57,294] INFO: Step: 886317/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:23:10,461] INFO: Step: 886318/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:23:23,577] INFO: Step: 886319/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:23:36,818] INFO: Step: 886320/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:23:49,587] INFO: Step: 886321/1000000 Loss: 0.064 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:24:02,855] INFO: Step: 886322/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:24:16,284] INFO: Step: 886323/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:24:29,542] INFO: Step: 886324/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:24:42,741] INFO: Step: 886325/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:24:55,990] INFO: Step: 886326/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:25:09,022] INFO: Step: 886327/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:25:22,315] INFO: Step: 886328/1000000 Loss: 0.096 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:25:35,584] INFO: Step: 886329/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:25:48,921] INFO: Step: 886330/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:26:02,146] INFO: Step: 886331/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:26:15,336] INFO: Step: 886332/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:26:28,468] INFO: Step: 886333/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:26:41,634] INFO: Step: 886334/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:26:54,846] INFO: Step: 886335/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:27:08,285] INFO: Step: 886336/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:27:21,532] INFO: Step: 886337/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:27:34,677] INFO: Step: 886338/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:27:47,899] INFO: Step: 886339/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:28:01,156] INFO: Step: 886340/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:28:14,480] INFO: Step: 886341/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:28:27,629] INFO: Step: 886342/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:28:40,755] INFO: Step: 886343/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:28:54,129] INFO: Step: 886344/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:29:07,394] INFO: Step: 886345/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:29:20,753] INFO: Step: 886346/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:29:34,237] INFO: Step: 886347/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:29:47,582] INFO: Step: 886348/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:30:00,953] INFO: Step: 886349/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:30:14,204] INFO: Step: 886350/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:30:27,448] INFO: Step: 886351/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:30:40,874] INFO: Step: 886352/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
15:30:54,036] INFO: Step: 886353/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:31:07,202] INFO: Step: 886354/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:31:20,319] INFO: Step: 886355/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:31:33,575] INFO: Step: 886356/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:31:46,866] INFO: Step: 886357/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:32:00,153] INFO: Step: 886358/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:32:13,448] INFO: Step: 886359/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:32:26,574] INFO: Step: 886360/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:32:39,917] INFO: Step: 886361/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:32:53,239] INFO: Step: 886362/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:33:06,521] INFO: Step: 886363/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:33:19,567] INFO: Step: 886364/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:33:32,875] INFO: Step: 886365/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:33:46,067] INFO: Step: 886366/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:33:59,371] INFO: Step: 886367/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:34:12,625] INFO: Step: 886368/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:34:25,841] INFO: Step: 886369/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:34:39,045] INFO: Step: 886370/1000000 Loss: 0.065 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:34:52,443] INFO: Step: 886371/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:35:05,824] INFO: Step: 886372/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:35:19,097] INFO: Step: 886373/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:35:32,249] INFO: Step: 886374/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:35:45,471] INFO: Step: 886375/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:35:58,647] INFO: Step: 886376/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:36:11,938] INFO: Step: 886377/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:36:25,186] INFO: Step: 886378/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:36:38,557] INFO: Step: 886379/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:36:51,807] INFO: Step: 886380/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:37:04,891] INFO: Step: 886381/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:37:18,058] INFO: Step: 886382/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:37:31,315] INFO: Step: 886383/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:37:44,584] INFO: Step: 886384/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:37:57,829] INFO: Step: 886385/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:38:11,103] INFO: Step: 886386/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:38:24,373] INFO: Step: 886387/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:38:37,677] INFO: Step: 886388/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:38:50,774] INFO: Step: 886389/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:39:04,154] INFO: Step: 
886390/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:39:17,323] INFO: Step: 886391/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:39:30,512] INFO: Step: 886392/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:39:43,844] INFO: Step: 886393/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:39:57,154] INFO: Step: 886394/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:40:10,407] INFO: Step: 886395/1000000 Loss: 0.064 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:40:23,621] INFO: Step: 886396/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:40:36,947] INFO: Step: 886397/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:40:50,304] INFO: Step: 886398/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:41:03,487] INFO: Step: 886399/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:41:16,881] INFO: Step: 886400/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:41:30,042] INFO: Step: 886401/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:41:43,394] INFO: Step: 886402/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:41:56,802] INFO: Step: 886403/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:42:10,210] INFO: Step: 886404/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:42:23,419] INFO: Step: 886405/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:42:36,718] INFO: Step: 886406/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:42:49,941] INFO: Step: 886407/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:43:03,195] INFO: Step: 886408/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:43:16,587] INFO: Step: 886409/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:43:29,754] INFO: Step: 886410/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:43:43,223] INFO: Step: 886411/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:43:56,568] INFO: Step: 886412/1000000 Loss: 0.067 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:44:09,696] INFO: Step: 886413/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:44:23,311] INFO: Step: 886414/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:44:36,574] INFO: Step: 886415/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:44:49,829] INFO: Step: 886416/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:45:03,083] INFO: Step: 886417/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:45:16,333] INFO: Step: 886418/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:45:29,844] INFO: Step: 886419/1000000 Loss: 0.070 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:45:43,277] INFO: Step: 886420/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:45:56,693] INFO: Step: 886421/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:46:10,106] INFO: Step: 886422/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:46:23,289] INFO: Step: 886423/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:46:36,817] INFO: Step: 886424/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:46:50,076] INFO: Step: 886425/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:47:03,357] INFO: Step: 886426/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:47:16,608] INFO: Step: 886427/1000000 Loss: 0.084 scale: 
65536.0 LR: 0.0000300 +[2024-06-09 15:47:29,799] INFO: Step: 886428/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:47:42,992] INFO: Step: 886429/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:47:56,423] INFO: Step: 886430/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:48:09,636] INFO: Step: 886431/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:48:22,860] INFO: Step: 886432/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:48:36,281] INFO: Step: 886433/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:48:49,757] INFO: Step: 886434/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:49:03,065] INFO: Step: 886435/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:49:16,351] INFO: Step: 886436/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:49:29,530] INFO: Step: 886437/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:49:42,822] INFO: Step: 886438/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:49:56,077] INFO: Step: 886439/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:50:09,235] INFO: Step: 886440/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:50:22,605] INFO: Step: 886441/1000000 Loss: 0.084 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:50:36,057] INFO: Step: 886442/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:50:49,467] INFO: Step: 886443/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:51:02,723] INFO: Step: 886444/1000000 Loss: 0.094 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:51:15,934] INFO: Step: 886445/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:51:29,378] INFO: Step: 886446/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:51:42,617] INFO: Step: 886447/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:51:55,699] INFO: Step: 886448/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:52:09,033] INFO: Step: 886449/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:52:22,219] INFO: Step: 886450/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:52:35,553] INFO: Step: 886451/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:52:48,922] INFO: Step: 886452/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:53:02,219] INFO: Step: 886453/1000000 Loss: 0.091 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:53:15,402] INFO: Step: 886454/1000000 Loss: 0.078 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:53:28,725] INFO: Step: 886455/1000000 Loss: 0.071 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:53:41,860] INFO: Step: 886456/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:53:55,204] INFO: Step: 886457/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:54:08,466] INFO: Step: 886458/1000000 Loss: 0.088 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:54:21,752] INFO: Step: 886459/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:54:34,855] INFO: Step: 886460/1000000 Loss: 0.086 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:54:48,179] INFO: Step: 886461/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:55:01,523] INFO: Step: 886462/1000000 Loss: 0.076 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:55:14,632] INFO: Step: 886463/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:55:27,835] INFO: Step: 886464/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 
15:55:41,144] INFO: Step: 886465/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:55:54,264] INFO: Step: 886466/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:56:07,524] INFO: Step: 886467/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:56:20,879] INFO: Step: 886468/1000000 Loss: 0.069 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:56:34,168] INFO: Step: 886469/1000000 Loss: 0.073 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:56:47,491] INFO: Step: 886470/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:57:00,657] INFO: Step: 886471/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:57:13,793] INFO: Step: 886472/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:57:26,934] INFO: Step: 886473/1000000 Loss: 0.068 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:57:39,955] INFO: Step: 886474/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:57:53,153] INFO: Step: 886475/1000000 Loss: 0.089 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:58:06,318] INFO: Step: 886476/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:58:19,578] INFO: Step: 886477/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:58:32,506] INFO: Step: 886478/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:58:45,743] INFO: Step: 886479/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:58:59,011] INFO: Step: 886480/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:59:12,389] INFO: Step: 886481/1000000 Loss: 0.075 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:59:25,636] INFO: Step: 886482/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:59:38,948] INFO: Step: 886483/1000000 Loss: 0.077 scale: 65536.0 LR: 0.0000300 +[2024-06-09 15:59:52,301] INFO: Step: 886484/1000000 Loss: 0.079 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:00:05,649] INFO: Step: 886485/1000000 Loss: 0.090 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:00:19,027] INFO: Step: 886486/1000000 Loss: 0.074 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:00:32,283] INFO: Step: 886487/1000000 Loss: 0.093 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:00:45,446] INFO: Step: 886488/1000000 Loss: 0.087 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:00:58,935] INFO: Step: 886489/1000000 Loss: 0.082 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:01:12,104] INFO: Step: 886490/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:01:25,350] INFO: Step: 886491/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:01:38,638] INFO: Step: 886492/1000000 Loss: 0.080 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:01:51,937] INFO: Step: 886493/1000000 Loss: 0.083 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:02:05,417] INFO: Step: 886494/1000000 Loss: 0.081 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:02:18,635] INFO: Step: 886495/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:02:32,090] INFO: Step: 886496/1000000 Loss: 0.092 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:02:45,347] INFO: Step: 886497/1000000 Loss: 0.085 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:02:58,285] INFO: Step: 886498/1000000 Loss: 0.072 scale: 65536.0 LR: 0.0000300 +[2024-06-09 16:03:11,629] INFO: Step: 886499/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:03:25,008] INFO: Step: 886500/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:03:25,009] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00886500.pth +[2024-06-09 
16:03:32,140] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00886500.pth +[2024-06-09 16:03:44,147] INFO: Step: 886501/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:03:57,355] INFO: Step: 886502/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:04:10,593] INFO: Step: 886503/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:04:23,897] INFO: Step: 886504/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:04:37,290] INFO: Step: 886505/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:04:50,636] INFO: Step: 886506/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:05:03,651] INFO: Step: 886507/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:05:16,930] INFO: Step: 886508/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:05:30,129] INFO: Step: 886509/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:05:43,629] INFO: Step: 886510/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:05:56,875] INFO: Step: 886511/1000000 Loss: 0.065 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:06:10,228] INFO: Step: 886512/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:06:23,695] INFO: Step: 886513/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:06:36,999] INFO: Step: 886514/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:06:50,505] INFO: Step: 886515/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:07:03,852] INFO: Step: 886516/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:07:17,160] INFO: Step: 886517/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:07:30,512] INFO: Step: 886518/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:07:43,785] INFO: Step: 886519/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:07:57,107] INFO: Step: 886520/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:08:10,329] INFO: Step: 886521/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:08:23,719] INFO: Step: 886522/1000000 Loss: 0.099 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:08:36,986] INFO: Step: 886523/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:08:50,243] INFO: Step: 886524/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:09:03,576] INFO: Step: 886525/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:09:16,875] INFO: Step: 886526/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:09:30,237] INFO: Step: 886527/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:09:43,461] INFO: Step: 886528/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:09:56,882] INFO: Step: 886529/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:10:10,360] INFO: Step: 886530/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:10:23,741] INFO: Step: 886531/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:10:36,862] INFO: Step: 886532/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:10:50,129] INFO: Step: 886533/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:11:03,439] INFO: Step: 886534/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:11:16,659] INFO: Step: 886535/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:11:29,944] INFO: Step: 886536/1000000 Loss: 0.087 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:11:43,311] INFO: Step: 886537/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:11:56,571] INFO: Step: 886538/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:12:09,817] INFO: Step: 886539/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:12:23,174] INFO: Step: 886540/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:12:36,369] INFO: Step: 886541/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:12:49,590] INFO: Step: 886542/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:13:02,913] INFO: Step: 886543/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:13:16,187] INFO: Step: 886544/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:13:29,322] INFO: Step: 886545/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:13:42,711] INFO: Step: 886546/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:13:56,053] INFO: Step: 886547/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:14:09,388] INFO: Step: 886548/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:14:22,653] INFO: Step: 886549/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:14:35,991] INFO: Step: 886550/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:14:49,176] INFO: Step: 886551/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:15:02,455] INFO: Step: 886552/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:15:15,553] INFO: Step: 886553/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:15:28,714] INFO: Step: 886554/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:15:41,967] INFO: Step: 886555/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:15:55,279] INFO: Step: 886556/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:16:08,511] INFO: Step: 886557/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:16:21,930] INFO: Step: 886558/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:16:35,377] INFO: Step: 886559/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:16:48,609] INFO: Step: 886560/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:17:01,845] INFO: Step: 886561/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:17:15,235] INFO: Step: 886562/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:17:28,594] INFO: Step: 886563/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:17:41,833] INFO: Step: 886564/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:17:55,113] INFO: Step: 886565/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:18:08,334] INFO: Step: 886566/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:18:21,571] INFO: Step: 886567/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:18:34,800] INFO: Step: 886568/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:18:48,285] INFO: Step: 886569/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:19:01,540] INFO: Step: 886570/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:19:14,736] INFO: Step: 886571/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:19:27,986] INFO: Step: 886572/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:19:41,258] INFO: Step: 886573/1000000 Loss: 0.083 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:19:54,668] INFO: Step: 886574/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:20:08,148] INFO: Step: 886575/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:20:21,839] INFO: Step: 886576/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:20:35,073] INFO: Step: 886577/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:20:48,388] INFO: Step: 886578/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:21:01,616] INFO: Step: 886579/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:21:14,781] INFO: Step: 886580/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:21:28,044] INFO: Step: 886581/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:21:41,273] INFO: Step: 886582/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:21:54,456] INFO: Step: 886583/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:22:07,799] INFO: Step: 886584/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:22:21,407] INFO: Step: 886585/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:22:34,946] INFO: Step: 886586/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:22:48,378] INFO: Step: 886587/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:23:01,656] INFO: Step: 886588/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:23:14,935] INFO: Step: 886589/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:23:28,297] INFO: Step: 886590/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:23:41,535] INFO: Step: 886591/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:23:54,941] INFO: Step: 886592/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:24:08,250] INFO: Step: 886593/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:24:21,582] INFO: Step: 886594/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:24:34,867] INFO: Step: 886595/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:24:48,193] INFO: Step: 886596/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:25:01,554] INFO: Step: 886597/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:25:14,749] INFO: Step: 886598/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:25:28,086] INFO: Step: 886599/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:25:41,395] INFO: Step: 886600/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:25:54,806] INFO: Step: 886601/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:26:08,214] INFO: Step: 886602/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:26:21,440] INFO: Step: 886603/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:26:34,798] INFO: Step: 886604/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:26:48,123] INFO: Step: 886605/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:27:01,424] INFO: Step: 886606/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:27:14,838] INFO: Step: 886607/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:27:28,087] INFO: Step: 886608/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:27:41,280] INFO: Step: 886609/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:27:54,625] INFO: Step: 886610/1000000 Loss: 0.071 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:28:08,060] INFO: Step: 886611/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:28:21,336] INFO: Step: 886612/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:28:34,651] INFO: Step: 886613/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:28:47,815] INFO: Step: 886614/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:29:01,018] INFO: Step: 886615/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:29:14,259] INFO: Step: 886616/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:29:27,570] INFO: Step: 886617/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:29:40,878] INFO: Step: 886618/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:29:54,101] INFO: Step: 886619/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:30:07,470] INFO: Step: 886620/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:30:20,789] INFO: Step: 886621/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:30:34,027] INFO: Step: 886622/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:30:47,405] INFO: Step: 886623/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:31:00,681] INFO: Step: 886624/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:31:14,153] INFO: Step: 886625/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:31:27,437] INFO: Step: 886626/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:31:40,619] INFO: Step: 886627/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:31:53,727] INFO: Step: 886628/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:32:06,938] INFO: Step: 886629/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:32:20,228] INFO: Step: 886630/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:32:33,435] INFO: Step: 886631/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:32:46,801] INFO: Step: 886632/1000000 Loss: 0.103 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:33:00,198] INFO: Step: 886633/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:33:13,392] INFO: Step: 886634/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:33:26,781] INFO: Step: 886635/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:33:39,957] INFO: Step: 886636/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:33:53,114] INFO: Step: 886637/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:34:06,263] INFO: Step: 886638/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:34:19,520] INFO: Step: 886639/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:34:32,717] INFO: Step: 886640/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:34:45,917] INFO: Step: 886641/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:34:59,385] INFO: Step: 886642/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:35:12,556] INFO: Step: 886643/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:35:25,779] INFO: Step: 886644/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:35:39,132] INFO: Step: 886645/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:35:52,483] INFO: Step: 886646/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:36:05,704] INFO: Step: 886647/1000000 Loss: 0.079 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:36:19,042] INFO: Step: 886648/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:36:32,464] INFO: Step: 886649/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:36:45,748] INFO: Step: 886650/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:36:58,945] INFO: Step: 886651/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:37:12,180] INFO: Step: 886652/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:37:25,389] INFO: Step: 886653/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:37:38,561] INFO: Step: 886654/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:37:51,890] INFO: Step: 886655/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:38:05,285] INFO: Step: 886656/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:38:18,556] INFO: Step: 886657/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:38:31,789] INFO: Step: 886658/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:38:45,251] INFO: Step: 886659/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:38:58,609] INFO: Step: 886660/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:39:11,885] INFO: Step: 886661/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:39:25,225] INFO: Step: 886662/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:39:38,636] INFO: Step: 886663/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:39:51,874] INFO: Step: 886664/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:40:05,127] INFO: Step: 886665/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:40:18,327] INFO: Step: 886666/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:40:31,604] INFO: Step: 886667/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:40:45,030] INFO: Step: 886668/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:40:58,312] INFO: Step: 886669/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:41:11,577] INFO: Step: 886670/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:41:24,922] INFO: Step: 886671/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:41:38,101] INFO: Step: 886672/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:41:51,424] INFO: Step: 886673/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:42:04,801] INFO: Step: 886674/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:42:18,142] INFO: Step: 886675/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:42:31,202] INFO: Step: 886676/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:42:44,310] INFO: Step: 886677/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:42:57,606] INFO: Step: 886678/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:43:10,983] INFO: Step: 886679/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:43:24,286] INFO: Step: 886680/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:43:37,609] INFO: Step: 886681/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:43:50,995] INFO: Step: 886682/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:44:04,243] INFO: Step: 886683/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:44:17,500] INFO: Step: 886684/1000000 Loss: 0.076 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:44:31,001] INFO: Step: 886685/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:44:44,258] INFO: Step: 886686/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:44:57,712] INFO: Step: 886687/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:45:11,042] INFO: Step: 886688/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:45:24,337] INFO: Step: 886689/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:45:37,525] INFO: Step: 886690/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:45:50,913] INFO: Step: 886691/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:46:03,934] INFO: Step: 886692/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:46:17,223] INFO: Step: 886693/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:46:30,602] INFO: Step: 886694/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:46:43,903] INFO: Step: 886695/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:46:57,084] INFO: Step: 886696/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:47:10,516] INFO: Step: 886697/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:47:23,670] INFO: Step: 886698/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:47:36,870] INFO: Step: 886699/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:47:50,150] INFO: Step: 886700/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:48:03,406] INFO: Step: 886701/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:48:16,651] INFO: Step: 886702/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:48:29,890] INFO: Step: 886703/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:48:43,109] INFO: Step: 886704/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:48:56,331] INFO: Step: 886705/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:49:09,660] INFO: Step: 886706/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:49:23,232] INFO: Step: 886707/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:49:36,260] INFO: Step: 886708/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:49:49,393] INFO: Step: 886709/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:50:02,694] INFO: Step: 886710/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:50:16,011] INFO: Step: 886711/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:50:29,214] INFO: Step: 886712/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:50:42,433] INFO: Step: 886713/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:50:55,707] INFO: Step: 886714/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:51:08,785] INFO: Step: 886715/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:51:22,158] INFO: Step: 886716/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:51:35,474] INFO: Step: 886717/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:51:48,746] INFO: Step: 886718/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:52:01,919] INFO: Step: 886719/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:52:15,347] INFO: Step: 886720/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:52:28,729] INFO: Step: 886721/1000000 Loss: 0.074 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 16:52:42,017] INFO: Step: 886722/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:52:55,150] INFO: Step: 886723/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:53:08,475] INFO: Step: 886724/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:53:21,866] INFO: Step: 886725/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:53:35,275] INFO: Step: 886726/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:53:48,473] INFO: Step: 886727/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:54:01,923] INFO: Step: 886728/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:54:15,131] INFO: Step: 886729/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:54:28,498] INFO: Step: 886730/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:54:41,790] INFO: Step: 886731/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:54:55,210] INFO: Step: 886732/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:55:08,477] INFO: Step: 886733/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:55:21,643] INFO: Step: 886734/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:55:34,912] INFO: Step: 886735/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:55:48,456] INFO: Step: 886736/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:56:01,861] INFO: Step: 886737/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:56:15,379] INFO: Step: 886738/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:56:28,853] INFO: Step: 886739/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:56:42,198] INFO: Step: 886740/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:56:55,547] INFO: Step: 886741/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:57:08,778] INFO: Step: 886742/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:57:22,050] INFO: Step: 886743/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:57:35,346] INFO: Step: 886744/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:57:48,744] INFO: Step: 886745/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:58:02,156] INFO: Step: 886746/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:58:15,366] INFO: Step: 886747/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:58:28,722] INFO: Step: 886748/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:58:42,078] INFO: Step: 886749/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:58:55,038] INFO: Step: 886750/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:59:08,315] INFO: Step: 886751/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:59:21,735] INFO: Step: 886752/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:59:35,027] INFO: Step: 886753/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 16:59:48,344] INFO: Step: 886754/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:00:01,734] INFO: Step: 886755/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:00:15,008] INFO: Step: 886756/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:00:28,380] INFO: Step: 886757/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:00:41,580] INFO: Step: 886758/1000000 Loss: 0.089 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:00:54,971] INFO: Step: 886759/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:01:08,244] INFO: Step: 886760/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:01:21,763] INFO: Step: 886761/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:01:35,061] INFO: Step: 886762/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:01:48,362] INFO: Step: 886763/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:02:01,654] INFO: Step: 886764/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:02:14,951] INFO: Step: 886765/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:02:28,299] INFO: Step: 886766/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:02:41,607] INFO: Step: 886767/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:02:54,914] INFO: Step: 886768/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:03:08,160] INFO: Step: 886769/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:03:21,299] INFO: Step: 886770/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:03:34,581] INFO: Step: 886771/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:03:47,912] INFO: Step: 886772/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:04:01,427] INFO: Step: 886773/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:04:14,782] INFO: Step: 886774/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:04:28,120] INFO: Step: 886775/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:04:41,582] INFO: Step: 886776/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:04:54,910] INFO: Step: 886777/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:05:08,386] INFO: Step: 886778/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:05:21,953] INFO: Step: 886779/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:05:35,157] INFO: Step: 886780/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:05:48,331] INFO: Step: 886781/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:06:01,498] INFO: Step: 886782/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:06:14,716] INFO: Step: 886783/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:06:27,979] INFO: Step: 886784/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:06:41,288] INFO: Step: 886785/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:06:54,388] INFO: Step: 886786/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:07:07,456] INFO: Step: 886787/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:07:20,650] INFO: Step: 886788/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:07:33,969] INFO: Step: 886789/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:07:47,250] INFO: Step: 886790/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:08:00,517] INFO: Step: 886791/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:08:13,781] INFO: Step: 886792/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:08:27,041] INFO: Step: 886793/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:08:40,317] INFO: Step: 886794/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:08:53,560] INFO: Step: 886795/1000000 Loss: 0.082 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:09:07,179] INFO: Step: 886796/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:09:20,638] INFO: Step: 886797/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:09:34,291] INFO: Step: 886798/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:09:47,794] INFO: Step: 886799/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:10:01,390] INFO: Step: 886800/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:10:15,092] INFO: Step: 886801/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:10:28,643] INFO: Step: 886802/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:10:42,511] INFO: Step: 886803/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:10:55,822] INFO: Step: 886804/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:11:09,100] INFO: Step: 886805/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:11:22,374] INFO: Step: 886806/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:11:35,591] INFO: Step: 886807/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:11:48,910] INFO: Step: 886808/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:12:02,370] INFO: Step: 886809/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:12:15,563] INFO: Step: 886810/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:12:28,803] INFO: Step: 886811/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:12:42,347] INFO: Step: 886812/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:12:55,619] INFO: Step: 886813/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:13:08,929] INFO: Step: 886814/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:13:22,119] INFO: Step: 886815/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:13:35,335] INFO: Step: 886816/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:13:48,659] INFO: Step: 886817/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:14:01,801] INFO: Step: 886818/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:14:14,989] INFO: Step: 886819/1000000 Loss: 0.067 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:14:28,313] INFO: Step: 886820/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:14:41,642] INFO: Step: 886821/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:14:55,167] INFO: Step: 886822/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:15:08,553] INFO: Step: 886823/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:15:21,891] INFO: Step: 886824/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:15:35,207] INFO: Step: 886825/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:15:48,343] INFO: Step: 886826/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:16:01,577] INFO: Step: 886827/1000000 Loss: 0.099 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:16:15,059] INFO: Step: 886828/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:16:28,406] INFO: Step: 886829/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:16:41,655] INFO: Step: 886830/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:16:54,947] INFO: Step: 886831/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:17:08,254] INFO: Step: 886832/1000000 Loss: 0.070 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:17:21,494] INFO: Step: 886833/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:17:34,795] INFO: Step: 886834/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:17:48,103] INFO: Step: 886835/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:18:01,370] INFO: Step: 886836/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:18:14,853] INFO: Step: 886837/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:18:28,130] INFO: Step: 886838/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:18:41,377] INFO: Step: 886839/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:18:54,858] INFO: Step: 886840/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:19:08,113] INFO: Step: 886841/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:19:21,391] INFO: Step: 886842/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:19:34,590] INFO: Step: 886843/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:19:47,986] INFO: Step: 886844/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:20:01,088] INFO: Step: 886845/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:20:14,414] INFO: Step: 886846/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:20:27,798] INFO: Step: 886847/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:20:41,115] INFO: Step: 886848/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:20:54,404] INFO: Step: 886849/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:21:07,835] INFO: Step: 886850/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:21:21,359] INFO: Step: 886851/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:21:34,683] INFO: Step: 886852/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:21:48,010] INFO: Step: 886853/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:22:01,219] INFO: Step: 886854/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:22:14,366] INFO: Step: 886855/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:22:27,602] INFO: Step: 886856/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:22:40,946] INFO: Step: 886857/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:22:54,342] INFO: Step: 886858/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:23:07,650] INFO: Step: 886859/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:23:21,115] INFO: Step: 886860/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:23:34,448] INFO: Step: 886861/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:23:47,702] INFO: Step: 886862/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:24:00,854] INFO: Step: 886863/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:24:14,061] INFO: Step: 886864/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:24:27,354] INFO: Step: 886865/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:24:40,686] INFO: Step: 886866/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:24:54,023] INFO: Step: 886867/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:25:07,338] INFO: Step: 886868/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:25:20,486] INFO: Step: 886869/1000000 Loss: 0.077 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:25:33,810] INFO: Step: 886870/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:25:47,053] INFO: Step: 886871/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:26:00,242] INFO: Step: 886872/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:26:13,417] INFO: Step: 886873/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:26:26,616] INFO: Step: 886874/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:26:40,013] INFO: Step: 886875/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:26:53,374] INFO: Step: 886876/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:27:06,620] INFO: Step: 886877/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:27:19,830] INFO: Step: 886878/1000000 Loss: 0.065 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:27:33,240] INFO: Step: 886879/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:27:46,446] INFO: Step: 886880/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:27:59,713] INFO: Step: 886881/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:28:13,139] INFO: Step: 886882/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:28:26,342] INFO: Step: 886883/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:28:39,632] INFO: Step: 886884/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:28:53,085] INFO: Step: 886885/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:29:06,385] INFO: Step: 886886/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:29:19,557] INFO: Step: 886887/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:29:32,873] INFO: Step: 886888/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:29:46,232] INFO: Step: 886889/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:29:59,601] INFO: Step: 886890/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:30:12,749] INFO: Step: 886891/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:30:26,105] INFO: Step: 886892/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:30:39,384] INFO: Step: 886893/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:30:52,651] INFO: Step: 886894/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:31:05,906] INFO: Step: 886895/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:31:19,123] INFO: Step: 886896/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:31:32,384] INFO: Step: 886897/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:31:45,889] INFO: Step: 886898/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:31:59,427] INFO: Step: 886899/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:32:12,590] INFO: Step: 886900/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:32:25,905] INFO: Step: 886901/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:32:39,314] INFO: Step: 886902/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:32:52,526] INFO: Step: 886903/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:33:05,759] INFO: Step: 886904/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:33:19,112] INFO: Step: 886905/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:33:32,464] INFO: Step: 886906/1000000 Loss: 0.094 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:33:45,608] INFO: Step: 886907/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:33:58,951] INFO: Step: 886908/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:34:12,356] INFO: Step: 886909/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:34:25,493] INFO: Step: 886910/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:34:38,643] INFO: Step: 886911/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:34:51,566] INFO: Step: 886912/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:35:04,928] INFO: Step: 886913/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:35:18,477] INFO: Step: 886914/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:35:31,860] INFO: Step: 886915/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:35:45,137] INFO: Step: 886916/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:35:58,378] INFO: Step: 886917/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:36:11,445] INFO: Step: 886918/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:36:24,855] INFO: Step: 886919/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:36:38,111] INFO: Step: 886920/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:36:51,593] INFO: Step: 886921/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:37:04,964] INFO: Step: 886922/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:37:18,326] INFO: Step: 886923/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:37:31,460] INFO: Step: 886924/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:37:44,850] INFO: Step: 886925/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:37:57,946] INFO: Step: 886926/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:38:11,310] INFO: Step: 886927/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:38:24,536] INFO: Step: 886928/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:38:37,872] INFO: Step: 886929/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:38:51,026] INFO: Step: 886930/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:39:04,272] INFO: Step: 886931/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:39:17,625] INFO: Step: 886932/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:39:31,057] INFO: Step: 886933/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:39:44,301] INFO: Step: 886934/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:39:57,678] INFO: Step: 886935/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:40:11,030] INFO: Step: 886936/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:40:24,437] INFO: Step: 886937/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:40:37,747] INFO: Step: 886938/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:40:51,013] INFO: Step: 886939/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:41:04,098] INFO: Step: 886940/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:41:17,426] INFO: Step: 886941/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:41:30,608] INFO: Step: 886942/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:41:43,858] INFO: Step: 886943/1000000 Loss: 0.082 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:41:57,051] INFO: Step: 886944/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:42:10,344] INFO: Step: 886945/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:42:23,487] INFO: Step: 886946/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:42:36,803] INFO: Step: 886947/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:42:50,295] INFO: Step: 886948/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:43:03,564] INFO: Step: 886949/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:43:16,714] INFO: Step: 886950/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:43:29,835] INFO: Step: 886951/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:43:42,955] INFO: Step: 886952/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:43:56,251] INFO: Step: 886953/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:44:09,525] INFO: Step: 886954/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:44:22,910] INFO: Step: 886955/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:44:36,152] INFO: Step: 886956/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:44:49,453] INFO: Step: 886957/1000000 Loss: 0.097 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:45:02,807] INFO: Step: 886958/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:45:16,079] INFO: Step: 886959/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:45:29,240] INFO: Step: 886960/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:45:42,563] INFO: Step: 886961/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:45:55,948] INFO: Step: 886962/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:46:09,020] INFO: Step: 886963/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:46:22,217] INFO: Step: 886964/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:46:35,540] INFO: Step: 886965/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:46:48,831] INFO: Step: 886966/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:47:02,089] INFO: Step: 886967/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:47:15,379] INFO: Step: 886968/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:47:28,661] INFO: Step: 886969/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:47:41,901] INFO: Step: 886970/1000000 Loss: 0.098 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:47:55,180] INFO: Step: 886971/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:48:08,543] INFO: Step: 886972/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:48:21,752] INFO: Step: 886973/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:48:35,015] INFO: Step: 886974/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:48:48,471] INFO: Step: 886975/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:49:01,676] INFO: Step: 886976/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:49:14,879] INFO: Step: 886977/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:49:28,274] INFO: Step: 886978/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:49:41,432] INFO: Step: 886979/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:49:54,767] INFO: Step: 886980/1000000 Loss: 0.084 scale: 131072.0 LR: 
0.0000300 +[2024-06-09 17:50:08,166] INFO: Step: 886981/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:50:21,425] INFO: Step: 886982/1000000 Loss: 0.103 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:50:34,856] INFO: Step: 886983/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:50:48,181] INFO: Step: 886984/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:51:01,372] INFO: Step: 886985/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:51:14,606] INFO: Step: 886986/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:51:27,852] INFO: Step: 886987/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:51:41,286] INFO: Step: 886988/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:51:54,660] INFO: Step: 886989/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:52:08,014] INFO: Step: 886990/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:52:21,385] INFO: Step: 886991/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:52:34,694] INFO: Step: 886992/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:52:48,132] INFO: Step: 886993/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:53:01,453] INFO: Step: 886994/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:53:14,880] INFO: Step: 886995/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:53:28,287] INFO: Step: 886996/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:53:41,698] INFO: Step: 886997/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:53:54,856] INFO: Step: 886998/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:54:08,080] INFO: Step: 886999/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:54:21,443] INFO: Step: 887000/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:54:21,444] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00887000.pth +[2024-06-09 17:54:27,752] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00887000.pth +[2024-06-09 17:54:39,959] INFO: Step: 887001/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:54:53,110] INFO: Step: 887002/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:55:06,202] INFO: Step: 887003/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:55:19,311] INFO: Step: 887004/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:55:32,717] INFO: Step: 887005/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:55:45,886] INFO: Step: 887006/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:55:58,925] INFO: Step: 887007/1000000 Loss: 0.063 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:56:12,171] INFO: Step: 887008/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:56:25,425] INFO: Step: 887009/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:56:38,689] INFO: Step: 887010/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:56:52,093] INFO: Step: 887011/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:57:05,518] INFO: Step: 887012/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:57:18,993] INFO: Step: 887013/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:57:32,365] INFO: Step: 887014/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:57:45,510] INFO: 
Step: 887015/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:57:58,721] INFO: Step: 887016/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:58:12,053] INFO: Step: 887017/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:58:25,463] INFO: Step: 887018/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:58:38,850] INFO: Step: 887019/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:58:52,125] INFO: Step: 887020/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:59:05,464] INFO: Step: 887021/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:59:18,864] INFO: Step: 887022/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:59:32,045] INFO: Step: 887023/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:59:45,376] INFO: Step: 887024/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 17:59:58,633] INFO: Step: 887025/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:00:11,812] INFO: Step: 887026/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:00:25,324] INFO: Step: 887027/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:00:38,583] INFO: Step: 887028/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:00:51,922] INFO: Step: 887029/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:01:05,314] INFO: Step: 887030/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:01:18,584] INFO: Step: 887031/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:01:31,759] INFO: Step: 887032/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:01:44,910] INFO: Step: 887033/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:01:58,300] INFO: Step: 887034/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:02:11,430] INFO: Step: 887035/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:02:24,494] INFO: Step: 887036/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:02:37,573] INFO: Step: 887037/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:02:50,871] INFO: Step: 887038/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:03:04,148] INFO: Step: 887039/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:03:17,648] INFO: Step: 887040/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:03:30,994] INFO: Step: 887041/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:03:44,387] INFO: Step: 887042/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:03:58,136] INFO: Step: 887043/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:04:11,327] INFO: Step: 887044/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:04:24,596] INFO: Step: 887045/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:04:37,897] INFO: Step: 887046/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:04:51,128] INFO: Step: 887047/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:05:04,596] INFO: Step: 887048/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:05:17,755] INFO: Step: 887049/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:05:30,971] INFO: Step: 887050/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:05:44,198] INFO: Step: 887051/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:05:57,438] INFO: 
Step: 887052/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:06:10,721] INFO: Step: 887053/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:06:23,805] INFO: Step: 887054/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:06:36,978] INFO: Step: 887055/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:06:50,282] INFO: Step: 887056/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:07:03,601] INFO: Step: 887057/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:07:16,918] INFO: Step: 887058/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:07:30,121] INFO: Step: 887059/1000000 Loss: 0.067 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:07:43,325] INFO: Step: 887060/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:07:56,627] INFO: Step: 887061/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:08:10,060] INFO: Step: 887062/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:08:23,313] INFO: Step: 887063/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:08:36,511] INFO: Step: 887064/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:08:49,737] INFO: Step: 887065/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:09:03,117] INFO: Step: 887066/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:09:16,480] INFO: Step: 887067/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:09:29,554] INFO: Step: 887068/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:09:42,841] INFO: Step: 887069/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:09:56,174] INFO: Step: 887070/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:10:09,437] INFO: Step: 887071/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:10:22,706] INFO: Step: 887072/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:10:35,971] INFO: Step: 887073/1000000 Loss: 0.099 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:10:49,084] INFO: Step: 887074/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:11:02,414] INFO: Step: 887075/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:11:15,824] INFO: Step: 887076/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:11:29,145] INFO: Step: 887077/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:11:42,441] INFO: Step: 887078/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:11:55,957] INFO: Step: 887079/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:12:08,951] INFO: Step: 887080/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:12:22,171] INFO: Step: 887081/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:12:35,402] INFO: Step: 887082/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:12:48,917] INFO: Step: 887083/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:13:02,493] INFO: Step: 887084/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:13:15,661] INFO: Step: 887085/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:13:28,901] INFO: Step: 887086/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:13:41,775] INFO: Step: 887087/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:13:55,102] INFO: Step: 887088/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:14:08,429] INFO: 
Step: 887089/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:14:21,705] INFO: Step: 887090/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:14:35,000] INFO: Step: 887091/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:14:48,256] INFO: Step: 887092/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:15:01,608] INFO: Step: 887093/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:15:15,172] INFO: Step: 887094/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:15:28,447] INFO: Step: 887095/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:15:41,589] INFO: Step: 887096/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:15:54,833] INFO: Step: 887097/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:16:08,214] INFO: Step: 887098/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:16:21,568] INFO: Step: 887099/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:16:35,031] INFO: Step: 887100/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:16:48,252] INFO: Step: 887101/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:17:01,709] INFO: Step: 887102/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:17:15,058] INFO: Step: 887103/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:17:28,339] INFO: Step: 887104/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:17:41,606] INFO: Step: 887105/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:17:55,158] INFO: Step: 887106/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:18:08,509] INFO: Step: 887107/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:18:21,899] INFO: Step: 887108/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:18:35,096] INFO: Step: 887109/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:18:48,464] INFO: Step: 887110/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:19:01,874] INFO: Step: 887111/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:19:15,080] INFO: Step: 887112/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:19:28,271] INFO: Step: 887113/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:19:41,659] INFO: Step: 887114/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:19:55,108] INFO: Step: 887115/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:20:08,553] INFO: Step: 887116/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:20:21,811] INFO: Step: 887117/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:20:35,338] INFO: Step: 887118/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:20:48,651] INFO: Step: 887119/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:21:01,875] INFO: Step: 887120/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:21:15,320] INFO: Step: 887121/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:21:28,606] INFO: Step: 887122/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:21:42,157] INFO: Step: 887123/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:21:55,534] INFO: Step: 887124/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:22:08,912] INFO: Step: 887125/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:22:22,182] INFO: 
Step: 887126/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:22:35,577] INFO: Step: 887127/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:22:48,886] INFO: Step: 887128/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:23:02,216] INFO: Step: 887129/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:23:15,366] INFO: Step: 887130/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:23:28,690] INFO: Step: 887131/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:23:41,970] INFO: Step: 887132/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:23:55,286] INFO: Step: 887133/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:24:08,647] INFO: Step: 887134/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:24:21,747] INFO: Step: 887135/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:24:35,180] INFO: Step: 887136/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:24:48,487] INFO: Step: 887137/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:25:01,989] INFO: Step: 887138/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:25:15,505] INFO: Step: 887139/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:25:28,765] INFO: Step: 887140/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:25:42,137] INFO: Step: 887141/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:25:55,513] INFO: Step: 887142/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:26:08,965] INFO: Step: 887143/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:26:22,381] INFO: Step: 887144/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:26:35,673] INFO: Step: 887145/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:26:48,966] INFO: Step: 887146/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:27:02,351] INFO: Step: 887147/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:27:15,642] INFO: Step: 887148/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:27:28,941] INFO: Step: 887149/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:27:42,315] INFO: Step: 887150/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:27:55,689] INFO: Step: 887151/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:28:09,009] INFO: Step: 887152/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:28:22,439] INFO: Step: 887153/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:28:35,747] INFO: Step: 887154/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:28:49,013] INFO: Step: 887155/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:29:02,182] INFO: Step: 887156/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:29:15,439] INFO: Step: 887157/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:29:28,612] INFO: Step: 887158/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:29:41,836] INFO: Step: 887159/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:29:55,107] INFO: Step: 887160/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:30:08,458] INFO: Step: 887161/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:30:21,915] INFO: Step: 887162/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:30:35,214] INFO: 
Step: 887163/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:30:48,522] INFO: Step: 887164/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:31:02,028] INFO: Step: 887165/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:31:15,257] INFO: Step: 887166/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:31:28,569] INFO: Step: 887167/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:31:41,938] INFO: Step: 887168/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:31:55,270] INFO: Step: 887169/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:32:08,548] INFO: Step: 887170/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:32:21,811] INFO: Step: 887171/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:32:34,946] INFO: Step: 887172/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:32:48,354] INFO: Step: 887173/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:33:01,639] INFO: Step: 887174/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:33:14,949] INFO: Step: 887175/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:33:28,366] INFO: Step: 887176/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:33:41,674] INFO: Step: 887177/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:33:55,043] INFO: Step: 887178/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:34:08,320] INFO: Step: 887179/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:34:21,626] INFO: Step: 887180/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:34:34,944] INFO: Step: 887181/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:34:48,370] INFO: Step: 887182/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:35:01,717] INFO: Step: 887183/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:35:14,937] INFO: Step: 887184/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:35:28,419] INFO: Step: 887185/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:35:41,659] INFO: Step: 887186/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:35:55,130] INFO: Step: 887187/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:36:08,435] INFO: Step: 887188/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:36:21,698] INFO: Step: 887189/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:36:35,070] INFO: Step: 887190/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:36:48,514] INFO: Step: 887191/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:37:01,779] INFO: Step: 887192/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:37:15,076] INFO: Step: 887193/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:37:28,384] INFO: Step: 887194/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:37:41,706] INFO: Step: 887195/1000000 Loss: 0.060 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:37:55,020] INFO: Step: 887196/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:38:08,369] INFO: Step: 887197/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:38:21,791] INFO: Step: 887198/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:38:35,065] INFO: Step: 887199/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:38:48,360] INFO: 
Step: 887200/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:39:01,543] INFO: Step: 887201/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:39:14,816] INFO: Step: 887202/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:39:28,172] INFO: Step: 887203/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:39:41,496] INFO: Step: 887204/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:39:54,794] INFO: Step: 887205/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:40:08,125] INFO: Step: 887206/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:40:21,503] INFO: Step: 887207/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:40:34,998] INFO: Step: 887208/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:40:48,143] INFO: Step: 887209/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:41:01,680] INFO: Step: 887210/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:41:15,025] INFO: Step: 887211/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:41:28,474] INFO: Step: 887212/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:41:41,571] INFO: Step: 887213/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:41:54,908] INFO: Step: 887214/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:42:08,045] INFO: Step: 887215/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:42:21,268] INFO: Step: 887216/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:42:34,658] INFO: Step: 887217/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:42:47,910] INFO: Step: 887218/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:43:01,249] INFO: Step: 887219/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:43:14,490] INFO: Step: 887220/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:43:27,706] INFO: Step: 887221/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:43:41,100] INFO: Step: 887222/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:43:54,509] INFO: Step: 887223/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:44:07,867] INFO: Step: 887224/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:44:21,143] INFO: Step: 887225/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:44:34,147] INFO: Step: 887226/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:44:47,411] INFO: Step: 887227/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:45:00,628] INFO: Step: 887228/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:45:13,918] INFO: Step: 887229/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:45:27,117] INFO: Step: 887230/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:45:40,395] INFO: Step: 887231/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:45:53,600] INFO: Step: 887232/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:46:06,862] INFO: Step: 887233/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:46:20,163] INFO: Step: 887234/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:46:33,701] INFO: Step: 887235/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:46:47,048] INFO: Step: 887236/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:47:00,331] INFO: 
Step: 887237/1000000 Loss: 0.065 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:47:13,451] INFO: Step: 887238/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:47:26,626] INFO: Step: 887239/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:47:39,806] INFO: Step: 887240/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:47:53,099] INFO: Step: 887241/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:48:06,256] INFO: Step: 887242/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:48:19,467] INFO: Step: 887243/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:48:32,696] INFO: Step: 887244/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:48:45,979] INFO: Step: 887245/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:48:59,400] INFO: Step: 887246/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:49:12,693] INFO: Step: 887247/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:49:25,992] INFO: Step: 887248/1000000 Loss: 0.067 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:49:39,274] INFO: Step: 887249/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:49:52,398] INFO: Step: 887250/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:50:05,645] INFO: Step: 887251/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:50:19,072] INFO: Step: 887252/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:50:32,312] INFO: Step: 887253/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:50:45,553] INFO: Step: 887254/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:50:58,759] INFO: Step: 887255/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:51:11,716] INFO: Step: 887256/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:51:24,836] INFO: Step: 887257/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:51:38,039] INFO: Step: 887258/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:51:51,162] INFO: Step: 887259/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:52:04,546] INFO: Step: 887260/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:52:17,952] INFO: Step: 887261/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:52:31,524] INFO: Step: 887262/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:52:44,793] INFO: Step: 887263/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:52:58,155] INFO: Step: 887264/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:53:11,476] INFO: Step: 887265/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:53:24,911] INFO: Step: 887266/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:53:38,264] INFO: Step: 887267/1000000 Loss: 0.064 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:53:51,435] INFO: Step: 887268/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:54:04,789] INFO: Step: 887269/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:54:18,022] INFO: Step: 887270/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:54:31,307] INFO: Step: 887271/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:54:44,530] INFO: Step: 887272/1000000 Loss: 0.097 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:54:57,905] INFO: Step: 887273/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:55:11,181] INFO: 
Step: 887274/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:55:24,152] INFO: Step: 887275/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:55:37,349] INFO: Step: 887276/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:55:50,574] INFO: Step: 887277/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:56:03,889] INFO: Step: 887278/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:56:17,071] INFO: Step: 887279/1000000 Loss: 0.099 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:56:30,317] INFO: Step: 887280/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:56:43,564] INFO: Step: 887281/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:56:56,970] INFO: Step: 887282/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:57:10,456] INFO: Step: 887283/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:57:23,685] INFO: Step: 887284/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:57:37,145] INFO: Step: 887285/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:57:50,456] INFO: Step: 887286/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:58:03,818] INFO: Step: 887287/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:58:17,147] INFO: Step: 887288/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:58:30,478] INFO: Step: 887289/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:58:43,808] INFO: Step: 887290/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:58:57,044] INFO: Step: 887291/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:59:10,633] INFO: Step: 887292/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:59:23,994] INFO: Step: 887293/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:59:37,243] INFO: Step: 887294/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 18:59:50,582] INFO: Step: 887295/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:00:04,170] INFO: Step: 887296/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:00:17,569] INFO: Step: 887297/1000000 Loss: 0.062 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:00:30,891] INFO: Step: 887298/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:00:44,201] INFO: Step: 887299/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:00:57,698] INFO: Step: 887300/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:01:10,975] INFO: Step: 887301/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:01:24,289] INFO: Step: 887302/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:01:37,676] INFO: Step: 887303/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:01:50,947] INFO: Step: 887304/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:02:04,313] INFO: Step: 887305/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:02:17,642] INFO: Step: 887306/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:02:31,106] INFO: Step: 887307/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:02:44,464] INFO: Step: 887308/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:02:57,762] INFO: Step: 887309/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:03:11,046] INFO: Step: 887310/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:03:24,259] INFO: 
Step: 887311/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:03:37,770] INFO: Step: 887312/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:03:51,104] INFO: Step: 887313/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:04:04,397] INFO: Step: 887314/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:04:17,593] INFO: Step: 887315/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:04:30,881] INFO: Step: 887316/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:04:44,103] INFO: Step: 887317/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:04:57,289] INFO: Step: 887318/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:05:10,549] INFO: Step: 887319/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:05:23,706] INFO: Step: 887320/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:05:36,926] INFO: Step: 887321/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:05:50,048] INFO: Step: 887322/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:06:03,369] INFO: Step: 887323/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:06:16,671] INFO: Step: 887324/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:06:30,128] INFO: Step: 887325/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:06:43,296] INFO: Step: 887326/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:06:56,358] INFO: Step: 887327/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:07:09,603] INFO: Step: 887328/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:07:22,822] INFO: Step: 887329/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:07:36,367] INFO: Step: 887330/1000000 Loss: 0.067 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:07:49,817] INFO: Step: 887331/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:08:03,050] INFO: Step: 887332/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:08:16,339] INFO: Step: 887333/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:08:29,628] INFO: Step: 887334/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:08:42,673] INFO: Step: 887335/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:08:56,145] INFO: Step: 887336/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:09:09,477] INFO: Step: 887337/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:09:22,806] INFO: Step: 887338/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:09:36,175] INFO: Step: 887339/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:09:49,370] INFO: Step: 887340/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:10:02,824] INFO: Step: 887341/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:10:16,113] INFO: Step: 887342/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:10:29,275] INFO: Step: 887343/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:10:42,336] INFO: Step: 887344/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:10:55,371] INFO: Step: 887345/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:11:08,506] INFO: Step: 887346/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:11:21,861] INFO: Step: 887347/1000000 Loss: 0.100 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:11:35,092] INFO: 
Step: 887348/1000000 Loss: 0.064 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:11:48,282] INFO: Step: 887349/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:12:01,909] INFO: Step: 887350/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:12:15,050] INFO: Step: 887351/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:12:28,313] INFO: Step: 887352/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:12:41,533] INFO: Step: 887353/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:12:54,749] INFO: Step: 887354/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:13:08,186] INFO: Step: 887355/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:13:21,455] INFO: Step: 887356/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:13:34,736] INFO: Step: 887357/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:13:48,067] INFO: Step: 887358/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:14:01,624] INFO: Step: 887359/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:14:15,059] INFO: Step: 887360/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:14:28,185] INFO: Step: 887361/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:14:41,504] INFO: Step: 887362/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:14:54,801] INFO: Step: 887363/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:15:08,008] INFO: Step: 887364/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:15:21,243] INFO: Step: 887365/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:15:34,457] INFO: Step: 887366/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:15:47,735] INFO: Step: 887367/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:16:00,944] INFO: Step: 887368/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:16:14,063] INFO: Step: 887369/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:16:27,222] INFO: Step: 887370/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:16:40,416] INFO: Step: 887371/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:16:53,587] INFO: Step: 887372/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:17:06,802] INFO: Step: 887373/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:17:19,964] INFO: Step: 887374/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:17:33,161] INFO: Step: 887375/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:17:46,758] INFO: Step: 887376/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:18:00,014] INFO: Step: 887377/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:18:13,252] INFO: Step: 887378/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:18:26,590] INFO: Step: 887379/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:18:39,562] INFO: Step: 887380/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:18:52,701] INFO: Step: 887381/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:19:05,678] INFO: Step: 887382/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:19:18,817] INFO: Step: 887383/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:19:31,945] INFO: Step: 887384/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:19:45,021] INFO: 
Step: 887385/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:19:58,304] INFO: Step: 887386/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:20:11,637] INFO: Step: 887387/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:20:24,829] INFO: Step: 887388/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:20:38,319] INFO: Step: 887389/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:20:51,616] INFO: Step: 887390/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:21:04,897] INFO: Step: 887391/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:21:18,278] INFO: Step: 887392/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:21:31,717] INFO: Step: 887393/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:21:45,124] INFO: Step: 887394/1000000 Loss: 0.064 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:21:58,771] INFO: Step: 887395/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:22:12,076] INFO: Step: 887396/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:22:25,526] INFO: Step: 887397/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:22:38,735] INFO: Step: 887398/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:22:52,104] INFO: Step: 887399/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:23:05,575] INFO: Step: 887400/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:23:18,902] INFO: Step: 887401/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:23:32,210] INFO: Step: 887402/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:23:45,434] INFO: Step: 887403/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:23:58,746] INFO: Step: 887404/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:24:11,993] INFO: Step: 887405/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:24:25,283] INFO: Step: 887406/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:24:38,553] INFO: Step: 887407/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:24:51,806] INFO: Step: 887408/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:25:05,174] INFO: Step: 887409/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:25:18,274] INFO: Step: 887410/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:25:31,519] INFO: Step: 887411/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:25:44,708] INFO: Step: 887412/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:25:57,978] INFO: Step: 887413/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:26:11,315] INFO: Step: 887414/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:26:24,503] INFO: Step: 887415/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:26:37,858] INFO: Step: 887416/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:26:51,180] INFO: Step: 887417/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:27:04,650] INFO: Step: 887418/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:27:17,967] INFO: Step: 887419/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:27:31,192] INFO: Step: 887420/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:27:44,515] INFO: Step: 887421/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:27:57,779] INFO: 
Step: 887422/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:28:11,083] INFO: Step: 887423/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:28:24,193] INFO: Step: 887424/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:28:37,488] INFO: Step: 887425/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:28:50,716] INFO: Step: 887426/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:29:04,129] INFO: Step: 887427/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:29:17,332] INFO: Step: 887428/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:29:30,574] INFO: Step: 887429/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:29:43,752] INFO: Step: 887430/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:29:56,974] INFO: Step: 887431/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:30:10,206] INFO: Step: 887432/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:30:23,530] INFO: Step: 887433/1000000 Loss: 0.064 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:30:36,799] INFO: Step: 887434/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:30:50,200] INFO: Step: 887435/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:31:03,565] INFO: Step: 887436/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:31:16,884] INFO: Step: 887437/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:31:30,063] INFO: Step: 887438/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:31:43,401] INFO: Step: 887439/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:31:56,682] INFO: Step: 887440/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:32:10,033] INFO: Step: 887441/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:32:23,269] INFO: Step: 887442/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:32:36,598] INFO: Step: 887443/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:32:49,937] INFO: Step: 887444/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:33:03,303] INFO: Step: 887445/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:33:16,623] INFO: Step: 887446/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:33:29,920] INFO: Step: 887447/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:33:43,412] INFO: Step: 887448/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:33:56,710] INFO: Step: 887449/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:34:10,014] INFO: Step: 887450/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:34:23,378] INFO: Step: 887451/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:34:36,609] INFO: Step: 887452/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:34:49,880] INFO: Step: 887453/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:35:03,304] INFO: Step: 887454/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:35:16,697] INFO: Step: 887455/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:35:29,930] INFO: Step: 887456/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:35:43,212] INFO: Step: 887457/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:35:56,604] INFO: Step: 887458/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:36:10,062] INFO: 
Step: 887459/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:36:23,345] INFO: Step: 887460/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:36:36,828] INFO: Step: 887461/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:36:50,075] INFO: Step: 887462/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:37:03,245] INFO: Step: 887463/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:37:16,663] INFO: Step: 887464/1000000 Loss: 0.067 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:37:29,907] INFO: Step: 887465/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:37:43,188] INFO: Step: 887466/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:37:56,472] INFO: Step: 887467/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:38:09,804] INFO: Step: 887468/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:38:23,154] INFO: Step: 887469/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:38:36,420] INFO: Step: 887470/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:38:49,767] INFO: Step: 887471/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:39:02,817] INFO: Step: 887472/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:39:16,051] INFO: Step: 887473/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:39:29,425] INFO: Step: 887474/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:39:42,838] INFO: Step: 887475/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:39:55,981] INFO: Step: 887476/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:40:09,556] INFO: Step: 887477/1000000 Loss: 0.066 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:40:22,814] INFO: Step: 887478/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:40:36,278] INFO: Step: 887479/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:40:49,548] INFO: Step: 887480/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:41:02,917] INFO: Step: 887481/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:41:16,300] INFO: Step: 887482/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:41:29,442] INFO: Step: 887483/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:41:42,777] INFO: Step: 887484/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:41:55,948] INFO: Step: 887485/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:42:09,225] INFO: Step: 887486/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:42:22,500] INFO: Step: 887487/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:42:35,801] INFO: Step: 887488/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:42:49,174] INFO: Step: 887489/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:43:02,564] INFO: Step: 887490/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:43:16,062] INFO: Step: 887491/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:43:29,271] INFO: Step: 887492/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:43:42,733] INFO: Step: 887493/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:43:56,055] INFO: Step: 887494/1000000 Loss: 0.094 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:44:09,408] INFO: Step: 887495/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:44:22,725] INFO: 
Step: 887496/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:44:36,084] INFO: Step: 887497/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:44:49,310] INFO: Step: 887498/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:45:02,546] INFO: Step: 887499/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:45:15,820] INFO: Step: 887500/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:45:15,821] INFO: Begin to Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00887500.pth +[2024-06-09 19:45:23,214] INFO: Save model to workspace/i2v_motion/obj_train_motion/checkpoints/non_ema_00887500.pth +[2024-06-09 19:45:35,454] INFO: Step: 887501/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:45:48,580] INFO: Step: 887502/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:46:01,906] INFO: Step: 887503/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:46:15,160] INFO: Step: 887504/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:46:28,488] INFO: Step: 887505/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:46:41,803] INFO: Step: 887506/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:46:55,060] INFO: Step: 887507/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:47:08,297] INFO: Step: 887508/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:47:21,641] INFO: Step: 887509/1000000 Loss: 0.064 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:47:34,837] INFO: Step: 887510/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:47:48,020] INFO: Step: 887511/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:48:01,252] INFO: Step: 887512/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:48:14,446] INFO: Step: 887513/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:48:27,550] INFO: Step: 887514/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:48:40,619] INFO: Step: 887515/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:48:53,863] INFO: Step: 887516/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:49:06,940] INFO: Step: 887517/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:49:20,055] INFO: Step: 887518/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:49:33,163] INFO: Step: 887519/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:49:46,264] INFO: Step: 887520/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:49:59,535] INFO: Step: 887521/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:50:12,662] INFO: Step: 887522/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:50:25,910] INFO: Step: 887523/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:50:39,154] INFO: Step: 887524/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:50:52,319] INFO: Step: 887525/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:51:05,700] INFO: Step: 887526/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:51:18,838] INFO: Step: 887527/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:51:32,104] INFO: Step: 887528/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:51:45,312] INFO: Step: 887529/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:51:58,407] INFO: Step: 887530/1000000 Loss: 0.092 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 19:52:11,551] INFO: Step: 887531/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:52:24,711] INFO: Step: 887532/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:52:37,974] INFO: Step: 887533/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:52:51,335] INFO: Step: 887534/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:53:04,497] INFO: Step: 887535/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:53:17,747] INFO: Step: 887536/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:53:31,053] INFO: Step: 887537/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:53:44,225] INFO: Step: 887538/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:53:57,488] INFO: Step: 887539/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:54:10,775] INFO: Step: 887540/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:54:23,851] INFO: Step: 887541/1000000 Loss: 0.103 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:54:37,246] INFO: Step: 887542/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:54:50,405] INFO: Step: 887543/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:55:03,590] INFO: Step: 887544/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:55:16,811] INFO: Step: 887545/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:55:30,105] INFO: Step: 887546/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:55:43,275] INFO: Step: 887547/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:55:56,428] INFO: Step: 887548/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:56:09,804] INFO: Step: 887549/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:56:22,960] INFO: Step: 887550/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:56:36,154] INFO: Step: 887551/1000000 Loss: 0.063 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:56:49,408] INFO: Step: 887552/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:57:02,589] INFO: Step: 887553/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:57:15,869] INFO: Step: 887554/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:57:29,050] INFO: Step: 887555/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:57:42,238] INFO: Step: 887556/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:57:55,615] INFO: Step: 887557/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:58:08,837] INFO: Step: 887558/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:58:22,080] INFO: Step: 887559/1000000 Loss: 0.073 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:58:35,370] INFO: Step: 887560/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:58:48,647] INFO: Step: 887561/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:59:01,825] INFO: Step: 887562/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:59:14,894] INFO: Step: 887563/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:59:28,332] INFO: Step: 887564/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:59:41,433] INFO: Step: 887565/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 19:59:54,457] INFO: Step: 887566/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:00:07,651] INFO: Step: 887567/1000000 Loss: 0.087 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 20:00:20,882] INFO: Step: 887568/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:00:34,101] INFO: Step: 887569/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:00:47,211] INFO: Step: 887570/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:01:00,520] INFO: Step: 887571/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:01:13,511] INFO: Step: 887572/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:01:26,535] INFO: Step: 887573/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:01:39,644] INFO: Step: 887574/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:01:52,773] INFO: Step: 887575/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:02:06,058] INFO: Step: 887576/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:02:19,112] INFO: Step: 887577/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:02:32,110] INFO: Step: 887578/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:02:45,341] INFO: Step: 887579/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:02:58,442] INFO: Step: 887580/1000000 Loss: 0.071 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:03:11,517] INFO: Step: 887581/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:03:24,774] INFO: Step: 887582/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:03:38,031] INFO: Step: 887583/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:03:51,407] INFO: Step: 887584/1000000 Loss: 0.088 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:04:04,694] INFO: Step: 887585/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:04:17,810] INFO: Step: 887586/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:04:31,099] INFO: Step: 887587/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:04:44,337] INFO: Step: 887588/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:04:57,725] INFO: Step: 887589/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:05:11,417] INFO: Step: 887590/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:05:24,808] INFO: Step: 887591/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:05:37,984] INFO: Step: 887592/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:05:51,383] INFO: Step: 887593/1000000 Loss: 0.072 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:06:04,645] INFO: Step: 887594/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:06:17,905] INFO: Step: 887595/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:06:31,168] INFO: Step: 887596/1000000 Loss: 0.068 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:06:44,574] INFO: Step: 887597/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:06:57,710] INFO: Step: 887598/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:07:10,993] INFO: Step: 887599/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:07:24,319] INFO: Step: 887600/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:07:37,612] INFO: Step: 887601/1000000 Loss: 0.100 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:07:51,052] INFO: Step: 887602/1000000 Loss: 0.096 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:08:04,344] INFO: Step: 887603/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:08:17,652] INFO: Step: 887604/1000000 Loss: 0.085 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 20:08:31,047] INFO: Step: 887605/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:08:44,304] INFO: Step: 887606/1000000 Loss: 0.087 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:08:57,472] INFO: Step: 887607/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:09:10,824] INFO: Step: 887608/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:09:24,278] INFO: Step: 887609/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:09:37,670] INFO: Step: 887610/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:09:51,025] INFO: Step: 887611/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:10:04,273] INFO: Step: 887612/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:10:17,540] INFO: Step: 887613/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:10:30,711] INFO: Step: 887614/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:10:43,955] INFO: Step: 887615/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:10:57,252] INFO: Step: 887616/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:11:10,534] INFO: Step: 887617/1000000 Loss: 0.092 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:11:23,827] INFO: Step: 887618/1000000 Loss: 0.089 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:11:37,066] INFO: Step: 887619/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:11:50,122] INFO: Step: 887620/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:12:03,459] INFO: Step: 887621/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:12:16,652] INFO: Step: 887622/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:12:29,852] INFO: Step: 887623/1000000 Loss: 0.085 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:12:42,991] INFO: Step: 887624/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:12:56,210] INFO: Step: 887625/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:13:09,499] INFO: Step: 887626/1000000 Loss: 0.076 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:13:22,664] INFO: Step: 887627/1000000 Loss: 0.091 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:13:35,899] INFO: Step: 887628/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:13:49,221] INFO: Step: 887629/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:14:02,358] INFO: Step: 887630/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:14:15,621] INFO: Step: 887631/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:14:28,896] INFO: Step: 887632/1000000 Loss: 0.077 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:14:42,098] INFO: Step: 887633/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:14:55,393] INFO: Step: 887634/1000000 Loss: 0.080 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:15:08,558] INFO: Step: 887635/1000000 Loss: 0.093 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:15:21,702] INFO: Step: 887636/1000000 Loss: 0.084 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:15:34,973] INFO: Step: 887637/1000000 Loss: 0.070 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:15:47,916] INFO: Step: 887638/1000000 Loss: 0.074 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:16:01,121] INFO: Step: 887639/1000000 Loss: 0.081 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:16:14,106] INFO: Step: 887640/1000000 Loss: 0.086 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:16:27,239] INFO: Step: 887641/1000000 Loss: 0.073 scale: 
131072.0 LR: 0.0000300 +[2024-06-09 20:16:40,519] INFO: Step: 887642/1000000 Loss: 0.100 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:16:53,705] INFO: Step: 887643/1000000 Loss: 0.069 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:17:07,028] INFO: Step: 887644/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:17:20,218] INFO: Step: 887645/1000000 Loss: 0.083 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:17:33,455] INFO: Step: 887646/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:17:46,659] INFO: Step: 887647/1000000 Loss: 0.095 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:17:59,901] INFO: Step: 887648/1000000 Loss: 0.079 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:18:13,232] INFO: Step: 887649/1000000 Loss: 0.082 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:18:26,365] INFO: Step: 887650/1000000 Loss: 0.090 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:18:39,608] INFO: Step: 887651/1000000 Loss: 0.078 scale: 131072.0 LR: 0.0000300 +[2024-06-09 20:18:52,967] INFO: Step: 887652/1000000 Loss: 0.075 scale: 131072.0 LR: 0.0000300
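Editor's note: every training entry above follows the fixed pattern "Step: <n>/1000000 Loss: <loss> scale: <loss-scale> LR: <lr>", with periodic non-EMA checkpoints written alongside (e.g. non_ema_00887500.pth at step 887500). A minimal, illustrative sketch for summarizing these entries offline is shown below; it is not part of the training code, and the log file name passed in is an assumption.

    # summarize_log.py - illustrative only; parses "Step: .../... Loss: ... scale: ... LR: ..." entries
    import re
    from statistics import mean

    # Matches one step entry as it appears in the lines above.
    PATTERN = re.compile(r"Step: (\d+)/\d+ Loss: ([0-9.]+) scale: ([0-9.]+) LR: ([0-9.eE-]+)")

    def summarize(path="log.txt"):  # path is a placeholder, not taken from the training setup
        losses = []
        with open(path) as fh:
            for line in fh:
                # A single physical line may contain several entries, so use findall.
                for step, loss, scale, lr in PATTERN.findall(line):
                    losses.append(float(loss))
        if losses:
            print(f"parsed {len(losses)} steps, mean loss {mean(losses):.3f}")

    if __name__ == "__main__":
        summarize()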