diff --git "a/imagenet/resnet18_dino_pretrain/log/train.info.log.2023-09-05" "b/imagenet/resnet18_dino_pretrain/log/train.info.log.2023-09-05" new file mode 100644--- /dev/null +++ "b/imagenet/resnet18_dino_pretrain/log/train.info.log.2023-09-05" @@ -0,0 +1,3780 @@ +2023-09-10 13:03:52 - network: resnet18 +2023-09-10 13:03:52 - num_classes: 1000 +2023-09-10 13:03:52 - input_image_size: 224 +2023-09-10 13:03:52 - scale: 1.1428571428571428 +2023-09-10 13:03:52 - trained_model_path: /root/code/SimpleAICV_pytorch_training_examples_on_ImageNet_COCO_ADE20K/pretrained_models/resnet_dino_pretrain_on_imagenet1k/resnet18_dino_pretrain_model-student-loss3.081.pth +2023-09-10 13:03:52 - train_criterion: CELoss( + (loss): CrossEntropyLoss() +) +2023-09-10 13:03:52 - test_criterion: CELoss( + (loss): CrossEntropyLoss() +) +2023-09-10 13:03:52 - train_dataset: +2023-09-10 13:03:52 - test_dataset: +2023-09-10 13:03:52 - train_collater: +2023-09-10 13:03:52 - test_collater: +2023-09-10 13:03:52 - seed: 0 +2023-09-10 13:03:52 - batch_size: 256 +2023-09-10 13:03:52 - num_workers: 12 +2023-09-10 13:03:52 - accumulation_steps: 1 +2023-09-10 13:03:52 - optimizer: ('SGD', {'lr': 0.1, 'momentum': 0.9, 'global_weight_decay': False, 'weight_decay': 0.0001, 'no_weight_decay_layer_name_list': []}) +2023-09-10 13:03:52 - scheduler: ('MultiStepLR', {'warm_up_epochs': 0, 'gamma': 0.1, 'milestones': [30, 60, 90]}) +2023-09-10 13:03:52 - epochs: 100 +2023-09-10 13:03:52 - print_interval: 100 +2023-09-10 13:03:52 - sync_bn: False +2023-09-10 13:03:52 - use_amp: True +2023-09-10 13:03:52 - use_compile: False +2023-09-10 13:03:52 - compile_params: {'mode': 'default'} +2023-09-10 13:03:52 - use_ema_model: False +2023-09-10 13:03:52 - ema_model_decay: 0.9999 +2023-09-10 13:03:52 - gpus_type: NVIDIA RTX A5000 +2023-09-10 13:03:52 - gpus_num: 1 +2023-09-10 13:03:52 - group: +2023-09-10 13:03:52 - --------------------parameters-------------------- +2023-09-10 13:03:52 - name: conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.weight, grad: True 
+2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.0.weight, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.weight, grad: True +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.bias, grad: True +2023-09-10 13:03:52 - name: fc.weight, grad: True +2023-09-10 13:03:52 - name: fc.bias, grad: True +2023-09-10 13:03:52 - --------------------buffers-------------------- +2023-09-10 13:03:52 - name: conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 
13:03:52 - name: layer1.1.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: 
layer4.0.downsample_conv.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.running_mean, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.running_var, grad: False +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.num_batches_tracked, grad: False +2023-09-10 13:03:52 - -------------layers weight decay--------------- +2023-09-10 13:03:52 - name: conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! 
+2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.weight, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.1.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: fc.bias, lr: 0.1, weight_decay: 0.0, lr_scale: not setting! +2023-09-10 13:03:52 - name: conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.0.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer1.1.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.0.downsample_conv.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer2.1.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.0.downsample_conv.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer3.1.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! 
+2023-09-10 13:03:52 - name: layer4.0.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.0.downsample_conv.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv1.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: layer4.1.conv2.layer.0.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - name: fc.weight, lr: 0.1, weight_decay: 0.0001, lr_scale: not setting! +2023-09-10 13:03:52 - using torch version:2.0.0+cu118 +2023-09-10 13:03:52 - this torch version support torch.compile function. +2023-09-10 13:03:52 - epoch 001 lr: 0.100000 +2023-09-10 13:04:39 - train: epoch 0001, iter [00100, 05004], lr: 0.100000, loss: 6.8396 +2023-09-10 13:05:20 - train: epoch 0001, iter [00200, 05004], lr: 0.100000, loss: 6.6867 +2023-09-10 13:05:55 - train: epoch 0001, iter [00300, 05004], lr: 0.100000, loss: 6.4771 +2023-09-10 13:06:30 - train: epoch 0001, iter [00400, 05004], lr: 0.100000, loss: 6.3253 +2023-09-10 13:07:09 - train: epoch 0001, iter [00500, 05004], lr: 0.100000, loss: 6.1631 +2023-09-10 13:07:45 - train: epoch 0001, iter [00600, 05004], lr: 0.100000, loss: 5.7324 +2023-09-10 13:08:22 - train: epoch 0001, iter [00700, 05004], lr: 0.100000, loss: 5.8089 +2023-09-10 13:09:01 - train: epoch 0001, iter [00800, 05004], lr: 0.100000, loss: 5.6633 +2023-09-10 13:09:36 - train: epoch 0001, iter [00900, 05004], lr: 0.100000, loss: 5.5579 +2023-09-10 13:10:12 - train: epoch 0001, iter [01000, 05004], lr: 0.100000, loss: 5.3556 +2023-09-10 13:10:50 - train: epoch 0001, iter [01100, 05004], lr: 0.100000, loss: 5.4030 +2023-09-10 13:11:27 - train: epoch 0001, iter [01200, 05004], lr: 0.100000, loss: 5.1919 +2023-09-10 13:12:01 - train: epoch 0001, iter [01300, 05004], lr: 0.100000, loss: 5.0671 +2023-09-10 13:12:37 - train: epoch 0001, iter [01400, 05004], lr: 0.100000, loss: 5.0707 +2023-09-10 13:13:14 - train: epoch 0001, iter [01500, 05004], lr: 0.100000, loss: 4.7720 +2023-09-10 13:13:51 - train: epoch 0001, iter [01600, 05004], lr: 0.100000, loss: 5.0815 +2023-09-10 13:14:27 - train: epoch 0001, iter [01700, 05004], lr: 0.100000, loss: 4.6818 +2023-09-10 13:15:06 - train: epoch 0001, iter [01800, 05004], lr: 0.100000, loss: 4.8203 +2023-09-10 13:15:41 - train: epoch 0001, iter [01900, 05004], lr: 0.100000, loss: 4.6346 +2023-09-10 13:16:15 - train: epoch 0001, iter [02000, 05004], lr: 0.100000, loss: 4.4740 +2023-09-10 13:16:54 - train: epoch 0001, iter [02100, 05004], lr: 0.100000, loss: 4.5532 +2023-09-10 13:17:29 - train: epoch 0001, iter [02200, 05004], lr: 0.100000, loss: 4.4487 +2023-09-10 13:18:05 - train: epoch 0001, iter [02300, 05004], lr: 0.100000, loss: 4.1446 +2023-09-10 13:18:42 - train: epoch 0001, iter [02400, 05004], lr: 0.100000, loss: 4.2638 +2023-09-10 13:19:20 - train: epoch 0001, iter [02500, 05004], lr: 0.100000, loss: 4.3064 +2023-09-10 13:19:55 - train: epoch 0001, iter [02600, 05004], lr: 0.100000, loss: 4.4744 +2023-09-10 13:20:31 - train: epoch 0001, iter [02700, 05004], lr: 0.100000, loss: 4.2419 +2023-09-10 13:21:09 - train: epoch 0001, iter [02800, 05004], lr: 0.100000, loss: 4.0654 +2023-09-10 13:21:45 - train: epoch 0001, iter [02900, 05004], lr: 0.100000, loss: 4.0371 +2023-09-10 13:22:20 - train: epoch 0001, iter [03000, 05004], lr: 0.100000, loss: 4.1586 
+2023-09-10 13:22:58 - train: epoch 0001, iter [03100, 05004], lr: 0.100000, loss: 4.1911 +2023-09-10 13:23:34 - train: epoch 0001, iter [03200, 05004], lr: 0.100000, loss: 3.9603 +2023-09-10 13:24:09 - train: epoch 0001, iter [03300, 05004], lr: 0.100000, loss: 3.7084 +2023-09-10 13:24:47 - train: epoch 0001, iter [03400, 05004], lr: 0.100000, loss: 3.9293 +2023-09-10 13:25:25 - train: epoch 0001, iter [03500, 05004], lr: 0.100000, loss: 3.8576 +2023-09-10 13:26:00 - train: epoch 0001, iter [03600, 05004], lr: 0.100000, loss: 3.8884 +2023-09-10 13:26:38 - train: epoch 0001, iter [03700, 05004], lr: 0.100000, loss: 3.9073 +2023-09-10 13:27:15 - train: epoch 0001, iter [03800, 05004], lr: 0.100000, loss: 3.6313 +2023-09-10 13:27:49 - train: epoch 0001, iter [03900, 05004], lr: 0.100000, loss: 3.6930 +2023-09-10 13:28:26 - train: epoch 0001, iter [04000, 05004], lr: 0.100000, loss: 3.6740 +2023-09-10 13:29:06 - train: epoch 0001, iter [04100, 05004], lr: 0.100000, loss: 3.8830 +2023-09-10 13:29:41 - train: epoch 0001, iter [04200, 05004], lr: 0.100000, loss: 3.6041 +2023-09-10 13:30:18 - train: epoch 0001, iter [04300, 05004], lr: 0.100000, loss: 3.6731 +2023-09-10 13:30:55 - train: epoch 0001, iter [04400, 05004], lr: 0.100000, loss: 3.4271 +2023-09-10 13:31:30 - train: epoch 0001, iter [04500, 05004], lr: 0.100000, loss: 3.6142 +2023-09-10 13:32:06 - train: epoch 0001, iter [04600, 05004], lr: 0.100000, loss: 3.7867 +2023-09-10 13:32:45 - train: epoch 0001, iter [04700, 05004], lr: 0.100000, loss: 3.6151 +2023-09-10 13:33:18 - train: epoch 0001, iter [04800, 05004], lr: 0.100000, loss: 3.6295 +2023-09-10 13:33:56 - train: epoch 0001, iter [04900, 05004], lr: 0.100000, loss: 3.4318 +2023-09-10 13:34:30 - train: epoch 0001, iter [05000, 05004], lr: 0.100000, loss: 3.3651 +2023-09-10 13:34:30 - train: epoch 001, train_loss: 4.5371 +2023-09-10 13:35:53 - eval: epoch: 001, acc1: 28.782%, acc5: 53.992%, test_loss: 3.4079, per_image_load_time: 1.352ms, per_image_inference_time: 0.289ms +2023-09-10 13:35:53 - until epoch: 001, best_acc1: 28.782% +2023-09-10 13:35:53 - epoch 002 lr: 0.100000 +2023-09-10 13:36:38 - train: epoch 0002, iter [00100, 05004], lr: 0.100000, loss: 3.4767 +2023-09-10 13:37:14 - train: epoch 0002, iter [00200, 05004], lr: 0.100000, loss: 3.2297 +2023-09-10 13:37:49 - train: epoch 0002, iter [00300, 05004], lr: 0.100000, loss: 3.4643 +2023-09-10 13:38:27 - train: epoch 0002, iter [00400, 05004], lr: 0.100000, loss: 3.7648 +2023-09-10 13:39:01 - train: epoch 0002, iter [00500, 05004], lr: 0.100000, loss: 3.2184 +2023-09-10 13:39:36 - train: epoch 0002, iter [00600, 05004], lr: 0.100000, loss: 3.2586 +2023-09-10 13:40:14 - train: epoch 0002, iter [00700, 05004], lr: 0.100000, loss: 3.4815 +2023-09-10 13:40:49 - train: epoch 0002, iter [00800, 05004], lr: 0.100000, loss: 3.1947 +2023-09-10 13:41:25 - train: epoch 0002, iter [00900, 05004], lr: 0.100000, loss: 3.1713 +2023-09-10 13:42:00 - train: epoch 0002, iter [01000, 05004], lr: 0.100000, loss: 3.3200 +2023-09-10 13:42:36 - train: epoch 0002, iter [01100, 05004], lr: 0.100000, loss: 3.2249 +2023-09-10 13:43:14 - train: epoch 0002, iter [01200, 05004], lr: 0.100000, loss: 3.3837 +2023-09-10 13:43:48 - train: epoch 0002, iter [01300, 05004], lr: 0.100000, loss: 3.2134 +2023-09-10 13:44:25 - train: epoch 0002, iter [01400, 05004], lr: 0.100000, loss: 3.3950 +2023-09-10 13:45:01 - train: epoch 0002, iter [01500, 05004], lr: 0.100000, loss: 3.3494 +2023-09-10 13:45:36 - train: epoch 0002, iter [01600, 05004], lr: 0.100000, loss: 
3.1704 +2023-09-10 13:46:11 - train: epoch 0002, iter [01700, 05004], lr: 0.100000, loss: 3.2359 +2023-09-10 13:46:49 - train: epoch 0002, iter [01800, 05004], lr: 0.100000, loss: 3.2341 +2023-09-10 13:47:23 - train: epoch 0002, iter [01900, 05004], lr: 0.100000, loss: 3.1737 +2023-09-10 13:47:59 - train: epoch 0002, iter [02000, 05004], lr: 0.100000, loss: 2.9579 +2023-09-10 13:48:35 - train: epoch 0002, iter [02100, 05004], lr: 0.100000, loss: 3.3989 +2023-09-10 13:49:11 - train: epoch 0002, iter [02200, 05004], lr: 0.100000, loss: 3.0869 +2023-09-10 13:49:48 - train: epoch 0002, iter [02300, 05004], lr: 0.100000, loss: 3.2520 +2023-09-10 13:50:23 - train: epoch 0002, iter [02400, 05004], lr: 0.100000, loss: 3.1377 +2023-09-10 13:50:57 - train: epoch 0002, iter [02500, 05004], lr: 0.100000, loss: 2.9957 +2023-09-10 13:51:35 - train: epoch 0002, iter [02600, 05004], lr: 0.100000, loss: 3.0439 +2023-09-10 13:52:09 - train: epoch 0002, iter [02700, 05004], lr: 0.100000, loss: 3.2430 +2023-09-10 13:52:44 - train: epoch 0002, iter [02800, 05004], lr: 0.100000, loss: 3.2384 +2023-09-10 13:53:21 - train: epoch 0002, iter [02900, 05004], lr: 0.100000, loss: 3.1013 +2023-09-10 13:53:57 - train: epoch 0002, iter [03000, 05004], lr: 0.100000, loss: 3.1357 +2023-09-10 13:54:32 - train: epoch 0002, iter [03100, 05004], lr: 0.100000, loss: 2.8581 +2023-09-10 13:55:10 - train: epoch 0002, iter [03200, 05004], lr: 0.100000, loss: 2.8836 +2023-09-10 13:55:44 - train: epoch 0002, iter [03300, 05004], lr: 0.100000, loss: 3.1406 +2023-09-10 13:56:19 - train: epoch 0002, iter [03400, 05004], lr: 0.100000, loss: 3.0703 +2023-09-10 13:56:56 - train: epoch 0002, iter [03500, 05004], lr: 0.100000, loss: 2.9752 +2023-09-10 13:57:32 - train: epoch 0002, iter [03600, 05004], lr: 0.100000, loss: 3.1480 +2023-09-10 13:58:06 - train: epoch 0002, iter [03700, 05004], lr: 0.100000, loss: 3.1622 +2023-09-10 13:58:43 - train: epoch 0002, iter [03800, 05004], lr: 0.100000, loss: 2.8442 +2023-09-10 13:59:19 - train: epoch 0002, iter [03900, 05004], lr: 0.100000, loss: 3.0705 +2023-09-10 13:59:54 - train: epoch 0002, iter [04000, 05004], lr: 0.100000, loss: 2.8338 +2023-09-10 14:00:31 - train: epoch 0002, iter [04100, 05004], lr: 0.100000, loss: 3.2130 +2023-09-10 14:01:09 - train: epoch 0002, iter [04200, 05004], lr: 0.100000, loss: 3.0423 +2023-09-10 14:01:44 - train: epoch 0002, iter [04300, 05004], lr: 0.100000, loss: 2.9898 +2023-09-10 14:02:18 - train: epoch 0002, iter [04400, 05004], lr: 0.100000, loss: 2.8656 +2023-09-10 14:02:56 - train: epoch 0002, iter [04500, 05004], lr: 0.100000, loss: 2.8824 +2023-09-10 14:03:30 - train: epoch 0002, iter [04600, 05004], lr: 0.100000, loss: 2.9897 +2023-09-10 14:04:05 - train: epoch 0002, iter [04700, 05004], lr: 0.100000, loss: 2.9565 +2023-09-10 14:04:44 - train: epoch 0002, iter [04800, 05004], lr: 0.100000, loss: 3.0500 +2023-09-10 14:05:19 - train: epoch 0002, iter [04900, 05004], lr: 0.100000, loss: 2.8672 +2023-09-10 14:05:52 - train: epoch 0002, iter [05000, 05004], lr: 0.100000, loss: 2.9895 +2023-09-10 14:05:54 - train: epoch 002, train_loss: 3.1251 +2023-09-10 14:07:17 - eval: epoch: 002, acc1: 40.400%, acc5: 67.150%, test_loss: 2.6651, per_image_load_time: 1.374ms, per_image_inference_time: 0.282ms +2023-09-10 14:07:18 - until epoch: 002, best_acc1: 40.400% +2023-09-10 14:07:18 - epoch 003 lr: 0.100000 +2023-09-10 14:08:00 - train: epoch 0003, iter [00100, 05004], lr: 0.100000, loss: 2.9494 +2023-09-10 14:08:38 - train: epoch 0003, iter [00200, 05004], lr: 0.100000, 
loss: 2.8452 +2023-09-10 14:09:13 - train: epoch 0003, iter [00300, 05004], lr: 0.100000, loss: 2.9655 +2023-09-10 14:09:53 - train: epoch 0003, iter [00400, 05004], lr: 0.100000, loss: 2.9407 +2023-09-10 14:10:29 - train: epoch 0003, iter [00500, 05004], lr: 0.100000, loss: 3.0130 +2023-09-10 14:11:05 - train: epoch 0003, iter [00600, 05004], lr: 0.100000, loss: 2.6720 +2023-09-10 14:11:44 - train: epoch 0003, iter [00700, 05004], lr: 0.100000, loss: 2.8583 +2023-09-10 14:12:19 - train: epoch 0003, iter [00800, 05004], lr: 0.100000, loss: 3.0129 +2023-09-10 14:12:55 - train: epoch 0003, iter [00900, 05004], lr: 0.100000, loss: 2.6826 +2023-09-10 14:13:33 - train: epoch 0003, iter [01000, 05004], lr: 0.100000, loss: 2.9025 +2023-09-10 14:14:10 - train: epoch 0003, iter [01100, 05004], lr: 0.100000, loss: 2.8495 +2023-09-10 14:14:43 - train: epoch 0003, iter [01200, 05004], lr: 0.100000, loss: 2.8829 +2023-09-10 14:15:21 - train: epoch 0003, iter [01300, 05004], lr: 0.100000, loss: 2.7405 +2023-09-10 14:15:58 - train: epoch 0003, iter [01400, 05004], lr: 0.100000, loss: 2.7222 +2023-09-10 14:16:33 - train: epoch 0003, iter [01500, 05004], lr: 0.100000, loss: 3.0222 +2023-09-10 14:17:08 - train: epoch 0003, iter [01600, 05004], lr: 0.100000, loss: 2.7435 +2023-09-10 14:17:47 - train: epoch 0003, iter [01700, 05004], lr: 0.100000, loss: 2.9219 +2023-09-10 14:18:22 - train: epoch 0003, iter [01800, 05004], lr: 0.100000, loss: 2.9025 +2023-09-10 14:18:58 - train: epoch 0003, iter [01900, 05004], lr: 0.100000, loss: 2.9395 +2023-09-10 14:19:36 - train: epoch 0003, iter [02000, 05004], lr: 0.100000, loss: 2.7097 +2023-09-10 14:20:11 - train: epoch 0003, iter [02100, 05004], lr: 0.100000, loss: 2.8676 +2023-09-10 14:20:47 - train: epoch 0003, iter [02200, 05004], lr: 0.100000, loss: 3.2387 +2023-09-10 14:21:25 - train: epoch 0003, iter [02300, 05004], lr: 0.100000, loss: 2.8133 +2023-09-10 14:22:00 - train: epoch 0003, iter [02400, 05004], lr: 0.100000, loss: 2.7212 +2023-09-10 14:22:36 - train: epoch 0003, iter [02500, 05004], lr: 0.100000, loss: 2.8393 +2023-09-10 14:23:14 - train: epoch 0003, iter [02600, 05004], lr: 0.100000, loss: 2.7010 +2023-09-10 14:23:50 - train: epoch 0003, iter [02700, 05004], lr: 0.100000, loss: 3.1034 +2023-09-10 14:24:24 - train: epoch 0003, iter [02800, 05004], lr: 0.100000, loss: 2.9464 +2023-09-10 14:25:01 - train: epoch 0003, iter [02900, 05004], lr: 0.100000, loss: 2.8296 +2023-09-10 14:25:37 - train: epoch 0003, iter [03000, 05004], lr: 0.100000, loss: 2.9070 +2023-09-10 14:26:12 - train: epoch 0003, iter [03100, 05004], lr: 0.100000, loss: 3.0577 +2023-09-10 14:26:50 - train: epoch 0003, iter [03200, 05004], lr: 0.100000, loss: 2.9689 +2023-09-10 14:27:26 - train: epoch 0003, iter [03300, 05004], lr: 0.100000, loss: 2.9025 +2023-09-10 14:28:01 - train: epoch 0003, iter [03400, 05004], lr: 0.100000, loss: 2.9823 +2023-09-10 14:28:39 - train: epoch 0003, iter [03500, 05004], lr: 0.100000, loss: 2.6014 +2023-09-10 14:29:15 - train: epoch 0003, iter [03600, 05004], lr: 0.100000, loss: 2.7094 +2023-09-10 14:29:50 - train: epoch 0003, iter [03700, 05004], lr: 0.100000, loss: 2.9373 +2023-09-10 14:30:28 - train: epoch 0003, iter [03800, 05004], lr: 0.100000, loss: 2.9779 +2023-09-10 14:31:04 - train: epoch 0003, iter [03900, 05004], lr: 0.100000, loss: 3.1091 +2023-09-10 14:31:40 - train: epoch 0003, iter [04000, 05004], lr: 0.100000, loss: 2.5649 +2023-09-10 14:32:17 - train: epoch 0003, iter [04100, 05004], lr: 0.100000, loss: 2.7675 +2023-09-10 14:32:51 - train: 
epoch 0003, iter [04200, 05004], lr: 0.100000, loss: 2.8928 +2023-09-10 14:33:27 - train: epoch 0003, iter [04300, 05004], lr: 0.100000, loss: 2.5458 +2023-09-10 14:34:05 - train: epoch 0003, iter [04400, 05004], lr: 0.100000, loss: 2.6472 +2023-09-10 14:34:41 - train: epoch 0003, iter [04500, 05004], lr: 0.100000, loss: 2.7758 +2023-09-10 14:35:15 - train: epoch 0003, iter [04600, 05004], lr: 0.100000, loss: 2.6959 +2023-09-10 14:35:53 - train: epoch 0003, iter [04700, 05004], lr: 0.100000, loss: 2.9056 +2023-09-10 14:36:28 - train: epoch 0003, iter [04800, 05004], lr: 0.100000, loss: 2.9065 +2023-09-10 14:37:04 - train: epoch 0003, iter [04900, 05004], lr: 0.100000, loss: 2.8535 +2023-09-10 14:37:38 - train: epoch 0003, iter [05000, 05004], lr: 0.100000, loss: 2.8631 +2023-09-10 14:37:39 - train: epoch 003, train_loss: 2.8067 +2023-09-10 14:39:02 - eval: epoch: 003, acc1: 42.926%, acc5: 69.176%, test_loss: 2.5603, per_image_load_time: 1.371ms, per_image_inference_time: 0.277ms +2023-09-10 14:39:02 - until epoch: 003, best_acc1: 42.926% +2023-09-10 14:39:02 - epoch 004 lr: 0.100000 +2023-09-10 14:39:45 - train: epoch 0004, iter [00100, 05004], lr: 0.100000, loss: 2.6953 +2023-09-10 14:40:21 - train: epoch 0004, iter [00200, 05004], lr: 0.100000, loss: 2.4838 +2023-09-10 14:40:56 - train: epoch 0004, iter [00300, 05004], lr: 0.100000, loss: 2.6695 +2023-09-10 14:41:33 - train: epoch 0004, iter [00400, 05004], lr: 0.100000, loss: 2.7355 +2023-09-10 14:42:09 - train: epoch 0004, iter [00500, 05004], lr: 0.100000, loss: 2.4817 +2023-09-10 14:42:44 - train: epoch 0004, iter [00600, 05004], lr: 0.100000, loss: 2.8909 +2023-09-10 14:43:19 - train: epoch 0004, iter [00700, 05004], lr: 0.100000, loss: 2.6965 +2023-09-10 14:43:56 - train: epoch 0004, iter [00800, 05004], lr: 0.100000, loss: 2.6306 +2023-09-10 14:44:31 - train: epoch 0004, iter [00900, 05004], lr: 0.100000, loss: 2.4379 +2023-09-10 14:45:09 - train: epoch 0004, iter [01000, 05004], lr: 0.100000, loss: 2.6739 +2023-09-10 14:45:44 - train: epoch 0004, iter [01100, 05004], lr: 0.100000, loss: 2.9176 +2023-09-10 14:46:19 - train: epoch 0004, iter [01200, 05004], lr: 0.100000, loss: 2.4550 +2023-09-10 14:46:56 - train: epoch 0004, iter [01300, 05004], lr: 0.100000, loss: 2.5684 +2023-09-10 14:47:31 - train: epoch 0004, iter [01400, 05004], lr: 0.100000, loss: 2.5537 +2023-09-10 14:48:07 - train: epoch 0004, iter [01500, 05004], lr: 0.100000, loss: 2.6345 +2023-09-10 14:48:43 - train: epoch 0004, iter [01600, 05004], lr: 0.100000, loss: 2.7555 +2023-09-10 14:49:20 - train: epoch 0004, iter [01700, 05004], lr: 0.100000, loss: 2.8297 +2023-09-10 14:49:54 - train: epoch 0004, iter [01800, 05004], lr: 0.100000, loss: 2.7384 +2023-09-10 14:50:29 - train: epoch 0004, iter [01900, 05004], lr: 0.100000, loss: 2.6921 +2023-09-10 14:51:06 - train: epoch 0004, iter [02000, 05004], lr: 0.100000, loss: 2.4988 +2023-09-10 14:51:42 - train: epoch 0004, iter [02100, 05004], lr: 0.100000, loss: 2.6783 +2023-09-10 14:52:16 - train: epoch 0004, iter [02200, 05004], lr: 0.100000, loss: 2.5510 +2023-09-10 14:52:53 - train: epoch 0004, iter [02300, 05004], lr: 0.100000, loss: 2.3022 +2023-09-10 14:53:28 - train: epoch 0004, iter [02400, 05004], lr: 0.100000, loss: 2.3642 +2023-09-10 14:54:03 - train: epoch 0004, iter [02500, 05004], lr: 0.100000, loss: 2.3855 +2023-09-10 14:54:41 - train: epoch 0004, iter [02600, 05004], lr: 0.100000, loss: 2.7039 +2023-09-10 14:55:14 - train: epoch 0004, iter [02700, 05004], lr: 0.100000, loss: 2.6296 +2023-09-10 14:55:52 - 
train: epoch 0004, iter [02800, 05004], lr: 0.100000, loss: 2.9072 +2023-09-10 14:56:27 - train: epoch 0004, iter [02900, 05004], lr: 0.100000, loss: 2.5772 +2023-09-10 14:57:03 - train: epoch 0004, iter [03000, 05004], lr: 0.100000, loss: 2.6543 +2023-09-10 14:57:38 - train: epoch 0004, iter [03100, 05004], lr: 0.100000, loss: 2.4650 +2023-09-10 14:58:15 - train: epoch 0004, iter [03200, 05004], lr: 0.100000, loss: 2.6827 +2023-09-10 14:58:50 - train: epoch 0004, iter [03300, 05004], lr: 0.100000, loss: 2.6228 +2023-09-10 14:59:25 - train: epoch 0004, iter [03400, 05004], lr: 0.100000, loss: 2.6185 +2023-09-10 15:00:02 - train: epoch 0004, iter [03500, 05004], lr: 0.100000, loss: 2.5835 +2023-09-10 15:00:37 - train: epoch 0004, iter [03600, 05004], lr: 0.100000, loss: 2.6128 +2023-09-10 15:01:13 - train: epoch 0004, iter [03700, 05004], lr: 0.100000, loss: 2.6906 +2023-09-10 15:01:51 - train: epoch 0004, iter [03800, 05004], lr: 0.100000, loss: 2.3922 +2023-09-10 15:02:27 - train: epoch 0004, iter [03900, 05004], lr: 0.100000, loss: 2.3353 +2023-09-10 15:03:01 - train: epoch 0004, iter [04000, 05004], lr: 0.100000, loss: 2.5205 +2023-09-10 15:03:39 - train: epoch 0004, iter [04100, 05004], lr: 0.100000, loss: 2.4728 +2023-09-10 15:04:14 - train: epoch 0004, iter [04200, 05004], lr: 0.100000, loss: 2.4844 +2023-09-10 15:04:49 - train: epoch 0004, iter [04300, 05004], lr: 0.100000, loss: 2.4719 +2023-09-10 15:05:26 - train: epoch 0004, iter [04400, 05004], lr: 0.100000, loss: 2.5688 +2023-09-10 15:06:02 - train: epoch 0004, iter [04500, 05004], lr: 0.100000, loss: 2.1131 +2023-09-10 15:06:38 - train: epoch 0004, iter [04600, 05004], lr: 0.100000, loss: 2.4937 +2023-09-10 15:07:16 - train: epoch 0004, iter [04700, 05004], lr: 0.100000, loss: 2.4556 +2023-09-10 15:07:52 - train: epoch 0004, iter [04800, 05004], lr: 0.100000, loss: 2.2790 +2023-09-10 15:08:26 - train: epoch 0004, iter [04900, 05004], lr: 0.100000, loss: 2.6044 +2023-09-10 15:09:00 - train: epoch 0004, iter [05000, 05004], lr: 0.100000, loss: 2.6859 +2023-09-10 15:09:01 - train: epoch 004, train_loss: 2.6614 +2023-09-10 15:10:23 - eval: epoch: 004, acc1: 43.890%, acc5: 70.558%, test_loss: 2.4772, per_image_load_time: 1.323ms, per_image_inference_time: 0.300ms +2023-09-10 15:10:23 - until epoch: 004, best_acc1: 43.890% +2023-09-10 15:10:23 - epoch 005 lr: 0.100000 +2023-09-10 15:11:08 - train: epoch 0005, iter [00100, 05004], lr: 0.100000, loss: 2.4106 +2023-09-10 15:11:44 - train: epoch 0005, iter [00200, 05004], lr: 0.100000, loss: 2.5905 +2023-09-10 15:12:18 - train: epoch 0005, iter [00300, 05004], lr: 0.100000, loss: 2.7093 +2023-09-10 15:12:57 - train: epoch 0005, iter [00400, 05004], lr: 0.100000, loss: 2.6030 +2023-09-10 15:13:32 - train: epoch 0005, iter [00500, 05004], lr: 0.100000, loss: 2.5061 +2023-09-10 15:14:07 - train: epoch 0005, iter [00600, 05004], lr: 0.100000, loss: 2.6596 +2023-09-10 15:14:44 - train: epoch 0005, iter [00700, 05004], lr: 0.100000, loss: 2.4312 +2023-09-10 15:15:20 - train: epoch 0005, iter [00800, 05004], lr: 0.100000, loss: 2.7375 +2023-09-10 15:15:54 - train: epoch 0005, iter [00900, 05004], lr: 0.100000, loss: 2.5633 +2023-09-10 15:16:31 - train: epoch 0005, iter [01000, 05004], lr: 0.100000, loss: 2.6940 +2023-09-10 15:17:07 - train: epoch 0005, iter [01100, 05004], lr: 0.100000, loss: 2.7762 +2023-09-10 15:17:43 - train: epoch 0005, iter [01200, 05004], lr: 0.100000, loss: 2.7016 +2023-09-10 15:18:21 - train: epoch 0005, iter [01300, 05004], lr: 0.100000, loss: 2.4078 +2023-09-10 
15:18:55 - train: epoch 0005, iter [01400, 05004], lr: 0.100000, loss: 2.5146 +2023-09-10 15:19:30 - train: epoch 0005, iter [01500, 05004], lr: 0.100000, loss: 2.4056 +2023-09-10 15:20:08 - train: epoch 0005, iter [01600, 05004], lr: 0.100000, loss: 2.3914 +2023-09-10 15:20:43 - train: epoch 0005, iter [01700, 05004], lr: 0.100000, loss: 2.6494 +2023-09-10 15:21:18 - train: epoch 0005, iter [01800, 05004], lr: 0.100000, loss: 2.5107 +2023-09-10 15:21:55 - train: epoch 0005, iter [01900, 05004], lr: 0.100000, loss: 2.4584 +2023-09-10 15:22:31 - train: epoch 0005, iter [02000, 05004], lr: 0.100000, loss: 2.6748 +2023-09-10 15:23:06 - train: epoch 0005, iter [02100, 05004], lr: 0.100000, loss: 2.5244 +2023-09-10 15:23:44 - train: epoch 0005, iter [02200, 05004], lr: 0.100000, loss: 2.5936 +2023-09-10 15:24:19 - train: epoch 0005, iter [02300, 05004], lr: 0.100000, loss: 2.5088 +2023-09-10 15:24:55 - train: epoch 0005, iter [02400, 05004], lr: 0.100000, loss: 2.8601 +2023-09-10 15:25:31 - train: epoch 0005, iter [02500, 05004], lr: 0.100000, loss: 2.4635 +2023-09-10 15:26:06 - train: epoch 0005, iter [02600, 05004], lr: 0.100000, loss: 2.6972 +2023-09-10 15:26:44 - train: epoch 0005, iter [02700, 05004], lr: 0.100000, loss: 2.5870 +2023-09-10 15:27:19 - train: epoch 0005, iter [02800, 05004], lr: 0.100000, loss: 2.6695 +2023-09-10 15:27:53 - train: epoch 0005, iter [02900, 05004], lr: 0.100000, loss: 2.3349 +2023-09-10 15:28:30 - train: epoch 0005, iter [03000, 05004], lr: 0.100000, loss: 2.5473 +2023-09-10 15:29:06 - train: epoch 0005, iter [03100, 05004], lr: 0.100000, loss: 2.4879 +2023-09-10 15:29:41 - train: epoch 0005, iter [03200, 05004], lr: 0.100000, loss: 2.6710 +2023-09-10 15:30:19 - train: epoch 0005, iter [03300, 05004], lr: 0.100000, loss: 2.6855 +2023-09-10 15:30:55 - train: epoch 0005, iter [03400, 05004], lr: 0.100000, loss: 2.4323 +2023-09-10 15:31:30 - train: epoch 0005, iter [03500, 05004], lr: 0.100000, loss: 2.6285 +2023-09-10 15:32:07 - train: epoch 0005, iter [03600, 05004], lr: 0.100000, loss: 2.5587 +2023-09-10 15:32:42 - train: epoch 0005, iter [03700, 05004], lr: 0.100000, loss: 2.2974 +2023-09-10 15:33:17 - train: epoch 0005, iter [03800, 05004], lr: 0.100000, loss: 2.5157 +2023-09-10 15:33:55 - train: epoch 0005, iter [03900, 05004], lr: 0.100000, loss: 2.6539 +2023-09-10 15:34:30 - train: epoch 0005, iter [04000, 05004], lr: 0.100000, loss: 2.4068 +2023-09-10 15:35:05 - train: epoch 0005, iter [04100, 05004], lr: 0.100000, loss: 2.6005 +2023-09-10 15:35:43 - train: epoch 0005, iter [04200, 05004], lr: 0.100000, loss: 2.5974 +2023-09-10 15:36:18 - train: epoch 0005, iter [04300, 05004], lr: 0.100000, loss: 2.6146 +2023-09-10 15:36:54 - train: epoch 0005, iter [04400, 05004], lr: 0.100000, loss: 2.6870 +2023-09-10 15:37:32 - train: epoch 0005, iter [04500, 05004], lr: 0.100000, loss: 2.5711 +2023-09-10 15:38:07 - train: epoch 0005, iter [04600, 05004], lr: 0.100000, loss: 2.4746 +2023-09-10 15:38:40 - train: epoch 0005, iter [04700, 05004], lr: 0.100000, loss: 2.4081 +2023-09-10 15:39:18 - train: epoch 0005, iter [04800, 05004], lr: 0.100000, loss: 2.3775 +2023-09-10 15:39:54 - train: epoch 0005, iter [04900, 05004], lr: 0.100000, loss: 2.7069 +2023-09-10 15:40:27 - train: epoch 0005, iter [05000, 05004], lr: 0.100000, loss: 2.3609 +2023-09-10 15:40:28 - train: epoch 005, train_loss: 2.5783 +2023-09-10 15:41:50 - eval: epoch: 005, acc1: 46.524%, acc5: 72.460%, test_loss: 2.3468, per_image_load_time: 1.327ms, per_image_inference_time: 0.294ms +2023-09-10 15:41:50 - 
until epoch: 005, best_acc1: 46.524% +2023-09-10 15:41:50 - epoch 006 lr: 0.100000 +2023-09-10 15:42:35 - train: epoch 0006, iter [00100, 05004], lr: 0.100000, loss: 2.5602 +2023-09-10 15:43:11 - train: epoch 0006, iter [00200, 05004], lr: 0.100000, loss: 2.7236 +2023-09-10 15:43:47 - train: epoch 0006, iter [00300, 05004], lr: 0.100000, loss: 2.3072 +2023-09-10 15:44:24 - train: epoch 0006, iter [00400, 05004], lr: 0.100000, loss: 2.5152 +2023-09-10 15:45:01 - train: epoch 0006, iter [00500, 05004], lr: 0.100000, loss: 2.3604 +2023-09-10 15:45:35 - train: epoch 0006, iter [00600, 05004], lr: 0.100000, loss: 2.5412 +2023-09-10 15:46:14 - train: epoch 0006, iter [00700, 05004], lr: 0.100000, loss: 2.6862 +2023-09-10 15:46:50 - train: epoch 0006, iter [00800, 05004], lr: 0.100000, loss: 2.6090 +2023-09-10 15:47:27 - train: epoch 0006, iter [00900, 05004], lr: 0.100000, loss: 2.4673 +2023-09-10 15:48:02 - train: epoch 0006, iter [01000, 05004], lr: 0.100000, loss: 2.3673 +2023-09-10 15:48:39 - train: epoch 0006, iter [01100, 05004], lr: 0.100000, loss: 2.6044 +2023-09-10 15:49:16 - train: epoch 0006, iter [01200, 05004], lr: 0.100000, loss: 2.5036 +2023-09-10 15:49:52 - train: epoch 0006, iter [01300, 05004], lr: 0.100000, loss: 2.4251 +2023-09-10 15:50:29 - train: epoch 0006, iter [01400, 05004], lr: 0.100000, loss: 2.4162 +2023-09-10 15:51:05 - train: epoch 0006, iter [01500, 05004], lr: 0.100000, loss: 2.7829 +2023-09-10 15:51:42 - train: epoch 0006, iter [01600, 05004], lr: 0.100000, loss: 2.1988 +2023-09-10 15:52:18 - train: epoch 0006, iter [01700, 05004], lr: 0.100000, loss: 2.4139 +2023-09-10 15:52:56 - train: epoch 0006, iter [01800, 05004], lr: 0.100000, loss: 2.5050 +2023-09-10 15:53:30 - train: epoch 0006, iter [01900, 05004], lr: 0.100000, loss: 2.4934 +2023-09-10 15:54:06 - train: epoch 0006, iter [02000, 05004], lr: 0.100000, loss: 2.6659 +2023-09-10 15:54:43 - train: epoch 0006, iter [02100, 05004], lr: 0.100000, loss: 2.4148 +2023-09-10 15:55:17 - train: epoch 0006, iter [02200, 05004], lr: 0.100000, loss: 2.4920 +2023-09-10 15:55:52 - train: epoch 0006, iter [02300, 05004], lr: 0.100000, loss: 2.4030 +2023-09-10 15:56:30 - train: epoch 0006, iter [02400, 05004], lr: 0.100000, loss: 2.3987 +2023-09-10 15:57:04 - train: epoch 0006, iter [02500, 05004], lr: 0.100000, loss: 2.4720 +2023-09-10 15:57:40 - train: epoch 0006, iter [02600, 05004], lr: 0.100000, loss: 2.4472 +2023-09-10 15:58:18 - train: epoch 0006, iter [02700, 05004], lr: 0.100000, loss: 2.7492 +2023-09-10 15:58:53 - train: epoch 0006, iter [02800, 05004], lr: 0.100000, loss: 2.3303 +2023-09-10 15:59:28 - train: epoch 0006, iter [02900, 05004], lr: 0.100000, loss: 2.7689 +2023-09-10 16:00:06 - train: epoch 0006, iter [03000, 05004], lr: 0.100000, loss: 2.5408 +2023-09-10 16:00:42 - train: epoch 0006, iter [03100, 05004], lr: 0.100000, loss: 2.4556 +2023-09-10 16:01:17 - train: epoch 0006, iter [03200, 05004], lr: 0.100000, loss: 2.5012 +2023-09-10 16:01:54 - train: epoch 0006, iter [03300, 05004], lr: 0.100000, loss: 2.6740 +2023-09-10 16:02:31 - train: epoch 0006, iter [03400, 05004], lr: 0.100000, loss: 2.6966 +2023-09-10 16:03:07 - train: epoch 0006, iter [03500, 05004], lr: 0.100000, loss: 2.6377 +2023-09-10 16:03:42 - train: epoch 0006, iter [03600, 05004], lr: 0.100000, loss: 2.5741 +2023-09-10 16:04:19 - train: epoch 0006, iter [03700, 05004], lr: 0.100000, loss: 2.5497 +2023-09-10 16:04:55 - train: epoch 0006, iter [03800, 05004], lr: 0.100000, loss: 2.2034 +2023-09-10 16:05:31 - train: epoch 0006, iter 
[03900, 05004], lr: 0.100000, loss: 2.6578 +2023-09-10 16:06:08 - train: epoch 0006, iter [04000, 05004], lr: 0.100000, loss: 2.5714 +2023-09-10 16:06:44 - train: epoch 0006, iter [04100, 05004], lr: 0.100000, loss: 2.5476 +2023-09-10 16:07:18 - train: epoch 0006, iter [04200, 05004], lr: 0.100000, loss: 2.4392 +2023-09-10 16:07:56 - train: epoch 0006, iter [04300, 05004], lr: 0.100000, loss: 2.5569 +2023-09-10 16:08:31 - train: epoch 0006, iter [04400, 05004], lr: 0.100000, loss: 2.4611 +2023-09-10 16:09:07 - train: epoch 0006, iter [04500, 05004], lr: 0.100000, loss: 2.6488 +2023-09-10 16:09:45 - train: epoch 0006, iter [04600, 05004], lr: 0.100000, loss: 2.5608 +2023-09-10 16:10:19 - train: epoch 0006, iter [04700, 05004], lr: 0.100000, loss: 2.3810 +2023-09-10 16:10:56 - train: epoch 0006, iter [04800, 05004], lr: 0.100000, loss: 2.5272 +2023-09-10 16:11:32 - train: epoch 0006, iter [04900, 05004], lr: 0.100000, loss: 2.5388 +2023-09-10 16:12:05 - train: epoch 0006, iter [05000, 05004], lr: 0.100000, loss: 2.4446 +2023-09-10 16:12:06 - train: epoch 006, train_loss: 2.5231 +2023-09-10 16:13:30 - eval: epoch: 006, acc1: 46.146%, acc5: 72.280%, test_loss: 2.3809, per_image_load_time: 1.371ms, per_image_inference_time: 0.295ms +2023-09-10 16:13:30 - until epoch: 006, best_acc1: 46.524% +2023-09-10 16:13:30 - epoch 007 lr: 0.100000 +2023-09-10 16:14:13 - train: epoch 0007, iter [00100, 05004], lr: 0.100000, loss: 2.3395 +2023-09-10 16:14:50 - train: epoch 0007, iter [00200, 05004], lr: 0.100000, loss: 2.6072 +2023-09-10 16:15:27 - train: epoch 0007, iter [00300, 05004], lr: 0.100000, loss: 2.6680 +2023-09-10 16:16:05 - train: epoch 0007, iter [00400, 05004], lr: 0.100000, loss: 2.3744 +2023-09-10 16:16:40 - train: epoch 0007, iter [00500, 05004], lr: 0.100000, loss: 2.1604 +2023-09-10 16:17:16 - train: epoch 0007, iter [00600, 05004], lr: 0.100000, loss: 2.4631 +2023-09-10 16:17:54 - train: epoch 0007, iter [00700, 05004], lr: 0.100000, loss: 2.2739 +2023-09-10 16:18:29 - train: epoch 0007, iter [00800, 05004], lr: 0.100000, loss: 2.5719 +2023-09-10 16:19:05 - train: epoch 0007, iter [00900, 05004], lr: 0.100000, loss: 2.5408 +2023-09-10 16:19:43 - train: epoch 0007, iter [01000, 05004], lr: 0.100000, loss: 2.3248 +2023-09-10 16:20:17 - train: epoch 0007, iter [01100, 05004], lr: 0.100000, loss: 2.5948 +2023-09-10 16:20:53 - train: epoch 0007, iter [01200, 05004], lr: 0.100000, loss: 2.4004 +2023-09-10 16:21:31 - train: epoch 0007, iter [01300, 05004], lr: 0.100000, loss: 2.4434 +2023-09-10 16:22:06 - train: epoch 0007, iter [01400, 05004], lr: 0.100000, loss: 2.4206 +2023-09-10 16:22:42 - train: epoch 0007, iter [01500, 05004], lr: 0.100000, loss: 2.5791 +2023-09-10 16:23:20 - train: epoch 0007, iter [01600, 05004], lr: 0.100000, loss: 2.4737 +2023-09-10 16:23:56 - train: epoch 0007, iter [01700, 05004], lr: 0.100000, loss: 2.6944 +2023-09-10 16:24:33 - train: epoch 0007, iter [01800, 05004], lr: 0.100000, loss: 2.6057 +2023-09-10 16:25:09 - train: epoch 0007, iter [01900, 05004], lr: 0.100000, loss: 2.7110 +2023-09-10 16:25:45 - train: epoch 0007, iter [02000, 05004], lr: 0.100000, loss: 2.2347 +2023-09-10 16:26:21 - train: epoch 0007, iter [02100, 05004], lr: 0.100000, loss: 2.5610 +2023-09-10 16:26:58 - train: epoch 0007, iter [02200, 05004], lr: 0.100000, loss: 2.3912 +2023-09-10 16:27:34 - train: epoch 0007, iter [02300, 05004], lr: 0.100000, loss: 2.4838 +2023-09-10 16:28:11 - train: epoch 0007, iter [02400, 05004], lr: 0.100000, loss: 2.4900 +2023-09-10 16:28:49 - train: epoch 0007, 
iter [02500, 05004], lr: 0.100000, loss: 2.4391 +2023-09-10 16:29:25 - train: epoch 0007, iter [02600, 05004], lr: 0.100000, loss: 2.4083 +2023-09-10 16:30:00 - train: epoch 0007, iter [02700, 05004], lr: 0.100000, loss: 2.2280 +2023-09-10 16:30:39 - train: epoch 0007, iter [02800, 05004], lr: 0.100000, loss: 2.3840 +2023-09-10 16:31:14 - train: epoch 0007, iter [02900, 05004], lr: 0.100000, loss: 2.3643 +2023-09-10 16:31:49 - train: epoch 0007, iter [03000, 05004], lr: 0.100000, loss: 2.5136 +2023-09-10 16:32:28 - train: epoch 0007, iter [03100, 05004], lr: 0.100000, loss: 2.2967 +2023-09-10 16:33:03 - train: epoch 0007, iter [03200, 05004], lr: 0.100000, loss: 2.4727 +2023-09-10 16:33:39 - train: epoch 0007, iter [03300, 05004], lr: 0.100000, loss: 2.6421 +2023-09-10 16:34:17 - train: epoch 0007, iter [03400, 05004], lr: 0.100000, loss: 2.7403 +2023-09-10 16:34:51 - train: epoch 0007, iter [03500, 05004], lr: 0.100000, loss: 2.5728 +2023-09-10 16:35:27 - train: epoch 0007, iter [03600, 05004], lr: 0.100000, loss: 2.1889 +2023-09-10 16:36:05 - train: epoch 0007, iter [03700, 05004], lr: 0.100000, loss: 2.3479 +2023-09-10 16:36:40 - train: epoch 0007, iter [03800, 05004], lr: 0.100000, loss: 2.7227 +2023-09-10 16:37:17 - train: epoch 0007, iter [03900, 05004], lr: 0.100000, loss: 2.6947 +2023-09-10 16:37:53 - train: epoch 0007, iter [04000, 05004], lr: 0.100000, loss: 2.7622 +2023-09-10 16:38:29 - train: epoch 0007, iter [04100, 05004], lr: 0.100000, loss: 2.5160 +2023-09-10 16:39:07 - train: epoch 0007, iter [04200, 05004], lr: 0.100000, loss: 2.4043 +2023-09-10 16:39:42 - train: epoch 0007, iter [04300, 05004], lr: 0.100000, loss: 2.6191 +2023-09-10 16:40:18 - train: epoch 0007, iter [04400, 05004], lr: 0.100000, loss: 2.3116 +2023-09-10 16:40:56 - train: epoch 0007, iter [04500, 05004], lr: 0.100000, loss: 2.5199 +2023-09-10 16:41:32 - train: epoch 0007, iter [04600, 05004], lr: 0.100000, loss: 2.4498 +2023-09-10 16:42:10 - train: epoch 0007, iter [04700, 05004], lr: 0.100000, loss: 2.5591 +2023-09-10 16:42:46 - train: epoch 0007, iter [04800, 05004], lr: 0.100000, loss: 2.5879 +2023-09-10 16:43:21 - train: epoch 0007, iter [04900, 05004], lr: 0.100000, loss: 2.6291 +2023-09-10 16:43:55 - train: epoch 0007, iter [05000, 05004], lr: 0.100000, loss: 2.3614 +2023-09-10 16:43:57 - train: epoch 007, train_loss: 2.4811 +2023-09-10 16:45:21 - eval: epoch: 007, acc1: 46.158%, acc5: 72.284%, test_loss: 2.3772, per_image_load_time: 1.385ms, per_image_inference_time: 0.285ms +2023-09-10 16:45:21 - until epoch: 007, best_acc1: 46.524% +2023-09-10 16:45:21 - epoch 008 lr: 0.100000 +2023-09-10 16:46:06 - train: epoch 0008, iter [00100, 05004], lr: 0.100000, loss: 2.5394 +2023-09-10 16:46:42 - train: epoch 0008, iter [00200, 05004], lr: 0.100000, loss: 2.4721 +2023-09-10 16:47:18 - train: epoch 0008, iter [00300, 05004], lr: 0.100000, loss: 2.3763 +2023-09-10 16:47:57 - train: epoch 0008, iter [00400, 05004], lr: 0.100000, loss: 2.3143 +2023-09-10 16:48:33 - train: epoch 0008, iter [00500, 05004], lr: 0.100000, loss: 2.4575 +2023-09-10 16:49:08 - train: epoch 0008, iter [00600, 05004], lr: 0.100000, loss: 2.3995 +2023-09-10 16:49:48 - train: epoch 0008, iter [00700, 05004], lr: 0.100000, loss: 2.5943 +2023-09-10 16:50:24 - train: epoch 0008, iter [00800, 05004], lr: 0.100000, loss: 2.3684 +2023-09-10 16:50:59 - train: epoch 0008, iter [00900, 05004], lr: 0.100000, loss: 2.2566 +2023-09-10 16:51:40 - train: epoch 0008, iter [01000, 05004], lr: 0.100000, loss: 2.4149 +2023-09-10 16:52:15 - train: epoch 
0008, iter [01100, 05004], lr: 0.100000, loss: 2.7000 +2023-09-10 16:52:51 - train: epoch 0008, iter [01200, 05004], lr: 0.100000, loss: 2.4326 +2023-09-10 16:53:29 - train: epoch 0008, iter [01300, 05004], lr: 0.100000, loss: 2.5515 +2023-09-10 16:54:07 - train: epoch 0008, iter [01400, 05004], lr: 0.100000, loss: 2.3938 +2023-09-10 16:54:44 - train: epoch 0008, iter [01500, 05004], lr: 0.100000, loss: 2.4640 +2023-09-10 16:55:22 - train: epoch 0008, iter [01600, 05004], lr: 0.100000, loss: 2.5422 +2023-09-10 16:55:59 - train: epoch 0008, iter [01700, 05004], lr: 0.100000, loss: 2.5972 +2023-09-10 16:56:33 - train: epoch 0008, iter [01800, 05004], lr: 0.100000, loss: 2.5365 +2023-09-10 16:57:11 - train: epoch 0008, iter [01900, 05004], lr: 0.100000, loss: 2.2196 +2023-09-10 16:57:48 - train: epoch 0008, iter [02000, 05004], lr: 0.100000, loss: 2.4435 +2023-09-10 16:58:25 - train: epoch 0008, iter [02100, 05004], lr: 0.100000, loss: 2.4360 +2023-09-10 16:59:00 - train: epoch 0008, iter [02200, 05004], lr: 0.100000, loss: 2.3074 +2023-09-10 16:59:38 - train: epoch 0008, iter [02300, 05004], lr: 0.100000, loss: 2.4856 +2023-09-10 17:00:14 - train: epoch 0008, iter [02400, 05004], lr: 0.100000, loss: 2.4188 +2023-09-10 17:00:51 - train: epoch 0008, iter [02500, 05004], lr: 0.100000, loss: 2.3388 +2023-09-10 17:01:28 - train: epoch 0008, iter [02600, 05004], lr: 0.100000, loss: 2.3961 +2023-09-10 17:02:04 - train: epoch 0008, iter [02700, 05004], lr: 0.100000, loss: 2.6560 +2023-09-10 17:02:39 - train: epoch 0008, iter [02800, 05004], lr: 0.100000, loss: 2.5300 +2023-09-10 17:03:18 - train: epoch 0008, iter [02900, 05004], lr: 0.100000, loss: 2.4551 +2023-09-10 17:03:54 - train: epoch 0008, iter [03000, 05004], lr: 0.100000, loss: 2.4513 +2023-09-10 17:04:30 - train: epoch 0008, iter [03100, 05004], lr: 0.100000, loss: 2.4327 +2023-09-10 17:05:07 - train: epoch 0008, iter [03200, 05004], lr: 0.100000, loss: 2.6798 +2023-09-10 17:05:43 - train: epoch 0008, iter [03300, 05004], lr: 0.100000, loss: 2.6175 +2023-09-10 17:06:21 - train: epoch 0008, iter [03400, 05004], lr: 0.100000, loss: 2.2486 +2023-09-10 17:06:57 - train: epoch 0008, iter [03500, 05004], lr: 0.100000, loss: 2.5252 +2023-09-10 17:07:33 - train: epoch 0008, iter [03600, 05004], lr: 0.100000, loss: 2.5598 +2023-09-10 17:08:11 - train: epoch 0008, iter [03700, 05004], lr: 0.100000, loss: 2.5899 +2023-09-10 17:08:46 - train: epoch 0008, iter [03800, 05004], lr: 0.100000, loss: 2.2905 +2023-09-10 17:09:23 - train: epoch 0008, iter [03900, 05004], lr: 0.100000, loss: 2.4939 +2023-09-10 17:10:00 - train: epoch 0008, iter [04000, 05004], lr: 0.100000, loss: 2.7946 +2023-09-10 17:10:37 - train: epoch 0008, iter [04100, 05004], lr: 0.100000, loss: 2.3449 +2023-09-10 17:11:13 - train: epoch 0008, iter [04200, 05004], lr: 0.100000, loss: 2.3648 +2023-09-10 17:11:47 - train: epoch 0008, iter [04300, 05004], lr: 0.100000, loss: 2.0725 +2023-09-10 17:12:25 - train: epoch 0008, iter [04400, 05004], lr: 0.100000, loss: 2.3615 +2023-09-10 17:13:01 - train: epoch 0008, iter [04500, 05004], lr: 0.100000, loss: 2.6466 +2023-09-10 17:13:35 - train: epoch 0008, iter [04600, 05004], lr: 0.100000, loss: 2.6024 +2023-09-10 17:14:13 - train: epoch 0008, iter [04700, 05004], lr: 0.100000, loss: 2.4296 +2023-09-10 17:14:48 - train: epoch 0008, iter [04800, 05004], lr: 0.100000, loss: 2.5155 +2023-09-10 17:15:25 - train: epoch 0008, iter [04900, 05004], lr: 0.100000, loss: 2.5522 +2023-09-10 17:15:59 - train: epoch 0008, iter [05000, 05004], lr: 0.100000, 
loss: 2.4433 +2023-09-10 17:16:00 - train: epoch 008, train_loss: 2.4505 +2023-09-10 17:17:26 - eval: epoch: 008, acc1: 49.534%, acc5: 74.920%, test_loss: 2.2168, per_image_load_time: 1.403ms, per_image_inference_time: 0.293ms +2023-09-10 17:17:26 - until epoch: 008, best_acc1: 49.534% +2023-09-10 17:17:26 - epoch 009 lr: 0.100000 +2023-09-10 17:18:10 - train: epoch 0009, iter [00100, 05004], lr: 0.100000, loss: 2.2421 +2023-09-10 17:18:47 - train: epoch 0009, iter [00200, 05004], lr: 0.100000, loss: 2.4519 +2023-09-10 17:19:24 - train: epoch 0009, iter [00300, 05004], lr: 0.100000, loss: 2.2699 +2023-09-10 17:20:03 - train: epoch 0009, iter [00400, 05004], lr: 0.100000, loss: 2.5405 +2023-09-10 17:20:39 - train: epoch 0009, iter [00500, 05004], lr: 0.100000, loss: 2.3112 +2023-09-10 17:21:17 - train: epoch 0009, iter [00600, 05004], lr: 0.100000, loss: 2.4025 +2023-09-10 17:21:54 - train: epoch 0009, iter [00700, 05004], lr: 0.100000, loss: 2.3233 +2023-09-10 17:22:31 - train: epoch 0009, iter [00800, 05004], lr: 0.100000, loss: 2.3762 +2023-09-10 17:23:06 - train: epoch 0009, iter [00900, 05004], lr: 0.100000, loss: 2.2242 +2023-09-10 17:23:44 - train: epoch 0009, iter [01000, 05004], lr: 0.100000, loss: 2.3699 +2023-09-10 17:24:20 - train: epoch 0009, iter [01100, 05004], lr: 0.100000, loss: 2.4246 +2023-09-10 17:24:55 - train: epoch 0009, iter [01200, 05004], lr: 0.100000, loss: 2.6291 +2023-09-10 17:25:34 - train: epoch 0009, iter [01300, 05004], lr: 0.100000, loss: 2.4572 +2023-09-10 17:26:10 - train: epoch 0009, iter [01400, 05004], lr: 0.100000, loss: 2.1904 +2023-09-10 17:26:45 - train: epoch 0009, iter [01500, 05004], lr: 0.100000, loss: 2.4112 +2023-09-10 17:27:23 - train: epoch 0009, iter [01600, 05004], lr: 0.100000, loss: 2.3166 +2023-09-10 17:27:58 - train: epoch 0009, iter [01700, 05004], lr: 0.100000, loss: 2.6170 +2023-09-10 17:28:33 - train: epoch 0009, iter [01800, 05004], lr: 0.100000, loss: 2.3696 +2023-09-10 17:29:12 - train: epoch 0009, iter [01900, 05004], lr: 0.100000, loss: 2.1935 +2023-09-10 17:29:46 - train: epoch 0009, iter [02000, 05004], lr: 0.100000, loss: 2.2709 +2023-09-10 17:30:21 - train: epoch 0009, iter [02100, 05004], lr: 0.100000, loss: 2.5163 +2023-09-10 17:31:00 - train: epoch 0009, iter [02200, 05004], lr: 0.100000, loss: 2.3876 +2023-09-10 17:31:35 - train: epoch 0009, iter [02300, 05004], lr: 0.100000, loss: 2.4387 +2023-09-10 17:32:10 - train: epoch 0009, iter [02400, 05004], lr: 0.100000, loss: 2.4651 +2023-09-10 17:32:48 - train: epoch 0009, iter [02500, 05004], lr: 0.100000, loss: 2.4013 +2023-09-10 17:33:23 - train: epoch 0009, iter [02600, 05004], lr: 0.100000, loss: 2.2876 +2023-09-10 17:33:59 - train: epoch 0009, iter [02700, 05004], lr: 0.100000, loss: 2.1170 +2023-09-10 17:34:37 - train: epoch 0009, iter [02800, 05004], lr: 0.100000, loss: 2.5365 +2023-09-10 17:35:12 - train: epoch 0009, iter [02900, 05004], lr: 0.100000, loss: 2.3632 +2023-09-10 17:35:47 - train: epoch 0009, iter [03000, 05004], lr: 0.100000, loss: 2.3611 +2023-09-10 17:36:25 - train: epoch 0009, iter [03100, 05004], lr: 0.100000, loss: 2.4266 +2023-09-10 17:37:00 - train: epoch 0009, iter [03200, 05004], lr: 0.100000, loss: 2.5267 +2023-09-10 17:37:36 - train: epoch 0009, iter [03300, 05004], lr: 0.100000, loss: 2.7160 +2023-09-10 17:38:14 - train: epoch 0009, iter [03400, 05004], lr: 0.100000, loss: 2.5415 +2023-09-10 17:38:49 - train: epoch 0009, iter [03500, 05004], lr: 0.100000, loss: 2.4664 +2023-09-10 17:39:24 - train: epoch 0009, iter [03600, 05004], lr: 
0.100000, loss: 2.1514 +2023-09-10 17:40:02 - train: epoch 0009, iter [03700, 05004], lr: 0.100000, loss: 2.5473 +2023-09-10 17:40:37 - train: epoch 0009, iter [03800, 05004], lr: 0.100000, loss: 2.6564 +2023-09-10 17:41:12 - train: epoch 0009, iter [03900, 05004], lr: 0.100000, loss: 2.1247 +2023-09-10 17:41:51 - train: epoch 0009, iter [04000, 05004], lr: 0.100000, loss: 2.4499 +2023-09-10 17:42:25 - train: epoch 0009, iter [04100, 05004], lr: 0.100000, loss: 2.3784 +2023-09-10 17:43:01 - train: epoch 0009, iter [04200, 05004], lr: 0.100000, loss: 2.1823 +2023-09-10 17:43:39 - train: epoch 0009, iter [04300, 05004], lr: 0.100000, loss: 2.2432 +2023-09-10 17:44:14 - train: epoch 0009, iter [04400, 05004], lr: 0.100000, loss: 2.5486 +2023-09-10 17:44:50 - train: epoch 0009, iter [04500, 05004], lr: 0.100000, loss: 2.5922 +2023-09-10 17:45:30 - train: epoch 0009, iter [04600, 05004], lr: 0.100000, loss: 2.5228 +2023-09-10 17:46:04 - train: epoch 0009, iter [04700, 05004], lr: 0.100000, loss: 2.4965 +2023-09-10 17:46:41 - train: epoch 0009, iter [04800, 05004], lr: 0.100000, loss: 2.4781 +2023-09-10 17:47:17 - train: epoch 0009, iter [04900, 05004], lr: 0.100000, loss: 2.6131 +2023-09-10 17:47:51 - train: epoch 0009, iter [05000, 05004], lr: 0.100000, loss: 2.3262 +2023-09-10 17:47:52 - train: epoch 009, train_loss: 2.4299 +2023-09-10 17:49:15 - eval: epoch: 009, acc1: 48.874%, acc5: 74.490%, test_loss: 2.2495, per_image_load_time: 1.351ms, per_image_inference_time: 0.280ms +2023-09-10 17:49:15 - until epoch: 009, best_acc1: 49.534% +2023-09-10 17:49:15 - epoch 010 lr: 0.100000 +2023-09-10 17:49:59 - train: epoch 0010, iter [00100, 05004], lr: 0.100000, loss: 2.2789 +2023-09-10 17:50:35 - train: epoch 0010, iter [00200, 05004], lr: 0.100000, loss: 2.3668 +2023-09-10 17:51:09 - train: epoch 0010, iter [00300, 05004], lr: 0.100000, loss: 2.4296 +2023-09-10 17:51:45 - train: epoch 0010, iter [00400, 05004], lr: 0.100000, loss: 2.4250 +2023-09-10 17:52:22 - train: epoch 0010, iter [00500, 05004], lr: 0.100000, loss: 2.4045 +2023-09-10 17:52:58 - train: epoch 0010, iter [00600, 05004], lr: 0.100000, loss: 2.4581 +2023-09-10 17:53:34 - train: epoch 0010, iter [00700, 05004], lr: 0.100000, loss: 2.4771 +2023-09-10 17:54:12 - train: epoch 0010, iter [00800, 05004], lr: 0.100000, loss: 2.3703 +2023-09-10 17:54:45 - train: epoch 0010, iter [00900, 05004], lr: 0.100000, loss: 2.2365 +2023-09-10 17:55:21 - train: epoch 0010, iter [01000, 05004], lr: 0.100000, loss: 2.2381 +2023-09-10 17:55:58 - train: epoch 0010, iter [01100, 05004], lr: 0.100000, loss: 2.2726 +2023-09-10 17:56:35 - train: epoch 0010, iter [01200, 05004], lr: 0.100000, loss: 2.3795 +2023-09-10 17:57:10 - train: epoch 0010, iter [01300, 05004], lr: 0.100000, loss: 2.2879 +2023-09-10 17:57:46 - train: epoch 0010, iter [01400, 05004], lr: 0.100000, loss: 2.7249 +2023-09-10 17:58:24 - train: epoch 0010, iter [01500, 05004], lr: 0.100000, loss: 2.1327 +2023-09-10 17:58:58 - train: epoch 0010, iter [01600, 05004], lr: 0.100000, loss: 2.6148 +2023-09-10 17:59:33 - train: epoch 0010, iter [01700, 05004], lr: 0.100000, loss: 2.3564 +2023-09-10 18:00:10 - train: epoch 0010, iter [01800, 05004], lr: 0.100000, loss: 2.2080 +2023-09-10 18:00:44 - train: epoch 0010, iter [01900, 05004], lr: 0.100000, loss: 2.5299 +2023-09-10 18:01:20 - train: epoch 0010, iter [02000, 05004], lr: 0.100000, loss: 2.3533 +2023-09-10 18:01:56 - train: epoch 0010, iter [02100, 05004], lr: 0.100000, loss: 2.1985 +2023-09-10 18:02:32 - train: epoch 0010, iter [02200, 
05004], lr: 0.100000, loss: 2.5135 +2023-09-10 18:03:07 - train: epoch 0010, iter [02300, 05004], lr: 0.100000, loss: 2.4991 +2023-09-10 18:03:44 - train: epoch 0010, iter [02400, 05004], lr: 0.100000, loss: 2.5115 +2023-09-10 18:04:20 - train: epoch 0010, iter [02500, 05004], lr: 0.100000, loss: 2.4876 +2023-09-10 18:04:55 - train: epoch 0010, iter [02600, 05004], lr: 0.100000, loss: 2.5075 +2023-09-10 18:05:34 - train: epoch 0010, iter [02700, 05004], lr: 0.100000, loss: 2.2401 +2023-09-10 18:06:08 - train: epoch 0010, iter [02800, 05004], lr: 0.100000, loss: 2.5634 +2023-09-10 18:06:43 - train: epoch 0010, iter [02900, 05004], lr: 0.100000, loss: 2.5540 +2023-09-10 18:07:20 - train: epoch 0010, iter [03000, 05004], lr: 0.100000, loss: 2.2674 +2023-09-10 18:07:56 - train: epoch 0010, iter [03100, 05004], lr: 0.100000, loss: 2.6748 +2023-09-10 18:08:29 - train: epoch 0010, iter [03200, 05004], lr: 0.100000, loss: 2.4637 +2023-09-10 18:09:06 - train: epoch 0010, iter [03300, 05004], lr: 0.100000, loss: 2.3256 +2023-09-10 18:09:41 - train: epoch 0010, iter [03400, 05004], lr: 0.100000, loss: 2.5667 +2023-09-10 18:10:16 - train: epoch 0010, iter [03500, 05004], lr: 0.100000, loss: 2.6108 +2023-09-10 18:10:54 - train: epoch 0010, iter [03600, 05004], lr: 0.100000, loss: 2.6182 +2023-09-10 18:11:29 - train: epoch 0010, iter [03700, 05004], lr: 0.100000, loss: 2.5474 +2023-09-10 18:12:04 - train: epoch 0010, iter [03800, 05004], lr: 0.100000, loss: 2.5568 +2023-09-10 18:12:40 - train: epoch 0010, iter [03900, 05004], lr: 0.100000, loss: 2.1192 +2023-09-10 18:13:14 - train: epoch 0010, iter [04000, 05004], lr: 0.100000, loss: 2.4459 +2023-09-10 18:13:51 - train: epoch 0010, iter [04100, 05004], lr: 0.100000, loss: 2.3886 +2023-09-10 18:14:29 - train: epoch 0010, iter [04200, 05004], lr: 0.100000, loss: 2.3595 +2023-09-10 18:15:02 - train: epoch 0010, iter [04300, 05004], lr: 0.100000, loss: 2.3752 +2023-09-10 18:15:37 - train: epoch 0010, iter [04400, 05004], lr: 0.100000, loss: 2.1777 +2023-09-10 18:16:14 - train: epoch 0010, iter [04500, 05004], lr: 0.100000, loss: 2.3339 +2023-09-10 18:16:48 - train: epoch 0010, iter [04600, 05004], lr: 0.100000, loss: 2.3331 +2023-09-10 18:17:23 - train: epoch 0010, iter [04700, 05004], lr: 0.100000, loss: 2.6168 +2023-09-10 18:18:01 - train: epoch 0010, iter [04800, 05004], lr: 0.100000, loss: 2.2937 +2023-09-10 18:18:34 - train: epoch 0010, iter [04900, 05004], lr: 0.100000, loss: 2.4744 +2023-09-10 18:19:07 - train: epoch 0010, iter [05000, 05004], lr: 0.100000, loss: 2.3143 +2023-09-10 18:19:09 - train: epoch 010, train_loss: 2.4102 +2023-09-10 18:20:32 - eval: epoch: 010, acc1: 48.270%, acc5: 74.250%, test_loss: 2.2618, per_image_load_time: 1.358ms, per_image_inference_time: 0.298ms +2023-09-10 18:20:32 - until epoch: 010, best_acc1: 49.534% +2023-09-10 18:20:32 - epoch 011 lr: 0.100000 +2023-09-10 18:21:17 - train: epoch 0011, iter [00100, 05004], lr: 0.100000, loss: 2.1019 +2023-09-10 18:21:54 - train: epoch 0011, iter [00200, 05004], lr: 0.100000, loss: 2.5353 +2023-09-10 18:22:28 - train: epoch 0011, iter [00300, 05004], lr: 0.100000, loss: 2.4735 +2023-09-10 18:23:07 - train: epoch 0011, iter [00400, 05004], lr: 0.100000, loss: 2.4961 +2023-09-10 18:23:41 - train: epoch 0011, iter [00500, 05004], lr: 0.100000, loss: 2.2762 +2023-09-10 18:24:17 - train: epoch 0011, iter [00600, 05004], lr: 0.100000, loss: 2.5372 +2023-09-10 18:24:51 - train: epoch 0011, iter [00700, 05004], lr: 0.100000, loss: 2.3782 +2023-09-10 18:25:30 - train: epoch 0011, iter 
[00800, 05004], lr: 0.100000, loss: 2.4909 +2023-09-10 18:26:05 - train: epoch 0011, iter [00900, 05004], lr: 0.100000, loss: 2.4335 +2023-09-10 18:26:40 - train: epoch 0011, iter [01000, 05004], lr: 0.100000, loss: 2.1489 +2023-09-10 18:27:17 - train: epoch 0011, iter [01100, 05004], lr: 0.100000, loss: 2.5788 +2023-09-10 18:27:52 - train: epoch 0011, iter [01200, 05004], lr: 0.100000, loss: 2.5894 +2023-09-10 18:28:28 - train: epoch 0011, iter [01300, 05004], lr: 0.100000, loss: 2.5196 +2023-09-10 18:29:05 - train: epoch 0011, iter [01400, 05004], lr: 0.100000, loss: 2.2948 +2023-09-10 18:29:39 - train: epoch 0011, iter [01500, 05004], lr: 0.100000, loss: 2.1371 +2023-09-10 18:30:15 - train: epoch 0011, iter [01600, 05004], lr: 0.100000, loss: 2.4846 +2023-09-10 18:30:52 - train: epoch 0011, iter [01700, 05004], lr: 0.100000, loss: 2.3683 +2023-09-10 18:31:27 - train: epoch 0011, iter [01800, 05004], lr: 0.100000, loss: 2.1544 +2023-09-10 18:32:01 - train: epoch 0011, iter [01900, 05004], lr: 0.100000, loss: 2.3110 +2023-09-10 18:32:38 - train: epoch 0011, iter [02000, 05004], lr: 0.100000, loss: 2.3617 +2023-09-10 18:33:15 - train: epoch 0011, iter [02100, 05004], lr: 0.100000, loss: 2.0973 +2023-09-10 18:33:49 - train: epoch 0011, iter [02200, 05004], lr: 0.100000, loss: 2.2229 +2023-09-10 18:34:25 - train: epoch 0011, iter [02300, 05004], lr: 0.100000, loss: 2.5280 +2023-09-10 18:35:00 - train: epoch 0011, iter [02400, 05004], lr: 0.100000, loss: 2.3321 +2023-09-10 18:35:35 - train: epoch 0011, iter [02500, 05004], lr: 0.100000, loss: 2.3634 +2023-09-10 18:36:13 - train: epoch 0011, iter [02600, 05004], lr: 0.100000, loss: 2.3993 +2023-09-10 18:36:48 - train: epoch 0011, iter [02700, 05004], lr: 0.100000, loss: 2.3614 +2023-09-10 18:37:22 - train: epoch 0011, iter [02800, 05004], lr: 0.100000, loss: 2.2592 +2023-09-10 18:38:01 - train: epoch 0011, iter [02900, 05004], lr: 0.100000, loss: 2.4171 +2023-09-10 18:38:35 - train: epoch 0011, iter [03000, 05004], lr: 0.100000, loss: 2.7006 +2023-09-10 18:39:10 - train: epoch 0011, iter [03100, 05004], lr: 0.100000, loss: 2.3022 +2023-09-10 18:39:48 - train: epoch 0011, iter [03200, 05004], lr: 0.100000, loss: 2.3709 +2023-09-10 18:40:22 - train: epoch 0011, iter [03300, 05004], lr: 0.100000, loss: 2.5283 +2023-09-10 18:40:57 - train: epoch 0011, iter [03400, 05004], lr: 0.100000, loss: 2.4126 +2023-09-10 18:41:34 - train: epoch 0011, iter [03500, 05004], lr: 0.100000, loss: 2.3796 +2023-09-10 18:42:09 - train: epoch 0011, iter [03600, 05004], lr: 0.100000, loss: 2.3717 +2023-09-10 18:42:44 - train: epoch 0011, iter [03700, 05004], lr: 0.100000, loss: 2.3952 +2023-09-10 18:43:20 - train: epoch 0011, iter [03800, 05004], lr: 0.100000, loss: 2.3299 +2023-09-10 18:43:56 - train: epoch 0011, iter [03900, 05004], lr: 0.100000, loss: 2.2446 +2023-09-10 18:44:30 - train: epoch 0011, iter [04000, 05004], lr: 0.100000, loss: 2.1411 +2023-09-10 18:45:08 - train: epoch 0011, iter [04100, 05004], lr: 0.100000, loss: 2.2447 +2023-09-10 18:45:43 - train: epoch 0011, iter [04200, 05004], lr: 0.100000, loss: 2.3217 +2023-09-10 18:46:19 - train: epoch 0011, iter [04300, 05004], lr: 0.100000, loss: 2.4053 +2023-09-10 18:46:55 - train: epoch 0011, iter [04400, 05004], lr: 0.100000, loss: 2.2635 +2023-09-10 18:47:29 - train: epoch 0011, iter [04500, 05004], lr: 0.100000, loss: 2.3336 +2023-09-10 18:48:06 - train: epoch 0011, iter [04600, 05004], lr: 0.100000, loss: 2.4362 +2023-09-10 18:48:41 - train: epoch 0011, iter [04700, 05004], lr: 0.100000, loss: 2.1303 
+2023-09-10 18:49:16 - train: epoch 0011, iter [04800, 05004], lr: 0.100000, loss: 2.3106 +2023-09-10 18:49:53 - train: epoch 0011, iter [04900, 05004], lr: 0.100000, loss: 2.5434 +2023-09-10 18:50:26 - train: epoch 0011, iter [05000, 05004], lr: 0.100000, loss: 2.3127 +2023-09-10 18:50:26 - train: epoch 011, train_loss: 2.3959 +2023-09-10 18:51:47 - eval: epoch: 011, acc1: 50.810%, acc5: 76.324%, test_loss: 2.1309, per_image_load_time: 1.301ms, per_image_inference_time: 0.303ms +2023-09-10 18:51:47 - until epoch: 011, best_acc1: 50.810% +2023-09-10 18:51:47 - epoch 012 lr: 0.100000 +2023-09-10 18:52:31 - train: epoch 0012, iter [00100, 05004], lr: 0.100000, loss: 2.2167 +2023-09-10 18:53:08 - train: epoch 0012, iter [00200, 05004], lr: 0.100000, loss: 2.2024 +2023-09-10 18:53:43 - train: epoch 0012, iter [00300, 05004], lr: 0.100000, loss: 2.2648 +2023-09-10 18:54:17 - train: epoch 0012, iter [00400, 05004], lr: 0.100000, loss: 2.2923 +2023-09-10 18:54:52 - train: epoch 0012, iter [00500, 05004], lr: 0.100000, loss: 2.5873 +2023-09-10 18:55:27 - train: epoch 0012, iter [00600, 05004], lr: 0.100000, loss: 2.1132 +2023-09-10 18:56:02 - train: epoch 0012, iter [00700, 05004], lr: 0.100000, loss: 2.1111 +2023-09-10 18:56:39 - train: epoch 0012, iter [00800, 05004], lr: 0.100000, loss: 2.6424 +2023-09-10 18:57:14 - train: epoch 0012, iter [00900, 05004], lr: 0.100000, loss: 2.4781 +2023-09-10 18:57:49 - train: epoch 0012, iter [01000, 05004], lr: 0.100000, loss: 2.1310 +2023-09-10 18:58:26 - train: epoch 0012, iter [01100, 05004], lr: 0.100000, loss: 2.6247 +2023-09-10 18:59:01 - train: epoch 0012, iter [01200, 05004], lr: 0.100000, loss: 2.1253 +2023-09-10 18:59:35 - train: epoch 0012, iter [01300, 05004], lr: 0.100000, loss: 2.1351 +2023-09-10 19:00:12 - train: epoch 0012, iter [01400, 05004], lr: 0.100000, loss: 2.4712 +2023-09-10 19:00:47 - train: epoch 0012, iter [01500, 05004], lr: 0.100000, loss: 2.2789 +2023-09-10 19:01:22 - train: epoch 0012, iter [01600, 05004], lr: 0.100000, loss: 2.4200 +2023-09-10 19:01:59 - train: epoch 0012, iter [01700, 05004], lr: 0.100000, loss: 2.0285 +2023-09-10 19:02:34 - train: epoch 0012, iter [01800, 05004], lr: 0.100000, loss: 2.3839 +2023-09-10 19:03:09 - train: epoch 0012, iter [01900, 05004], lr: 0.100000, loss: 2.5648 +2023-09-10 19:03:48 - train: epoch 0012, iter [02000, 05004], lr: 0.100000, loss: 2.4942 +2023-09-10 19:04:22 - train: epoch 0012, iter [02100, 05004], lr: 0.100000, loss: 2.5384 +2023-09-10 19:04:56 - train: epoch 0012, iter [02200, 05004], lr: 0.100000, loss: 2.3321 +2023-09-10 19:05:33 - train: epoch 0012, iter [02300, 05004], lr: 0.100000, loss: 2.3840 +2023-09-10 19:06:09 - train: epoch 0012, iter [02400, 05004], lr: 0.100000, loss: 2.4951 +2023-09-10 19:06:44 - train: epoch 0012, iter [02500, 05004], lr: 0.100000, loss: 2.1479 +2023-09-10 19:07:21 - train: epoch 0012, iter [02600, 05004], lr: 0.100000, loss: 2.2885 +2023-09-10 19:07:56 - train: epoch 0012, iter [02700, 05004], lr: 0.100000, loss: 2.4815 +2023-09-10 19:08:32 - train: epoch 0012, iter [02800, 05004], lr: 0.100000, loss: 2.2093 +2023-09-10 19:09:09 - train: epoch 0012, iter [02900, 05004], lr: 0.100000, loss: 2.2680 +2023-09-10 19:09:45 - train: epoch 0012, iter [03000, 05004], lr: 0.100000, loss: 2.1669 +2023-09-10 19:10:20 - train: epoch 0012, iter [03100, 05004], lr: 0.100000, loss: 2.3648 +2023-09-10 19:10:57 - train: epoch 0012, iter [03200, 05004], lr: 0.100000, loss: 2.2253 +2023-09-10 19:11:33 - train: epoch 0012, iter [03300, 05004], lr: 0.100000, loss: 
2.5144 +2023-09-10 19:12:07 - train: epoch 0012, iter [03400, 05004], lr: 0.100000, loss: 2.3402 +2023-09-10 19:12:45 - train: epoch 0012, iter [03500, 05004], lr: 0.100000, loss: 2.3492 +2023-09-10 19:13:20 - train: epoch 0012, iter [03600, 05004], lr: 0.100000, loss: 2.1890 +2023-09-10 19:13:55 - train: epoch 0012, iter [03700, 05004], lr: 0.100000, loss: 2.4902 +2023-09-10 19:14:32 - train: epoch 0012, iter [03800, 05004], lr: 0.100000, loss: 2.3467 +2023-09-10 19:15:07 - train: epoch 0012, iter [03900, 05004], lr: 0.100000, loss: 2.3338 +2023-09-10 19:15:42 - train: epoch 0012, iter [04000, 05004], lr: 0.100000, loss: 2.5722 +2023-09-10 19:16:20 - train: epoch 0012, iter [04100, 05004], lr: 0.100000, loss: 2.2670 +2023-09-10 19:16:56 - train: epoch 0012, iter [04200, 05004], lr: 0.100000, loss: 2.2781 +2023-09-10 19:17:30 - train: epoch 0012, iter [04300, 05004], lr: 0.100000, loss: 2.5880 +2023-09-10 19:18:08 - train: epoch 0012, iter [04400, 05004], lr: 0.100000, loss: 2.2381 +2023-09-10 19:18:43 - train: epoch 0012, iter [04500, 05004], lr: 0.100000, loss: 2.3000 +2023-09-10 19:19:18 - train: epoch 0012, iter [04600, 05004], lr: 0.100000, loss: 2.4235 +2023-09-10 19:19:55 - train: epoch 0012, iter [04700, 05004], lr: 0.100000, loss: 2.2386 +2023-09-10 19:20:32 - train: epoch 0012, iter [04800, 05004], lr: 0.100000, loss: 2.3841 +2023-09-10 19:21:07 - train: epoch 0012, iter [04900, 05004], lr: 0.100000, loss: 2.3628 +2023-09-10 19:21:40 - train: epoch 0012, iter [05000, 05004], lr: 0.100000, loss: 2.1219 +2023-09-10 19:21:41 - train: epoch 012, train_loss: 2.3780 +2023-09-10 19:23:03 - eval: epoch: 012, acc1: 45.352%, acc5: 71.356%, test_loss: 2.4320, per_image_load_time: 1.326ms, per_image_inference_time: 0.291ms +2023-09-10 19:23:03 - until epoch: 012, best_acc1: 50.810% +2023-09-10 19:23:03 - epoch 013 lr: 0.100000 +2023-09-10 19:23:47 - train: epoch 0013, iter [00100, 05004], lr: 0.100000, loss: 2.2979 +2023-09-10 19:24:22 - train: epoch 0013, iter [00200, 05004], lr: 0.100000, loss: 2.1331 +2023-09-10 19:24:58 - train: epoch 0013, iter [00300, 05004], lr: 0.100000, loss: 2.2338 +2023-09-10 19:25:36 - train: epoch 0013, iter [00400, 05004], lr: 0.100000, loss: 2.3460 +2023-09-10 19:26:11 - train: epoch 0013, iter [00500, 05004], lr: 0.100000, loss: 2.4402 +2023-09-10 19:26:45 - train: epoch 0013, iter [00600, 05004], lr: 0.100000, loss: 2.2975 +2023-09-10 19:27:22 - train: epoch 0013, iter [00700, 05004], lr: 0.100000, loss: 2.3659 +2023-09-10 19:27:58 - train: epoch 0013, iter [00800, 05004], lr: 0.100000, loss: 2.4672 +2023-09-10 19:28:34 - train: epoch 0013, iter [00900, 05004], lr: 0.100000, loss: 2.2110 +2023-09-10 19:29:12 - train: epoch 0013, iter [01000, 05004], lr: 0.100000, loss: 2.2845 +2023-09-10 19:29:47 - train: epoch 0013, iter [01100, 05004], lr: 0.100000, loss: 2.3880 +2023-09-10 19:30:25 - train: epoch 0013, iter [01200, 05004], lr: 0.100000, loss: 2.3596 +2023-09-10 19:31:00 - train: epoch 0013, iter [01300, 05004], lr: 0.100000, loss: 2.0714 +2023-09-10 19:31:36 - train: epoch 0013, iter [01400, 05004], lr: 0.100000, loss: 2.4489 +2023-09-10 19:32:13 - train: epoch 0013, iter [01500, 05004], lr: 0.100000, loss: 2.6902 +2023-09-10 19:32:48 - train: epoch 0013, iter [01600, 05004], lr: 0.100000, loss: 2.2073 +2023-09-10 19:33:24 - train: epoch 0013, iter [01700, 05004], lr: 0.100000, loss: 2.3738 +2023-09-10 19:34:01 - train: epoch 0013, iter [01800, 05004], lr: 0.100000, loss: 2.3075 +2023-09-10 19:34:37 - train: epoch 0013, iter [01900, 05004], lr: 0.100000, 
loss: 2.4912 +2023-09-10 19:35:13 - train: epoch 0013, iter [02000, 05004], lr: 0.100000, loss: 2.5422 +2023-09-10 19:35:49 - train: epoch 0013, iter [02100, 05004], lr: 0.100000, loss: 2.6203 +2023-09-10 19:36:24 - train: epoch 0013, iter [02200, 05004], lr: 0.100000, loss: 2.2134 +2023-09-10 19:37:01 - train: epoch 0013, iter [02300, 05004], lr: 0.100000, loss: 2.1805 +2023-09-10 19:37:38 - train: epoch 0013, iter [02400, 05004], lr: 0.100000, loss: 2.4434 +2023-09-10 19:38:13 - train: epoch 0013, iter [02500, 05004], lr: 0.100000, loss: 2.2689 +2023-09-10 19:38:48 - train: epoch 0013, iter [02600, 05004], lr: 0.100000, loss: 2.2796 +2023-09-10 19:39:25 - train: epoch 0013, iter [02700, 05004], lr: 0.100000, loss: 2.2664 +2023-09-10 19:40:00 - train: epoch 0013, iter [02800, 05004], lr: 0.100000, loss: 2.2694 +2023-09-10 19:40:37 - train: epoch 0013, iter [02900, 05004], lr: 0.100000, loss: 2.4436 +2023-09-10 19:41:12 - train: epoch 0013, iter [03000, 05004], lr: 0.100000, loss: 2.2164 +2023-09-10 19:41:49 - train: epoch 0013, iter [03100, 05004], lr: 0.100000, loss: 2.3237 +2023-09-10 19:42:24 - train: epoch 0013, iter [03200, 05004], lr: 0.100000, loss: 2.2606 +2023-09-10 19:42:58 - train: epoch 0013, iter [03300, 05004], lr: 0.100000, loss: 2.2052 +2023-09-10 19:43:35 - train: epoch 0013, iter [03400, 05004], lr: 0.100000, loss: 2.4317 +2023-09-10 19:44:10 - train: epoch 0013, iter [03500, 05004], lr: 0.100000, loss: 2.2663 +2023-09-10 19:44:45 - train: epoch 0013, iter [03600, 05004], lr: 0.100000, loss: 2.4328 +2023-09-10 19:45:22 - train: epoch 0013, iter [03700, 05004], lr: 0.100000, loss: 2.1395 +2023-09-10 19:45:57 - train: epoch 0013, iter [03800, 05004], lr: 0.100000, loss: 2.5197 +2023-09-10 19:46:32 - train: epoch 0013, iter [03900, 05004], lr: 0.100000, loss: 2.3292 +2023-09-10 19:47:09 - train: epoch 0013, iter [04000, 05004], lr: 0.100000, loss: 2.3927 +2023-09-10 19:47:44 - train: epoch 0013, iter [04100, 05004], lr: 0.100000, loss: 2.2807 +2023-09-10 19:48:20 - train: epoch 0013, iter [04200, 05004], lr: 0.100000, loss: 2.3903 +2023-09-10 19:48:57 - train: epoch 0013, iter [04300, 05004], lr: 0.100000, loss: 2.2752 +2023-09-10 19:49:32 - train: epoch 0013, iter [04400, 05004], lr: 0.100000, loss: 2.4112 +2023-09-10 19:50:06 - train: epoch 0013, iter [04500, 05004], lr: 0.100000, loss: 2.5125 +2023-09-10 19:50:44 - train: epoch 0013, iter [04600, 05004], lr: 0.100000, loss: 2.4112 +2023-09-10 19:51:20 - train: epoch 0013, iter [04700, 05004], lr: 0.100000, loss: 2.5010 +2023-09-10 19:51:54 - train: epoch 0013, iter [04800, 05004], lr: 0.100000, loss: 2.4691 +2023-09-10 19:52:31 - train: epoch 0013, iter [04900, 05004], lr: 0.100000, loss: 2.5146 +2023-09-10 19:53:04 - train: epoch 0013, iter [05000, 05004], lr: 0.100000, loss: 2.5084 +2023-09-10 19:53:05 - train: epoch 013, train_loss: 2.3712 +2023-09-10 19:54:27 - eval: epoch: 013, acc1: 50.454%, acc5: 75.880%, test_loss: 2.1607, per_image_load_time: 1.332ms, per_image_inference_time: 0.297ms +2023-09-10 19:54:27 - until epoch: 013, best_acc1: 50.810% +2023-09-10 19:54:27 - epoch 014 lr: 0.100000 +2023-09-10 19:55:11 - train: epoch 0014, iter [00100, 05004], lr: 0.100000, loss: 2.6176 +2023-09-10 19:55:47 - train: epoch 0014, iter [00200, 05004], lr: 0.100000, loss: 2.5504 +2023-09-10 19:56:23 - train: epoch 0014, iter [00300, 05004], lr: 0.100000, loss: 2.1913 +2023-09-10 19:57:01 - train: epoch 0014, iter [00400, 05004], lr: 0.100000, loss: 2.3730 +2023-09-10 19:57:36 - train: epoch 0014, iter [00500, 05004], lr: 
0.100000, loss: 2.1313 +2023-09-10 19:58:11 - train: epoch 0014, iter [00600, 05004], lr: 0.100000, loss: 2.4702 +2023-09-10 19:58:49 - train: epoch 0014, iter [00700, 05004], lr: 0.100000, loss: 2.2134 +2023-09-10 19:59:23 - train: epoch 0014, iter [00800, 05004], lr: 0.100000, loss: 2.3063 +2023-09-10 19:59:58 - train: epoch 0014, iter [00900, 05004], lr: 0.100000, loss: 2.2711 +2023-09-10 20:00:36 - train: epoch 0014, iter [01000, 05004], lr: 0.100000, loss: 2.4974 +2023-09-10 20:01:11 - train: epoch 0014, iter [01100, 05004], lr: 0.100000, loss: 2.3557 +2023-09-10 20:01:46 - train: epoch 0014, iter [01200, 05004], lr: 0.100000, loss: 2.4071 +2023-09-10 20:02:24 - train: epoch 0014, iter [01300, 05004], lr: 0.100000, loss: 2.5432 +2023-09-10 20:02:59 - train: epoch 0014, iter [01400, 05004], lr: 0.100000, loss: 2.5125 +2023-09-10 20:03:35 - train: epoch 0014, iter [01500, 05004], lr: 0.100000, loss: 2.4316 +2023-09-10 20:04:12 - train: epoch 0014, iter [01600, 05004], lr: 0.100000, loss: 2.3458 +2023-09-10 20:04:48 - train: epoch 0014, iter [01700, 05004], lr: 0.100000, loss: 2.5084 +2023-09-10 20:05:23 - train: epoch 0014, iter [01800, 05004], lr: 0.100000, loss: 2.6464 +2023-09-10 20:06:00 - train: epoch 0014, iter [01900, 05004], lr: 0.100000, loss: 2.3528 +2023-09-10 20:06:36 - train: epoch 0014, iter [02000, 05004], lr: 0.100000, loss: 2.3792 +2023-09-10 20:07:11 - train: epoch 0014, iter [02100, 05004], lr: 0.100000, loss: 2.3579 +2023-09-10 20:07:50 - train: epoch 0014, iter [02200, 05004], lr: 0.100000, loss: 2.1867 +2023-09-10 20:08:25 - train: epoch 0014, iter [02300, 05004], lr: 0.100000, loss: 2.5167 +2023-09-10 20:08:59 - train: epoch 0014, iter [02400, 05004], lr: 0.100000, loss: 2.5132 +2023-09-10 20:09:38 - train: epoch 0014, iter [02500, 05004], lr: 0.100000, loss: 2.3305 +2023-09-10 20:10:12 - train: epoch 0014, iter [02600, 05004], lr: 0.100000, loss: 2.2530 +2023-09-10 20:10:47 - train: epoch 0014, iter [02700, 05004], lr: 0.100000, loss: 2.2081 +2023-09-10 20:11:25 - train: epoch 0014, iter [02800, 05004], lr: 0.100000, loss: 2.4335 +2023-09-10 20:11:59 - train: epoch 0014, iter [02900, 05004], lr: 0.100000, loss: 2.4234 +2023-09-10 20:12:35 - train: epoch 0014, iter [03000, 05004], lr: 0.100000, loss: 2.4301 +2023-09-10 20:13:12 - train: epoch 0014, iter [03100, 05004], lr: 0.100000, loss: 2.3617 +2023-09-10 20:13:47 - train: epoch 0014, iter [03200, 05004], lr: 0.100000, loss: 2.7128 +2023-09-10 20:14:23 - train: epoch 0014, iter [03300, 05004], lr: 0.100000, loss: 2.3395 +2023-09-10 20:15:00 - train: epoch 0014, iter [03400, 05004], lr: 0.100000, loss: 2.2933 +2023-09-10 20:15:35 - train: epoch 0014, iter [03500, 05004], lr: 0.100000, loss: 2.2329 +2023-09-10 20:16:10 - train: epoch 0014, iter [03600, 05004], lr: 0.100000, loss: 2.1832 +2023-09-10 20:16:48 - train: epoch 0014, iter [03700, 05004], lr: 0.100000, loss: 2.3192 +2023-09-10 20:17:23 - train: epoch 0014, iter [03800, 05004], lr: 0.100000, loss: 2.5018 +2023-09-10 20:17:57 - train: epoch 0014, iter [03900, 05004], lr: 0.100000, loss: 2.3440 +2023-09-10 20:18:34 - train: epoch 0014, iter [04000, 05004], lr: 0.100000, loss: 2.3507 +2023-09-10 20:19:09 - train: epoch 0014, iter [04100, 05004], lr: 0.100000, loss: 2.3871 +2023-09-10 20:19:48 - train: epoch 0014, iter [04200, 05004], lr: 0.100000, loss: 2.1614 +2023-09-10 20:20:22 - train: epoch 0014, iter [04300, 05004], lr: 0.100000, loss: 2.4293 +2023-09-10 20:20:58 - train: epoch 0014, iter [04400, 05004], lr: 0.100000, loss: 2.3092 +2023-09-10 20:21:36 
- train: epoch 0014, iter [04500, 05004], lr: 0.100000, loss: 2.6503 +2023-09-10 20:22:10 - train: epoch 0014, iter [04600, 05004], lr: 0.100000, loss: 2.3265 +2023-09-10 20:22:45 - train: epoch 0014, iter [04700, 05004], lr: 0.100000, loss: 2.4038 +2023-09-10 20:23:22 - train: epoch 0014, iter [04800, 05004], lr: 0.100000, loss: 2.3584 +2023-09-10 20:24:00 - train: epoch 0014, iter [04900, 05004], lr: 0.100000, loss: 2.3091 +2023-09-10 20:24:31 - train: epoch 0014, iter [05000, 05004], lr: 0.100000, loss: 2.4036 +2023-09-10 20:24:32 - train: epoch 014, train_loss: 2.3604 +2023-09-10 20:25:56 - eval: epoch: 014, acc1: 50.362%, acc5: 76.154%, test_loss: 2.1528, per_image_load_time: 1.348ms, per_image_inference_time: 0.299ms +2023-09-10 20:25:56 - until epoch: 014, best_acc1: 50.810% +2023-09-10 20:25:56 - epoch 015 lr: 0.100000 +2023-09-10 20:26:41 - train: epoch 0015, iter [00100, 05004], lr: 0.100000, loss: 2.1020 +2023-09-10 20:27:16 - train: epoch 0015, iter [00200, 05004], lr: 0.100000, loss: 2.4344 +2023-09-10 20:27:51 - train: epoch 0015, iter [00300, 05004], lr: 0.100000, loss: 2.6288 +2023-09-10 20:28:30 - train: epoch 0015, iter [00400, 05004], lr: 0.100000, loss: 2.3703 +2023-09-10 20:29:04 - train: epoch 0015, iter [00500, 05004], lr: 0.100000, loss: 2.2595 +2023-09-10 20:29:39 - train: epoch 0015, iter [00600, 05004], lr: 0.100000, loss: 2.2815 +2023-09-10 20:30:17 - train: epoch 0015, iter [00700, 05004], lr: 0.100000, loss: 2.3085 +2023-09-10 20:30:52 - train: epoch 0015, iter [00800, 05004], lr: 0.100000, loss: 2.2177 +2023-09-10 20:31:28 - train: epoch 0015, iter [00900, 05004], lr: 0.100000, loss: 2.3545 +2023-09-10 20:32:03 - train: epoch 0015, iter [01000, 05004], lr: 0.100000, loss: 2.1582 +2023-09-10 20:32:40 - train: epoch 0015, iter [01100, 05004], lr: 0.100000, loss: 2.2849 +2023-09-10 20:33:15 - train: epoch 0015, iter [01200, 05004], lr: 0.100000, loss: 2.3838 +2023-09-10 20:33:52 - train: epoch 0015, iter [01300, 05004], lr: 0.100000, loss: 2.5340 +2023-09-10 20:34:29 - train: epoch 0015, iter [01400, 05004], lr: 0.100000, loss: 2.0982 +2023-09-10 20:35:04 - train: epoch 0015, iter [01500, 05004], lr: 0.100000, loss: 2.3201 +2023-09-10 20:35:40 - train: epoch 0015, iter [01600, 05004], lr: 0.100000, loss: 2.3091 +2023-09-10 20:36:18 - train: epoch 0015, iter [01700, 05004], lr: 0.100000, loss: 2.3455 +2023-09-10 20:36:53 - train: epoch 0015, iter [01800, 05004], lr: 0.100000, loss: 2.1607 +2023-09-10 20:37:28 - train: epoch 0015, iter [01900, 05004], lr: 0.100000, loss: 2.2379 +2023-09-10 20:38:06 - train: epoch 0015, iter [02000, 05004], lr: 0.100000, loss: 2.1915 +2023-09-10 20:38:42 - train: epoch 0015, iter [02100, 05004], lr: 0.100000, loss: 2.1281 +2023-09-10 20:39:16 - train: epoch 0015, iter [02200, 05004], lr: 0.100000, loss: 2.4784 +2023-09-10 20:39:54 - train: epoch 0015, iter [02300, 05004], lr: 0.100000, loss: 2.3986 +2023-09-10 20:40:30 - train: epoch 0015, iter [02400, 05004], lr: 0.100000, loss: 2.3843 +2023-09-10 20:41:05 - train: epoch 0015, iter [02500, 05004], lr: 0.100000, loss: 2.4848 +2023-09-10 20:41:42 - train: epoch 0015, iter [02600, 05004], lr: 0.100000, loss: 2.3486 +2023-09-10 20:42:19 - train: epoch 0015, iter [02700, 05004], lr: 0.100000, loss: 2.5347 +2023-09-10 20:42:55 - train: epoch 0015, iter [02800, 05004], lr: 0.100000, loss: 2.2995 +2023-09-10 20:43:32 - train: epoch 0015, iter [02900, 05004], lr: 0.100000, loss: 2.4539 +2023-09-10 20:44:08 - train: epoch 0015, iter [03000, 05004], lr: 0.100000, loss: 2.2429 +2023-09-10 
20:44:42 - train: epoch 0015, iter [03100, 05004], lr: 0.100000, loss: 2.3168 +2023-09-10 20:45:20 - train: epoch 0015, iter [03200, 05004], lr: 0.100000, loss: 2.4231 +2023-09-10 20:45:55 - train: epoch 0015, iter [03300, 05004], lr: 0.100000, loss: 2.1176 +2023-09-10 20:46:30 - train: epoch 0015, iter [03400, 05004], lr: 0.100000, loss: 2.2774 +2023-09-10 20:47:06 - train: epoch 0015, iter [03500, 05004], lr: 0.100000, loss: 2.5932 +2023-09-10 20:47:43 - train: epoch 0015, iter [03600, 05004], lr: 0.100000, loss: 2.3913 +2023-09-10 20:48:18 - train: epoch 0015, iter [03700, 05004], lr: 0.100000, loss: 2.1771 +2023-09-10 20:48:56 - train: epoch 0015, iter [03800, 05004], lr: 0.100000, loss: 2.4787 +2023-09-10 20:49:32 - train: epoch 0015, iter [03900, 05004], lr: 0.100000, loss: 2.5606 +2023-09-10 20:50:08 - train: epoch 0015, iter [04000, 05004], lr: 0.100000, loss: 2.2709 +2023-09-10 20:50:46 - train: epoch 0015, iter [04100, 05004], lr: 0.100000, loss: 2.1555 +2023-09-10 20:51:20 - train: epoch 0015, iter [04200, 05004], lr: 0.100000, loss: 2.1860 +2023-09-10 20:51:56 - train: epoch 0015, iter [04300, 05004], lr: 0.100000, loss: 2.1761 +2023-09-10 20:52:33 - train: epoch 0015, iter [04400, 05004], lr: 0.100000, loss: 2.2928 +2023-09-10 20:53:09 - train: epoch 0015, iter [04500, 05004], lr: 0.100000, loss: 2.2134 +2023-09-10 20:53:44 - train: epoch 0015, iter [04600, 05004], lr: 0.100000, loss: 2.3578 +2023-09-10 20:54:21 - train: epoch 0015, iter [04700, 05004], lr: 0.100000, loss: 2.5571 +2023-09-10 20:54:56 - train: epoch 0015, iter [04800, 05004], lr: 0.100000, loss: 2.3347 +2023-09-10 20:55:31 - train: epoch 0015, iter [04900, 05004], lr: 0.100000, loss: 2.4444 +2023-09-10 20:56:07 - train: epoch 0015, iter [05000, 05004], lr: 0.100000, loss: 2.3884 +2023-09-10 20:56:07 - train: epoch 015, train_loss: 2.3516 +2023-09-10 20:57:31 - eval: epoch: 015, acc1: 51.530%, acc5: 76.788%, test_loss: 2.0921, per_image_load_time: 1.354ms, per_image_inference_time: 0.296ms +2023-09-10 20:57:31 - until epoch: 015, best_acc1: 51.530% +2023-09-10 20:57:31 - epoch 016 lr: 0.100000 +2023-09-10 20:58:16 - train: epoch 0016, iter [00100, 05004], lr: 0.100000, loss: 2.2557 +2023-09-10 20:58:53 - train: epoch 0016, iter [00200, 05004], lr: 0.100000, loss: 2.1427 +2023-09-10 20:59:30 - train: epoch 0016, iter [00300, 05004], lr: 0.100000, loss: 2.2207 +2023-09-10 21:00:09 - train: epoch 0016, iter [00400, 05004], lr: 0.100000, loss: 2.4728 +2023-09-10 21:00:44 - train: epoch 0016, iter [00500, 05004], lr: 0.100000, loss: 2.1707 +2023-09-10 21:01:19 - train: epoch 0016, iter [00600, 05004], lr: 0.100000, loss: 2.4604 +2023-09-10 21:01:59 - train: epoch 0016, iter [00700, 05004], lr: 0.100000, loss: 2.1101 +2023-09-10 21:02:35 - train: epoch 0016, iter [00800, 05004], lr: 0.100000, loss: 2.2862 +2023-09-10 21:03:11 - train: epoch 0016, iter [00900, 05004], lr: 0.100000, loss: 2.4724 +2023-09-10 21:03:48 - train: epoch 0016, iter [01000, 05004], lr: 0.100000, loss: 2.2356 +2023-09-10 21:04:25 - train: epoch 0016, iter [01100, 05004], lr: 0.100000, loss: 2.2177 +2023-09-10 21:05:00 - train: epoch 0016, iter [01200, 05004], lr: 0.100000, loss: 2.3421 +2023-09-10 21:05:37 - train: epoch 0016, iter [01300, 05004], lr: 0.100000, loss: 2.3916 +2023-09-10 21:06:13 - train: epoch 0016, iter [01400, 05004], lr: 0.100000, loss: 2.2608 +2023-09-10 21:06:51 - train: epoch 0016, iter [01500, 05004], lr: 0.100000, loss: 2.4637 +2023-09-10 21:07:26 - train: epoch 0016, iter [01600, 05004], lr: 0.100000, loss: 2.4851 
+2023-09-10 21:08:05 - train: epoch 0016, iter [01700, 05004], lr: 0.100000, loss: 2.3558 +2023-09-10 21:08:41 - train: epoch 0016, iter [01800, 05004], lr: 0.100000, loss: 2.4261 +2023-09-10 21:09:15 - train: epoch 0016, iter [01900, 05004], lr: 0.100000, loss: 2.2731 +2023-09-10 21:09:54 - train: epoch 0016, iter [02000, 05004], lr: 0.100000, loss: 2.1766 +2023-09-10 21:10:30 - train: epoch 0016, iter [02100, 05004], lr: 0.100000, loss: 2.4079 +2023-09-10 21:11:07 - train: epoch 0016, iter [02200, 05004], lr: 0.100000, loss: 2.2376 +2023-09-10 21:11:43 - train: epoch 0016, iter [02300, 05004], lr: 0.100000, loss: 2.4390 +2023-09-10 21:12:22 - train: epoch 0016, iter [02400, 05004], lr: 0.100000, loss: 2.2846 +2023-09-10 21:12:55 - train: epoch 0016, iter [02500, 05004], lr: 0.100000, loss: 2.4152 +2023-09-10 21:13:33 - train: epoch 0016, iter [02600, 05004], lr: 0.100000, loss: 2.3124 +2023-09-10 21:14:10 - train: epoch 0016, iter [02700, 05004], lr: 0.100000, loss: 2.2909 +2023-09-10 21:14:46 - train: epoch 0016, iter [02800, 05004], lr: 0.100000, loss: 2.2562 +2023-09-10 21:15:24 - train: epoch 0016, iter [02900, 05004], lr: 0.100000, loss: 2.3010 +2023-09-10 21:15:59 - train: epoch 0016, iter [03000, 05004], lr: 0.100000, loss: 2.5663 +2023-09-10 21:16:35 - train: epoch 0016, iter [03100, 05004], lr: 0.100000, loss: 2.4217 +2023-09-10 21:17:12 - train: epoch 0016, iter [03200, 05004], lr: 0.100000, loss: 2.4016 +2023-09-10 21:17:49 - train: epoch 0016, iter [03300, 05004], lr: 0.100000, loss: 2.5740 +2023-09-10 21:18:24 - train: epoch 0016, iter [03400, 05004], lr: 0.100000, loss: 2.3079 +2023-09-10 21:19:01 - train: epoch 0016, iter [03500, 05004], lr: 0.100000, loss: 2.2199 +2023-09-10 21:19:38 - train: epoch 0016, iter [03600, 05004], lr: 0.100000, loss: 2.2632 +2023-09-10 21:20:13 - train: epoch 0016, iter [03700, 05004], lr: 0.100000, loss: 2.3562 +2023-09-10 21:20:50 - train: epoch 0016, iter [03800, 05004], lr: 0.100000, loss: 2.5129 +2023-09-10 21:21:26 - train: epoch 0016, iter [03900, 05004], lr: 0.100000, loss: 2.2849 +2023-09-10 21:22:02 - train: epoch 0016, iter [04000, 05004], lr: 0.100000, loss: 2.5396 +2023-09-10 21:22:39 - train: epoch 0016, iter [04100, 05004], lr: 0.100000, loss: 2.4405 +2023-09-10 21:23:15 - train: epoch 0016, iter [04200, 05004], lr: 0.100000, loss: 2.3531 +2023-09-10 21:23:50 - train: epoch 0016, iter [04300, 05004], lr: 0.100000, loss: 2.2853 +2023-09-10 21:24:29 - train: epoch 0016, iter [04400, 05004], lr: 0.100000, loss: 2.3050 +2023-09-10 21:25:05 - train: epoch 0016, iter [04500, 05004], lr: 0.100000, loss: 2.3984 +2023-09-10 21:25:40 - train: epoch 0016, iter [04600, 05004], lr: 0.100000, loss: 2.1186 +2023-09-10 21:26:19 - train: epoch 0016, iter [04700, 05004], lr: 0.100000, loss: 2.4980 +2023-09-10 21:26:53 - train: epoch 0016, iter [04800, 05004], lr: 0.100000, loss: 2.2783 +2023-09-10 21:27:32 - train: epoch 0016, iter [04900, 05004], lr: 0.100000, loss: 2.3551 +2023-09-10 21:28:05 - train: epoch 0016, iter [05000, 05004], lr: 0.100000, loss: 2.2986 +2023-09-10 21:28:06 - train: epoch 016, train_loss: 2.3425 +2023-09-10 21:29:30 - eval: epoch: 016, acc1: 50.490%, acc5: 76.194%, test_loss: 2.1306, per_image_load_time: 1.378ms, per_image_inference_time: 0.280ms +2023-09-10 21:29:30 - until epoch: 016, best_acc1: 51.530% +2023-09-10 21:29:30 - epoch 017 lr: 0.100000 +2023-09-10 21:30:15 - train: epoch 0017, iter [00100, 05004], lr: 0.100000, loss: 2.5908 +2023-09-10 21:30:51 - train: epoch 0017, iter [00200, 05004], lr: 0.100000, loss: 
2.4235 +2023-09-10 21:31:27 - train: epoch 0017, iter [00300, 05004], lr: 0.100000, loss: 2.4594 +2023-09-10 21:32:06 - train: epoch 0017, iter [00400, 05004], lr: 0.100000, loss: 2.1951 +2023-09-10 21:32:42 - train: epoch 0017, iter [00500, 05004], lr: 0.100000, loss: 2.2124 +2023-09-10 21:33:18 - train: epoch 0017, iter [00600, 05004], lr: 0.100000, loss: 2.6579 +2023-09-10 21:33:57 - train: epoch 0017, iter [00700, 05004], lr: 0.100000, loss: 2.2280 +2023-09-10 21:34:34 - train: epoch 0017, iter [00800, 05004], lr: 0.100000, loss: 2.3792 +2023-09-10 21:35:10 - train: epoch 0017, iter [00900, 05004], lr: 0.100000, loss: 2.0789 +2023-09-10 21:35:45 - train: epoch 0017, iter [01000, 05004], lr: 0.100000, loss: 2.3207 +2023-09-10 21:36:23 - train: epoch 0017, iter [01100, 05004], lr: 0.100000, loss: 2.5078 +2023-09-10 21:36:59 - train: epoch 0017, iter [01200, 05004], lr: 0.100000, loss: 2.2086 +2023-09-10 21:37:34 - train: epoch 0017, iter [01300, 05004], lr: 0.100000, loss: 2.4283 +2023-09-10 21:38:12 - train: epoch 0017, iter [01400, 05004], lr: 0.100000, loss: 2.2992 +2023-09-10 21:38:48 - train: epoch 0017, iter [01500, 05004], lr: 0.100000, loss: 2.3739 +2023-09-10 21:39:24 - train: epoch 0017, iter [01600, 05004], lr: 0.100000, loss: 2.2212 +2023-09-10 21:40:03 - train: epoch 0017, iter [01700, 05004], lr: 0.100000, loss: 2.2394 +2023-09-10 21:40:38 - train: epoch 0017, iter [01800, 05004], lr: 0.100000, loss: 2.1856 +2023-09-10 21:41:13 - train: epoch 0017, iter [01900, 05004], lr: 0.100000, loss: 2.0996 +2023-09-10 21:41:51 - train: epoch 0017, iter [02000, 05004], lr: 0.100000, loss: 2.3218 +2023-09-10 21:42:26 - train: epoch 0017, iter [02100, 05004], lr: 0.100000, loss: 2.2509 +2023-09-10 21:43:02 - train: epoch 0017, iter [02200, 05004], lr: 0.100000, loss: 2.2091 +2023-09-10 21:43:41 - train: epoch 0017, iter [02300, 05004], lr: 0.100000, loss: 2.5915 +2023-09-10 21:44:16 - train: epoch 0017, iter [02400, 05004], lr: 0.100000, loss: 2.3985 +2023-09-10 21:44:51 - train: epoch 0017, iter [02500, 05004], lr: 0.100000, loss: 2.5609 +2023-09-10 21:45:29 - train: epoch 0017, iter [02600, 05004], lr: 0.100000, loss: 2.1777 +2023-09-10 21:46:04 - train: epoch 0017, iter [02700, 05004], lr: 0.100000, loss: 2.2080 +2023-09-10 21:46:39 - train: epoch 0017, iter [02800, 05004], lr: 0.100000, loss: 2.4092 +2023-09-10 21:47:17 - train: epoch 0017, iter [02900, 05004], lr: 0.100000, loss: 2.5833 +2023-09-10 21:47:51 - train: epoch 0017, iter [03000, 05004], lr: 0.100000, loss: 2.1156 +2023-09-10 21:48:27 - train: epoch 0017, iter [03100, 05004], lr: 0.100000, loss: 2.5337 +2023-09-10 21:49:06 - train: epoch 0017, iter [03200, 05004], lr: 0.100000, loss: 2.1755 +2023-09-10 21:49:42 - train: epoch 0017, iter [03300, 05004], lr: 0.100000, loss: 2.3571 +2023-09-10 21:50:18 - train: epoch 0017, iter [03400, 05004], lr: 0.100000, loss: 2.2413 +2023-09-10 21:50:53 - train: epoch 0017, iter [03500, 05004], lr: 0.100000, loss: 2.4515 +2023-09-10 21:51:29 - train: epoch 0017, iter [03600, 05004], lr: 0.100000, loss: 2.3489 +2023-09-10 21:52:05 - train: epoch 0017, iter [03700, 05004], lr: 0.100000, loss: 2.3165 +2023-09-10 21:52:44 - train: epoch 0017, iter [03800, 05004], lr: 0.100000, loss: 2.3706 +2023-09-10 21:53:20 - train: epoch 0017, iter [03900, 05004], lr: 0.100000, loss: 2.3116 +2023-09-10 21:53:55 - train: epoch 0017, iter [04000, 05004], lr: 0.100000, loss: 2.5075 +2023-09-10 21:54:31 - train: epoch 0017, iter [04100, 05004], lr: 0.100000, loss: 2.4100 +2023-09-10 21:55:07 - train: epoch 
0017, iter [04200, 05004], lr: 0.100000, loss: 2.2523 +2023-09-10 21:55:42 - train: epoch 0017, iter [04300, 05004], lr: 0.100000, loss: 2.3160 +2023-09-10 21:56:19 - train: epoch 0017, iter [04400, 05004], lr: 0.100000, loss: 2.3510 +2023-09-10 21:56:54 - train: epoch 0017, iter [04500, 05004], lr: 0.100000, loss: 2.3558 +2023-09-10 21:57:29 - train: epoch 0017, iter [04600, 05004], lr: 0.100000, loss: 2.1898 +2023-09-10 21:58:04 - train: epoch 0017, iter [04700, 05004], lr: 0.100000, loss: 2.5030 +2023-09-10 21:58:40 - train: epoch 0017, iter [04800, 05004], lr: 0.100000, loss: 2.4352 +2023-09-10 21:59:18 - train: epoch 0017, iter [04900, 05004], lr: 0.100000, loss: 2.4420 +2023-09-10 21:59:50 - train: epoch 0017, iter [05000, 05004], lr: 0.100000, loss: 2.0841 +2023-09-10 21:59:51 - train: epoch 017, train_loss: 2.3360 +2023-09-10 22:01:15 - eval: epoch: 017, acc1: 49.652%, acc5: 75.108%, test_loss: 2.2082, per_image_load_time: 1.346ms, per_image_inference_time: 0.319ms +2023-09-10 22:01:16 - until epoch: 017, best_acc1: 51.530% +2023-09-10 22:01:16 - epoch 018 lr: 0.100000 +2023-09-10 22:02:00 - train: epoch 0018, iter [00100, 05004], lr: 0.100000, loss: 2.2457 +2023-09-10 22:02:36 - train: epoch 0018, iter [00200, 05004], lr: 0.100000, loss: 2.4304 +2023-09-10 22:03:14 - train: epoch 0018, iter [00300, 05004], lr: 0.100000, loss: 2.4403 +2023-09-10 22:03:50 - train: epoch 0018, iter [00400, 05004], lr: 0.100000, loss: 2.4877 +2023-09-10 22:04:28 - train: epoch 0018, iter [00500, 05004], lr: 0.100000, loss: 2.3551 +2023-09-10 22:05:04 - train: epoch 0018, iter [00600, 05004], lr: 0.100000, loss: 2.3669 +2023-09-10 22:05:42 - train: epoch 0018, iter [00700, 05004], lr: 0.100000, loss: 2.4301 +2023-09-10 22:06:19 - train: epoch 0018, iter [00800, 05004], lr: 0.100000, loss: 2.3663 +2023-09-10 22:06:54 - train: epoch 0018, iter [00900, 05004], lr: 0.100000, loss: 2.4849 +2023-09-10 22:07:31 - train: epoch 0018, iter [01000, 05004], lr: 0.100000, loss: 2.3859 +2023-09-10 22:08:08 - train: epoch 0018, iter [01100, 05004], lr: 0.100000, loss: 2.6523 +2023-09-10 22:08:44 - train: epoch 0018, iter [01200, 05004], lr: 0.100000, loss: 2.3581 +2023-09-10 22:09:21 - train: epoch 0018, iter [01300, 05004], lr: 0.100000, loss: 2.5083 +2023-09-10 22:09:57 - train: epoch 0018, iter [01400, 05004], lr: 0.100000, loss: 2.3125 +2023-09-10 22:10:35 - train: epoch 0018, iter [01500, 05004], lr: 0.100000, loss: 2.3520 +2023-09-10 22:11:11 - train: epoch 0018, iter [01600, 05004], lr: 0.100000, loss: 2.3993 +2023-09-10 22:11:49 - train: epoch 0018, iter [01700, 05004], lr: 0.100000, loss: 2.3375 +2023-09-10 22:12:26 - train: epoch 0018, iter [01800, 05004], lr: 0.100000, loss: 2.2650 +2023-09-10 22:13:02 - train: epoch 0018, iter [01900, 05004], lr: 0.100000, loss: 2.4026 +2023-09-10 22:13:40 - train: epoch 0018, iter [02000, 05004], lr: 0.100000, loss: 2.7283 +2023-09-10 22:14:16 - train: epoch 0018, iter [02100, 05004], lr: 0.100000, loss: 2.5106 +2023-09-10 22:14:51 - train: epoch 0018, iter [02200, 05004], lr: 0.100000, loss: 2.4672 +2023-09-10 22:15:30 - train: epoch 0018, iter [02300, 05004], lr: 0.100000, loss: 2.3911 +2023-09-10 22:16:07 - train: epoch 0018, iter [02400, 05004], lr: 0.100000, loss: 2.2606 +2023-09-10 22:16:44 - train: epoch 0018, iter [02500, 05004], lr: 0.100000, loss: 2.1961 +2023-09-10 22:17:22 - train: epoch 0018, iter [02600, 05004], lr: 0.100000, loss: 2.1141 +2023-09-10 22:17:57 - train: epoch 0018, iter [02700, 05004], lr: 0.100000, loss: 2.4127 +2023-09-10 22:18:33 - train: 
epoch 0018, iter [02800, 05004], lr: 0.100000, loss: 2.1413 +2023-09-10 22:19:12 - train: epoch 0018, iter [02900, 05004], lr: 0.100000, loss: 2.3464 +2023-09-10 22:19:46 - train: epoch 0018, iter [03000, 05004], lr: 0.100000, loss: 2.2062 +2023-09-10 22:20:22 - train: epoch 0018, iter [03100, 05004], lr: 0.100000, loss: 2.6721 +2023-09-10 22:21:01 - train: epoch 0018, iter [03200, 05004], lr: 0.100000, loss: 2.3368 +2023-09-10 22:21:36 - train: epoch 0018, iter [03300, 05004], lr: 0.100000, loss: 2.3616 +2023-09-10 22:22:12 - train: epoch 0018, iter [03400, 05004], lr: 0.100000, loss: 2.3341 +2023-09-10 22:22:50 - train: epoch 0018, iter [03500, 05004], lr: 0.100000, loss: 2.2909 +2023-09-10 22:23:27 - train: epoch 0018, iter [03600, 05004], lr: 0.100000, loss: 2.2424 +2023-09-10 22:24:01 - train: epoch 0018, iter [03700, 05004], lr: 0.100000, loss: 2.5861 +2023-09-10 22:24:40 - train: epoch 0018, iter [03800, 05004], lr: 0.100000, loss: 2.4680 +2023-09-10 22:25:15 - train: epoch 0018, iter [03900, 05004], lr: 0.100000, loss: 2.5105 +2023-09-10 22:25:51 - train: epoch 0018, iter [04000, 05004], lr: 0.100000, loss: 2.3737 +2023-09-10 22:26:29 - train: epoch 0018, iter [04100, 05004], lr: 0.100000, loss: 2.3321 +2023-09-10 22:27:06 - train: epoch 0018, iter [04200, 05004], lr: 0.100000, loss: 2.4093 +2023-09-10 22:27:42 - train: epoch 0018, iter [04300, 05004], lr: 0.100000, loss: 2.3224 +2023-09-10 22:28:19 - train: epoch 0018, iter [04400, 05004], lr: 0.100000, loss: 2.1904 +2023-09-10 22:28:56 - train: epoch 0018, iter [04500, 05004], lr: 0.100000, loss: 2.3889 +2023-09-10 22:29:32 - train: epoch 0018, iter [04600, 05004], lr: 0.100000, loss: 2.3491 +2023-09-10 22:30:11 - train: epoch 0018, iter [04700, 05004], lr: 0.100000, loss: 2.3575 +2023-09-10 22:30:46 - train: epoch 0018, iter [04800, 05004], lr: 0.100000, loss: 2.4806 +2023-09-10 22:31:22 - train: epoch 0018, iter [04900, 05004], lr: 0.100000, loss: 2.3107 +2023-09-10 22:31:57 - train: epoch 0018, iter [05000, 05004], lr: 0.100000, loss: 2.3385 +2023-09-10 22:31:58 - train: epoch 018, train_loss: 2.3296 +2023-09-10 22:33:23 - eval: epoch: 018, acc1: 51.218%, acc5: 76.648%, test_loss: 2.1185, per_image_load_time: 1.392ms, per_image_inference_time: 0.294ms +2023-09-10 22:33:23 - until epoch: 018, best_acc1: 51.530% +2023-09-10 22:33:23 - epoch 019 lr: 0.100000 +2023-09-10 22:34:08 - train: epoch 0019, iter [00100, 05004], lr: 0.100000, loss: 2.2485 +2023-09-10 22:34:43 - train: epoch 0019, iter [00200, 05004], lr: 0.100000, loss: 2.2796 +2023-09-10 22:35:20 - train: epoch 0019, iter [00300, 05004], lr: 0.100000, loss: 2.2776 +2023-09-10 22:35:57 - train: epoch 0019, iter [00400, 05004], lr: 0.100000, loss: 2.3671 +2023-09-10 22:36:32 - train: epoch 0019, iter [00500, 05004], lr: 0.100000, loss: 2.1593 +2023-09-10 22:37:08 - train: epoch 0019, iter [00600, 05004], lr: 0.100000, loss: 2.4763 +2023-09-10 22:37:46 - train: epoch 0019, iter [00700, 05004], lr: 0.100000, loss: 2.1502 +2023-09-10 22:38:22 - train: epoch 0019, iter [00800, 05004], lr: 0.100000, loss: 2.4855 +2023-09-10 22:39:00 - train: epoch 0019, iter [00900, 05004], lr: 0.100000, loss: 2.3771 +2023-09-10 22:39:36 - train: epoch 0019, iter [01000, 05004], lr: 0.100000, loss: 2.3053 +2023-09-10 22:40:13 - train: epoch 0019, iter [01100, 05004], lr: 0.100000, loss: 2.3124 +2023-09-10 22:40:50 - train: epoch 0019, iter [01200, 05004], lr: 0.100000, loss: 2.2586 +2023-09-10 22:41:26 - train: epoch 0019, iter [01300, 05004], lr: 0.100000, loss: 2.3485 +2023-09-10 22:42:02 - 
train: epoch 0019, iter [01400, 05004], lr: 0.100000, loss: 2.1664 +2023-09-10 22:42:40 - train: epoch 0019, iter [01500, 05004], lr: 0.100000, loss: 2.5705 +2023-09-10 22:43:16 - train: epoch 0019, iter [01600, 05004], lr: 0.100000, loss: 2.1568 +2023-09-10 22:43:51 - train: epoch 0019, iter [01700, 05004], lr: 0.100000, loss: 2.4700 +2023-09-10 22:44:31 - train: epoch 0019, iter [01800, 05004], lr: 0.100000, loss: 2.1548 +2023-09-10 22:45:06 - train: epoch 0019, iter [01900, 05004], lr: 0.100000, loss: 2.8215 +2023-09-10 22:45:44 - train: epoch 0019, iter [02000, 05004], lr: 0.100000, loss: 2.2101 +2023-09-10 22:46:22 - train: epoch 0019, iter [02100, 05004], lr: 0.100000, loss: 2.1695 +2023-09-10 22:46:56 - train: epoch 0019, iter [02200, 05004], lr: 0.100000, loss: 2.2473 +2023-09-10 22:47:33 - train: epoch 0019, iter [02300, 05004], lr: 0.100000, loss: 2.5070 +2023-09-10 22:48:09 - train: epoch 0019, iter [02400, 05004], lr: 0.100000, loss: 2.6699 +2023-09-10 22:48:46 - train: epoch 0019, iter [02500, 05004], lr: 0.100000, loss: 2.2310 +2023-09-10 22:49:23 - train: epoch 0019, iter [02600, 05004], lr: 0.100000, loss: 2.3225 +2023-09-10 22:49:58 - train: epoch 0019, iter [02700, 05004], lr: 0.100000, loss: 2.2636 +2023-09-10 22:50:37 - train: epoch 0019, iter [02800, 05004], lr: 0.100000, loss: 2.1273 +2023-09-10 22:51:11 - train: epoch 0019, iter [02900, 05004], lr: 0.100000, loss: 2.5439 +2023-09-10 22:51:49 - train: epoch 0019, iter [03000, 05004], lr: 0.100000, loss: 2.4041 +2023-09-10 22:52:25 - train: epoch 0019, iter [03100, 05004], lr: 0.100000, loss: 2.4407 +2023-09-10 22:53:01 - train: epoch 0019, iter [03200, 05004], lr: 0.100000, loss: 1.9599 +2023-09-10 22:53:38 - train: epoch 0019, iter [03300, 05004], lr: 0.100000, loss: 2.2856 +2023-09-10 22:54:13 - train: epoch 0019, iter [03400, 05004], lr: 0.100000, loss: 2.1952 +2023-09-10 22:54:51 - train: epoch 0019, iter [03500, 05004], lr: 0.100000, loss: 2.3860 +2023-09-10 22:55:26 - train: epoch 0019, iter [03600, 05004], lr: 0.100000, loss: 2.1516 +2023-09-10 22:56:04 - train: epoch 0019, iter [03700, 05004], lr: 0.100000, loss: 2.4593 +2023-09-10 22:56:39 - train: epoch 0019, iter [03800, 05004], lr: 0.100000, loss: 2.3869 +2023-09-10 22:57:14 - train: epoch 0019, iter [03900, 05004], lr: 0.100000, loss: 2.1080 +2023-09-10 22:57:52 - train: epoch 0019, iter [04000, 05004], lr: 0.100000, loss: 2.3760 +2023-09-10 22:58:28 - train: epoch 0019, iter [04100, 05004], lr: 0.100000, loss: 2.4564 +2023-09-10 22:59:04 - train: epoch 0019, iter [04200, 05004], lr: 0.100000, loss: 2.2984 +2023-09-10 22:59:42 - train: epoch 0019, iter [04300, 05004], lr: 0.100000, loss: 2.2999 +2023-09-10 23:00:17 - train: epoch 0019, iter [04400, 05004], lr: 0.100000, loss: 2.3747 +2023-09-10 23:00:53 - train: epoch 0019, iter [04500, 05004], lr: 0.100000, loss: 2.6920 +2023-09-10 23:01:30 - train: epoch 0019, iter [04600, 05004], lr: 0.100000, loss: 2.4385 +2023-09-10 23:02:07 - train: epoch 0019, iter [04700, 05004], lr: 0.100000, loss: 2.6202 +2023-09-10 23:02:42 - train: epoch 0019, iter [04800, 05004], lr: 0.100000, loss: 2.3902 +2023-09-10 23:03:19 - train: epoch 0019, iter [04900, 05004], lr: 0.100000, loss: 2.2854 +2023-09-10 23:03:53 - train: epoch 0019, iter [05000, 05004], lr: 0.100000, loss: 2.5739 +2023-09-10 23:03:53 - train: epoch 019, train_loss: 2.3251 +2023-09-10 23:05:18 - eval: epoch: 019, acc1: 51.748%, acc5: 77.266%, test_loss: 2.0723, per_image_load_time: 1.363ms, per_image_inference_time: 0.318ms +2023-09-10 23:05:18 - until 
epoch: 019, best_acc1: 51.748% +2023-09-10 23:05:18 - epoch 020 lr: 0.100000 +2023-09-10 23:06:02 - train: epoch 0020, iter [00100, 05004], lr: 0.100000, loss: 2.3496 +2023-09-10 23:06:38 - train: epoch 0020, iter [00200, 05004], lr: 0.100000, loss: 2.1822 +2023-09-10 23:07:14 - train: epoch 0020, iter [00300, 05004], lr: 0.100000, loss: 2.1646 +2023-09-10 23:07:53 - train: epoch 0020, iter [00400, 05004], lr: 0.100000, loss: 2.1767 +2023-09-10 23:08:28 - train: epoch 0020, iter [00500, 05004], lr: 0.100000, loss: 2.1323 +2023-09-10 23:09:03 - train: epoch 0020, iter [00600, 05004], lr: 0.100000, loss: 2.3995 +2023-09-10 23:09:41 - train: epoch 0020, iter [00700, 05004], lr: 0.100000, loss: 2.2909 +2023-09-10 23:10:15 - train: epoch 0020, iter [00800, 05004], lr: 0.100000, loss: 2.4911 +2023-09-10 23:10:52 - train: epoch 0020, iter [00900, 05004], lr: 0.100000, loss: 2.4874 +2023-09-10 23:11:30 - train: epoch 0020, iter [01000, 05004], lr: 0.100000, loss: 2.3591 +2023-09-10 23:12:05 - train: epoch 0020, iter [01100, 05004], lr: 0.100000, loss: 2.2315 +2023-09-10 23:12:40 - train: epoch 0020, iter [01200, 05004], lr: 0.100000, loss: 2.1556 +2023-09-10 23:13:19 - train: epoch 0020, iter [01300, 05004], lr: 0.100000, loss: 2.1719 +2023-09-10 23:13:54 - train: epoch 0020, iter [01400, 05004], lr: 0.100000, loss: 2.2580 +2023-09-10 23:14:29 - train: epoch 0020, iter [01500, 05004], lr: 0.100000, loss: 2.3442 +2023-09-10 23:15:07 - train: epoch 0020, iter [01600, 05004], lr: 0.100000, loss: 2.3838 +2023-09-10 23:15:42 - train: epoch 0020, iter [01700, 05004], lr: 0.100000, loss: 2.0673 +2023-09-10 23:16:19 - train: epoch 0020, iter [01800, 05004], lr: 0.100000, loss: 2.3815 +2023-09-10 23:16:53 - train: epoch 0020, iter [01900, 05004], lr: 0.100000, loss: 2.1339 +2023-09-10 23:17:32 - train: epoch 0020, iter [02000, 05004], lr: 0.100000, loss: 2.1900 +2023-09-10 23:18:07 - train: epoch 0020, iter [02100, 05004], lr: 0.100000, loss: 2.2895 +2023-09-10 23:18:44 - train: epoch 0020, iter [02200, 05004], lr: 0.100000, loss: 2.2076 +2023-09-10 23:19:21 - train: epoch 0020, iter [02300, 05004], lr: 0.100000, loss: 2.1013 +2023-09-10 23:19:57 - train: epoch 0020, iter [02400, 05004], lr: 0.100000, loss: 2.4470 +2023-09-10 23:20:32 - train: epoch 0020, iter [02500, 05004], lr: 0.100000, loss: 2.1044 +2023-09-10 23:21:10 - train: epoch 0020, iter [02600, 05004], lr: 0.100000, loss: 2.2219 +2023-09-10 23:21:46 - train: epoch 0020, iter [02700, 05004], lr: 0.100000, loss: 2.5158 +2023-09-10 23:22:22 - train: epoch 0020, iter [02800, 05004], lr: 0.100000, loss: 2.4159 +2023-09-10 23:22:58 - train: epoch 0020, iter [02900, 05004], lr: 0.100000, loss: 2.5201 +2023-09-10 23:23:37 - train: epoch 0020, iter [03000, 05004], lr: 0.100000, loss: 2.4937 +2023-09-10 23:24:11 - train: epoch 0020, iter [03100, 05004], lr: 0.100000, loss: 2.4466 +2023-09-10 23:24:48 - train: epoch 0020, iter [03200, 05004], lr: 0.100000, loss: 2.5568 +2023-09-10 23:25:24 - train: epoch 0020, iter [03300, 05004], lr: 0.100000, loss: 2.1819 +2023-09-10 23:26:03 - train: epoch 0020, iter [03400, 05004], lr: 0.100000, loss: 2.1849 +2023-09-10 23:26:38 - train: epoch 0020, iter [03500, 05004], lr: 0.100000, loss: 2.3556 +2023-09-10 23:27:14 - train: epoch 0020, iter [03600, 05004], lr: 0.100000, loss: 2.3536 +2023-09-10 23:27:52 - train: epoch 0020, iter [03700, 05004], lr: 0.100000, loss: 2.2249 +2023-09-10 23:28:28 - train: epoch 0020, iter [03800, 05004], lr: 0.100000, loss: 2.2581 +2023-09-10 23:29:03 - train: epoch 0020, iter [03900, 
05004], lr: 0.100000, loss: 2.4725 +2023-09-10 23:29:41 - train: epoch 0020, iter [04000, 05004], lr: 0.100000, loss: 2.1942 +2023-09-10 23:30:16 - train: epoch 0020, iter [04100, 05004], lr: 0.100000, loss: 2.2027 +2023-09-10 23:30:52 - train: epoch 0020, iter [04200, 05004], lr: 0.100000, loss: 2.3517 +2023-09-10 23:31:28 - train: epoch 0020, iter [04300, 05004], lr: 0.100000, loss: 2.5423 +2023-09-10 23:32:06 - train: epoch 0020, iter [04400, 05004], lr: 0.100000, loss: 2.2597 +2023-09-10 23:32:42 - train: epoch 0020, iter [04500, 05004], lr: 0.100000, loss: 2.5438 +2023-09-10 23:33:20 - train: epoch 0020, iter [04600, 05004], lr: 0.100000, loss: 2.3182 +2023-09-10 23:33:56 - train: epoch 0020, iter [04700, 05004], lr: 0.100000, loss: 2.0962 +2023-09-10 23:34:31 - train: epoch 0020, iter [04800, 05004], lr: 0.100000, loss: 2.4258 +2023-09-10 23:35:09 - train: epoch 0020, iter [04900, 05004], lr: 0.100000, loss: 2.4782 +2023-09-10 23:35:42 - train: epoch 0020, iter [05000, 05004], lr: 0.100000, loss: 2.1861 +2023-09-10 23:35:42 - train: epoch 020, train_loss: 2.3177 +2023-09-10 23:37:06 - eval: epoch: 020, acc1: 48.958%, acc5: 74.446%, test_loss: 2.2497, per_image_load_time: 1.354ms, per_image_inference_time: 0.309ms +2023-09-10 23:37:07 - until epoch: 020, best_acc1: 51.748% +2023-09-10 23:37:07 - epoch 021 lr: 0.100000 +2023-09-10 23:37:51 - train: epoch 0021, iter [00100, 05004], lr: 0.100000, loss: 2.2895 +2023-09-10 23:38:28 - train: epoch 0021, iter [00200, 05004], lr: 0.100000, loss: 2.2163 +2023-09-10 23:39:04 - train: epoch 0021, iter [00300, 05004], lr: 0.100000, loss: 2.0343 +2023-09-10 23:39:40 - train: epoch 0021, iter [00400, 05004], lr: 0.100000, loss: 2.2685 +2023-09-10 23:40:17 - train: epoch 0021, iter [00500, 05004], lr: 0.100000, loss: 2.3319 +2023-09-10 23:40:53 - train: epoch 0021, iter [00600, 05004], lr: 0.100000, loss: 2.0697 +2023-09-10 23:41:28 - train: epoch 0021, iter [00700, 05004], lr: 0.100000, loss: 2.2212 +2023-09-10 23:42:06 - train: epoch 0021, iter [00800, 05004], lr: 0.100000, loss: 2.1588 +2023-09-10 23:42:42 - train: epoch 0021, iter [00900, 05004], lr: 0.100000, loss: 2.2555 +2023-09-10 23:43:20 - train: epoch 0021, iter [01000, 05004], lr: 0.100000, loss: 1.9656 +2023-09-10 23:43:56 - train: epoch 0021, iter [01100, 05004], lr: 0.100000, loss: 2.2539 +2023-09-10 23:44:31 - train: epoch 0021, iter [01200, 05004], lr: 0.100000, loss: 2.0539 +2023-09-10 23:45:09 - train: epoch 0021, iter [01300, 05004], lr: 0.100000, loss: 2.1661 +2023-09-10 23:45:46 - train: epoch 0021, iter [01400, 05004], lr: 0.100000, loss: 2.1474 +2023-09-10 23:46:23 - train: epoch 0021, iter [01500, 05004], lr: 0.100000, loss: 2.2001 +2023-09-10 23:46:59 - train: epoch 0021, iter [01600, 05004], lr: 0.100000, loss: 2.1299 +2023-09-10 23:47:36 - train: epoch 0021, iter [01700, 05004], lr: 0.100000, loss: 2.4336 +2023-09-10 23:48:12 - train: epoch 0021, iter [01800, 05004], lr: 0.100000, loss: 2.0688 +2023-09-10 23:48:47 - train: epoch 0021, iter [01900, 05004], lr: 0.100000, loss: 2.4871 +2023-09-10 23:49:26 - train: epoch 0021, iter [02000, 05004], lr: 0.100000, loss: 2.3583 +2023-09-10 23:50:02 - train: epoch 0021, iter [02100, 05004], lr: 0.100000, loss: 2.1823 +2023-09-10 23:50:38 - train: epoch 0021, iter [02200, 05004], lr: 0.100000, loss: 2.4282 +2023-09-10 23:51:15 - train: epoch 0021, iter [02300, 05004], lr: 0.100000, loss: 2.4042 +2023-09-10 23:51:53 - train: epoch 0021, iter [02400, 05004], lr: 0.100000, loss: 2.0536 +2023-09-10 23:52:27 - train: epoch 0021, iter 
[02500, 05004], lr: 0.100000, loss: 2.2362 +2023-09-10 23:53:05 - train: epoch 0021, iter [02600, 05004], lr: 0.100000, loss: 2.3551 +2023-09-10 23:53:40 - train: epoch 0021, iter [02700, 05004], lr: 0.100000, loss: 2.1545 +2023-09-10 23:54:16 - train: epoch 0021, iter [02800, 05004], lr: 0.100000, loss: 2.4098 +2023-09-10 23:54:54 - train: epoch 0021, iter [02900, 05004], lr: 0.100000, loss: 2.4439 +2023-09-10 23:55:29 - train: epoch 0021, iter [03000, 05004], lr: 0.100000, loss: 2.5235 +2023-09-10 23:56:06 - train: epoch 0021, iter [03100, 05004], lr: 0.100000, loss: 2.3055 +2023-09-10 23:56:43 - train: epoch 0021, iter [03200, 05004], lr: 0.100000, loss: 2.1634 +2023-09-10 23:57:19 - train: epoch 0021, iter [03300, 05004], lr: 0.100000, loss: 2.4577 +2023-09-10 23:57:56 - train: epoch 0021, iter [03400, 05004], lr: 0.100000, loss: 2.4722 +2023-09-10 23:58:31 - train: epoch 0021, iter [03500, 05004], lr: 0.100000, loss: 2.4617 +2023-09-10 23:59:08 - train: epoch 0021, iter [03600, 05004], lr: 0.100000, loss: 2.1860 +2023-09-10 23:59:45 - train: epoch 0021, iter [03700, 05004], lr: 0.100000, loss: 2.4073 +2023-09-11 00:00:23 - train: epoch 0021, iter [03800, 05004], lr: 0.100000, loss: 2.4991 +2023-09-11 00:00:58 - train: epoch 0021, iter [03900, 05004], lr: 0.100000, loss: 2.2582 +2023-09-11 00:01:35 - train: epoch 0021, iter [04000, 05004], lr: 0.100000, loss: 2.5018 +2023-09-11 00:02:12 - train: epoch 0021, iter [04100, 05004], lr: 0.100000, loss: 2.1627 +2023-09-11 00:02:50 - train: epoch 0021, iter [04200, 05004], lr: 0.100000, loss: 2.1780 +2023-09-11 00:03:25 - train: epoch 0021, iter [04300, 05004], lr: 0.100000, loss: 2.4372 +2023-09-11 00:04:05 - train: epoch 0021, iter [04400, 05004], lr: 0.100000, loss: 2.3557 +2023-09-11 00:04:39 - train: epoch 0021, iter [04500, 05004], lr: 0.100000, loss: 2.4238 +2023-09-11 00:05:14 - train: epoch 0021, iter [04600, 05004], lr: 0.100000, loss: 2.2074 +2023-09-11 00:05:51 - train: epoch 0021, iter [04700, 05004], lr: 0.100000, loss: 2.4067 +2023-09-11 00:06:29 - train: epoch 0021, iter [04800, 05004], lr: 0.100000, loss: 2.3581 +2023-09-11 00:07:05 - train: epoch 0021, iter [04900, 05004], lr: 0.100000, loss: 2.1002 +2023-09-11 00:07:38 - train: epoch 0021, iter [05000, 05004], lr: 0.100000, loss: 2.2371 +2023-09-11 00:07:40 - train: epoch 021, train_loss: 2.3135 +2023-09-11 00:09:05 - eval: epoch: 021, acc1: 51.354%, acc5: 76.870%, test_loss: 2.0948, per_image_load_time: 1.367ms, per_image_inference_time: 0.316ms +2023-09-11 00:09:05 - until epoch: 021, best_acc1: 51.748% +2023-09-11 00:09:05 - epoch 022 lr: 0.100000 +2023-09-11 00:09:48 - train: epoch 0022, iter [00100, 05004], lr: 0.100000, loss: 1.9199 +2023-09-11 00:10:26 - train: epoch 0022, iter [00200, 05004], lr: 0.100000, loss: 2.3075 +2023-09-11 00:11:01 - train: epoch 0022, iter [00300, 05004], lr: 0.100000, loss: 2.0206 +2023-09-11 00:11:40 - train: epoch 0022, iter [00400, 05004], lr: 0.100000, loss: 2.1611 +2023-09-11 00:12:15 - train: epoch 0022, iter [00500, 05004], lr: 0.100000, loss: 2.4573 +2023-09-11 00:12:54 - train: epoch 0022, iter [00600, 05004], lr: 0.100000, loss: 2.4777 +2023-09-11 00:13:30 - train: epoch 0022, iter [00700, 05004], lr: 0.100000, loss: 2.3189 +2023-09-11 00:14:07 - train: epoch 0022, iter [00800, 05004], lr: 0.100000, loss: 2.4443 +2023-09-11 00:14:43 - train: epoch 0022, iter [00900, 05004], lr: 0.100000, loss: 2.4343 +2023-09-11 00:15:21 - train: epoch 0022, iter [01000, 05004], lr: 0.100000, loss: 2.2883 +2023-09-11 00:15:58 - train: epoch 0022, 
iter [01100, 05004], lr: 0.100000, loss: 2.2878 +2023-09-11 00:16:38 - train: epoch 0022, iter [01200, 05004], lr: 0.100000, loss: 2.0901 +2023-09-11 00:17:15 - train: epoch 0022, iter [01300, 05004], lr: 0.100000, loss: 2.1521 +2023-09-11 00:17:52 - train: epoch 0022, iter [01400, 05004], lr: 0.100000, loss: 2.2265 +2023-09-11 00:18:31 - train: epoch 0022, iter [01500, 05004], lr: 0.100000, loss: 2.2543 +2023-09-11 00:19:08 - train: epoch 0022, iter [01600, 05004], lr: 0.100000, loss: 2.0447 +2023-09-11 00:19:45 - train: epoch 0022, iter [01700, 05004], lr: 0.100000, loss: 2.3038 +2023-09-11 00:20:25 - train: epoch 0022, iter [01800, 05004], lr: 0.100000, loss: 2.5930 +2023-09-11 00:21:02 - train: epoch 0022, iter [01900, 05004], lr: 0.100000, loss: 2.5487 +2023-09-11 00:21:39 - train: epoch 0022, iter [02000, 05004], lr: 0.100000, loss: 2.2559 +2023-09-11 00:22:15 - train: epoch 0022, iter [02100, 05004], lr: 0.100000, loss: 2.2904 +2023-09-11 00:22:54 - train: epoch 0022, iter [02200, 05004], lr: 0.100000, loss: 1.9644 +2023-09-11 00:23:34 - train: epoch 0022, iter [02300, 05004], lr: 0.100000, loss: 2.3130 +2023-09-11 00:24:11 - train: epoch 0022, iter [02400, 05004], lr: 0.100000, loss: 2.5797 +2023-09-11 00:24:47 - train: epoch 0022, iter [02500, 05004], lr: 0.100000, loss: 2.5715 +2023-09-11 00:25:26 - train: epoch 0022, iter [02600, 05004], lr: 0.100000, loss: 2.1686 +2023-09-11 00:26:03 - train: epoch 0022, iter [02700, 05004], lr: 0.100000, loss: 2.1912 +2023-09-11 00:26:38 - train: epoch 0022, iter [02800, 05004], lr: 0.100000, loss: 2.3702 +2023-09-11 00:27:16 - train: epoch 0022, iter [02900, 05004], lr: 0.100000, loss: 2.2572 +2023-09-11 00:27:51 - train: epoch 0022, iter [03000, 05004], lr: 0.100000, loss: 2.4528 +2023-09-11 00:28:27 - train: epoch 0022, iter [03100, 05004], lr: 0.100000, loss: 2.4142 +2023-09-11 00:29:05 - train: epoch 0022, iter [03200, 05004], lr: 0.100000, loss: 2.2881 +2023-09-11 00:29:41 - train: epoch 0022, iter [03300, 05004], lr: 0.100000, loss: 2.4109 +2023-09-11 00:30:15 - train: epoch 0022, iter [03400, 05004], lr: 0.100000, loss: 2.1087 +2023-09-11 00:30:51 - train: epoch 0022, iter [03500, 05004], lr: 0.100000, loss: 2.1470 +2023-09-11 00:31:27 - train: epoch 0022, iter [03600, 05004], lr: 0.100000, loss: 2.5094 +2023-09-11 00:32:05 - train: epoch 0022, iter [03700, 05004], lr: 0.100000, loss: 2.5644 +2023-09-11 00:32:42 - train: epoch 0022, iter [03800, 05004], lr: 0.100000, loss: 2.4455 +2023-09-11 00:33:18 - train: epoch 0022, iter [03900, 05004], lr: 0.100000, loss: 2.2027 +2023-09-11 00:33:57 - train: epoch 0022, iter [04000, 05004], lr: 0.100000, loss: 2.3015 +2023-09-11 00:34:31 - train: epoch 0022, iter [04100, 05004], lr: 0.100000, loss: 2.2870 +2023-09-11 00:35:08 - train: epoch 0022, iter [04200, 05004], lr: 0.100000, loss: 2.5123 +2023-09-11 00:35:44 - train: epoch 0022, iter [04300, 05004], lr: 0.100000, loss: 2.4268 +2023-09-11 00:36:21 - train: epoch 0022, iter [04400, 05004], lr: 0.100000, loss: 2.2615 +2023-09-11 00:36:57 - train: epoch 0022, iter [04500, 05004], lr: 0.100000, loss: 2.1066 +2023-09-11 00:37:34 - train: epoch 0022, iter [04600, 05004], lr: 0.100000, loss: 2.4329 +2023-09-11 00:38:10 - train: epoch 0022, iter [04700, 05004], lr: 0.100000, loss: 2.2065 +2023-09-11 00:38:46 - train: epoch 0022, iter [04800, 05004], lr: 0.100000, loss: 2.2715 +2023-09-11 00:39:25 - train: epoch 0022, iter [04900, 05004], lr: 0.100000, loss: 2.4108 +2023-09-11 00:39:57 - train: epoch 0022, iter [05000, 05004], lr: 0.100000, loss: 
2.2304 +2023-09-11 00:39:58 - train: epoch 022, train_loss: 2.3099 +2023-09-11 00:41:22 - eval: epoch: 022, acc1: 52.548%, acc5: 77.878%, test_loss: 2.0342, per_image_load_time: 1.343ms, per_image_inference_time: 0.313ms +2023-09-11 00:41:22 - until epoch: 022, best_acc1: 52.548% +2023-09-11 00:41:22 - epoch 023 lr: 0.100000 +2023-09-11 00:42:05 - train: epoch 0023, iter [00100, 05004], lr: 0.100000, loss: 2.1625 +2023-09-11 00:42:41 - train: epoch 0023, iter [00200, 05004], lr: 0.100000, loss: 2.1647 +2023-09-11 00:43:16 - train: epoch 0023, iter [00300, 05004], lr: 0.100000, loss: 2.2772 +2023-09-11 00:43:54 - train: epoch 0023, iter [00400, 05004], lr: 0.100000, loss: 2.2742 +2023-09-11 00:44:30 - train: epoch 0023, iter [00500, 05004], lr: 0.100000, loss: 2.4009 +2023-09-11 00:45:07 - train: epoch 0023, iter [00600, 05004], lr: 0.100000, loss: 2.3503 +2023-09-11 00:45:44 - train: epoch 0023, iter [00700, 05004], lr: 0.100000, loss: 2.2710 +2023-09-11 00:46:20 - train: epoch 0023, iter [00800, 05004], lr: 0.100000, loss: 2.4395 +2023-09-11 00:46:56 - train: epoch 0023, iter [00900, 05004], lr: 0.100000, loss: 2.3213 +2023-09-11 00:47:34 - train: epoch 0023, iter [01000, 05004], lr: 0.100000, loss: 2.2743 +2023-09-11 00:48:10 - train: epoch 0023, iter [01100, 05004], lr: 0.100000, loss: 2.1016 +2023-09-11 00:48:46 - train: epoch 0023, iter [01200, 05004], lr: 0.100000, loss: 2.2581 +2023-09-11 00:49:26 - train: epoch 0023, iter [01300, 05004], lr: 0.100000, loss: 2.3102 +2023-09-11 00:50:00 - train: epoch 0023, iter [01400, 05004], lr: 0.100000, loss: 2.2797 +2023-09-11 00:50:36 - train: epoch 0023, iter [01500, 05004], lr: 0.100000, loss: 2.1793 +2023-09-11 00:51:15 - train: epoch 0023, iter [01600, 05004], lr: 0.100000, loss: 2.2580 +2023-09-11 00:51:51 - train: epoch 0023, iter [01700, 05004], lr: 0.100000, loss: 2.0826 +2023-09-11 00:52:26 - train: epoch 0023, iter [01800, 05004], lr: 0.100000, loss: 2.3307 +2023-09-11 00:53:05 - train: epoch 0023, iter [01900, 05004], lr: 0.100000, loss: 2.4334 +2023-09-11 00:53:42 - train: epoch 0023, iter [02000, 05004], lr: 0.100000, loss: 2.0393 +2023-09-11 00:54:17 - train: epoch 0023, iter [02100, 05004], lr: 0.100000, loss: 2.2085 +2023-09-11 00:54:55 - train: epoch 0023, iter [02200, 05004], lr: 0.100000, loss: 2.1104 +2023-09-11 00:55:30 - train: epoch 0023, iter [02300, 05004], lr: 0.100000, loss: 2.2581 +2023-09-11 00:56:05 - train: epoch 0023, iter [02400, 05004], lr: 0.100000, loss: 2.3010 +2023-09-11 00:56:42 - train: epoch 0023, iter [02500, 05004], lr: 0.100000, loss: 2.4690 +2023-09-11 00:57:19 - train: epoch 0023, iter [02600, 05004], lr: 0.100000, loss: 2.1996 +2023-09-11 00:57:56 - train: epoch 0023, iter [02700, 05004], lr: 0.100000, loss: 2.4205 +2023-09-11 00:58:31 - train: epoch 0023, iter [02800, 05004], lr: 0.100000, loss: 2.2243 +2023-09-11 00:59:09 - train: epoch 0023, iter [02900, 05004], lr: 0.100000, loss: 2.3418 +2023-09-11 00:59:44 - train: epoch 0023, iter [03000, 05004], lr: 0.100000, loss: 2.3055 +2023-09-11 01:00:19 - train: epoch 0023, iter [03100, 05004], lr: 0.100000, loss: 2.3998 +2023-09-11 01:00:58 - train: epoch 0023, iter [03200, 05004], lr: 0.100000, loss: 2.5719 +2023-09-11 01:01:32 - train: epoch 0023, iter [03300, 05004], lr: 0.100000, loss: 2.3390 +2023-09-11 01:02:08 - train: epoch 0023, iter [03400, 05004], lr: 0.100000, loss: 2.4095 +2023-09-11 01:02:46 - train: epoch 0023, iter [03500, 05004], lr: 0.100000, loss: 2.2777 +2023-09-11 01:03:21 - train: epoch 0023, iter [03600, 05004], lr: 0.100000, 
loss: 2.3692 +2023-09-11 01:03:58 - train: epoch 0023, iter [03700, 05004], lr: 0.100000, loss: 2.1233 +2023-09-11 01:04:36 - train: epoch 0023, iter [03800, 05004], lr: 0.100000, loss: 2.4376 +2023-09-11 01:05:10 - train: epoch 0023, iter [03900, 05004], lr: 0.100000, loss: 2.3040 +2023-09-11 01:05:47 - train: epoch 0023, iter [04000, 05004], lr: 0.100000, loss: 2.2203 +2023-09-11 01:06:25 - train: epoch 0023, iter [04100, 05004], lr: 0.100000, loss: 2.1691 +2023-09-11 01:07:01 - train: epoch 0023, iter [04200, 05004], lr: 0.100000, loss: 2.0493 +2023-09-11 01:07:36 - train: epoch 0023, iter [04300, 05004], lr: 0.100000, loss: 2.2098 +2023-09-11 01:08:12 - train: epoch 0023, iter [04400, 05004], lr: 0.100000, loss: 2.2278 +2023-09-11 01:08:50 - train: epoch 0023, iter [04500, 05004], lr: 0.100000, loss: 2.1787 +2023-09-11 01:09:26 - train: epoch 0023, iter [04600, 05004], lr: 0.100000, loss: 2.3700 +2023-09-11 01:10:01 - train: epoch 0023, iter [04700, 05004], lr: 0.100000, loss: 2.1336 +2023-09-11 01:10:40 - train: epoch 0023, iter [04800, 05004], lr: 0.100000, loss: 2.2575 +2023-09-11 01:11:17 - train: epoch 0023, iter [04900, 05004], lr: 0.100000, loss: 2.1011 +2023-09-11 01:11:49 - train: epoch 0023, iter [05000, 05004], lr: 0.100000, loss: 2.2359 +2023-09-11 01:11:50 - train: epoch 023, train_loss: 2.3079 +2023-09-11 01:13:14 - eval: epoch: 023, acc1: 52.596%, acc5: 78.026%, test_loss: 2.0341, per_image_load_time: 1.344ms, per_image_inference_time: 0.317ms +2023-09-11 01:13:14 - until epoch: 023, best_acc1: 52.596% +2023-09-11 01:13:14 - epoch 024 lr: 0.100000 +2023-09-11 01:13:58 - train: epoch 0024, iter [00100, 05004], lr: 0.100000, loss: 2.3542 +2023-09-11 01:14:34 - train: epoch 0024, iter [00200, 05004], lr: 0.100000, loss: 2.2321 +2023-09-11 01:15:11 - train: epoch 0024, iter [00300, 05004], lr: 0.100000, loss: 2.1814 +2023-09-11 01:15:48 - train: epoch 0024, iter [00400, 05004], lr: 0.100000, loss: 2.2986 +2023-09-11 01:16:27 - train: epoch 0024, iter [00500, 05004], lr: 0.100000, loss: 2.2767 +2023-09-11 01:17:02 - train: epoch 0024, iter [00600, 05004], lr: 0.100000, loss: 2.1890 +2023-09-11 01:17:38 - train: epoch 0024, iter [00700, 05004], lr: 0.100000, loss: 2.4165 +2023-09-11 01:18:16 - train: epoch 0024, iter [00800, 05004], lr: 0.100000, loss: 2.2550 +2023-09-11 01:18:54 - train: epoch 0024, iter [00900, 05004], lr: 0.100000, loss: 2.2557 +2023-09-11 01:19:30 - train: epoch 0024, iter [01000, 05004], lr: 0.100000, loss: 2.2286 +2023-09-11 01:20:07 - train: epoch 0024, iter [01100, 05004], lr: 0.100000, loss: 1.9045 +2023-09-11 01:20:44 - train: epoch 0024, iter [01200, 05004], lr: 0.100000, loss: 2.2966 +2023-09-11 01:21:19 - train: epoch 0024, iter [01300, 05004], lr: 0.100000, loss: 2.5654 +2023-09-11 01:21:58 - train: epoch 0024, iter [01400, 05004], lr: 0.100000, loss: 2.0269 +2023-09-11 01:22:33 - train: epoch 0024, iter [01500, 05004], lr: 0.100000, loss: 2.3708 +2023-09-11 01:23:09 - train: epoch 0024, iter [01600, 05004], lr: 0.100000, loss: 2.3713 +2023-09-11 01:23:48 - train: epoch 0024, iter [01700, 05004], lr: 0.100000, loss: 2.2586 +2023-09-11 01:24:23 - train: epoch 0024, iter [01800, 05004], lr: 0.100000, loss: 2.3874 +2023-09-11 01:24:59 - train: epoch 0024, iter [01900, 05004], lr: 0.100000, loss: 2.2059 +2023-09-11 01:25:35 - train: epoch 0024, iter [02000, 05004], lr: 0.100000, loss: 2.4154 +2023-09-11 01:26:13 - train: epoch 0024, iter [02100, 05004], lr: 0.100000, loss: 2.3237 +2023-09-11 01:26:49 - train: epoch 0024, iter [02200, 05004], lr: 
0.100000, loss: 2.0775 +2023-09-11 01:27:23 - train: epoch 0024, iter [02300, 05004], lr: 0.100000, loss: 2.3932 +2023-09-11 01:28:04 - train: epoch 0024, iter [02400, 05004], lr: 0.100000, loss: 2.4252 +2023-09-11 01:28:40 - train: epoch 0024, iter [02500, 05004], lr: 0.100000, loss: 2.4257 +2023-09-11 01:29:15 - train: epoch 0024, iter [02600, 05004], lr: 0.100000, loss: 2.3009 +2023-09-11 01:29:53 - train: epoch 0024, iter [02700, 05004], lr: 0.100000, loss: 2.6418 +2023-09-11 01:30:28 - train: epoch 0024, iter [02800, 05004], lr: 0.100000, loss: 2.3595 +2023-09-11 01:31:04 - train: epoch 0024, iter [02900, 05004], lr: 0.100000, loss: 2.3509 +2023-09-11 01:31:41 - train: epoch 0024, iter [03000, 05004], lr: 0.100000, loss: 2.1149 +2023-09-11 01:32:17 - train: epoch 0024, iter [03100, 05004], lr: 0.100000, loss: 2.0295 +2023-09-11 01:32:53 - train: epoch 0024, iter [03200, 05004], lr: 0.100000, loss: 2.4468 +2023-09-11 01:33:30 - train: epoch 0024, iter [03300, 05004], lr: 0.100000, loss: 2.0899 +2023-09-11 01:34:05 - train: epoch 0024, iter [03400, 05004], lr: 0.100000, loss: 2.2270 +2023-09-11 01:34:42 - train: epoch 0024, iter [03500, 05004], lr: 0.100000, loss: 2.4826 +2023-09-11 01:35:19 - train: epoch 0024, iter [03600, 05004], lr: 0.100000, loss: 2.3066 +2023-09-11 01:35:55 - train: epoch 0024, iter [03700, 05004], lr: 0.100000, loss: 2.3325 +2023-09-11 01:36:29 - train: epoch 0024, iter [03800, 05004], lr: 0.100000, loss: 2.5088 +2023-09-11 01:37:08 - train: epoch 0024, iter [03900, 05004], lr: 0.100000, loss: 2.0306 +2023-09-11 01:37:44 - train: epoch 0024, iter [04000, 05004], lr: 0.100000, loss: 2.5008 +2023-09-11 01:38:19 - train: epoch 0024, iter [04100, 05004], lr: 0.100000, loss: 2.2372 +2023-09-11 01:38:56 - train: epoch 0024, iter [04200, 05004], lr: 0.100000, loss: 2.2657 +2023-09-11 01:39:31 - train: epoch 0024, iter [04300, 05004], lr: 0.100000, loss: 2.2435 +2023-09-11 01:40:07 - train: epoch 0024, iter [04400, 05004], lr: 0.100000, loss: 2.3214 +2023-09-11 01:40:43 - train: epoch 0024, iter [04500, 05004], lr: 0.100000, loss: 2.3680 +2023-09-11 01:41:21 - train: epoch 0024, iter [04600, 05004], lr: 0.100000, loss: 2.4253 +2023-09-11 01:41:57 - train: epoch 0024, iter [04700, 05004], lr: 0.100000, loss: 2.2595 +2023-09-11 01:42:32 - train: epoch 0024, iter [04800, 05004], lr: 0.100000, loss: 2.2546 +2023-09-11 01:43:10 - train: epoch 0024, iter [04900, 05004], lr: 0.100000, loss: 2.4441 +2023-09-11 01:43:43 - train: epoch 0024, iter [05000, 05004], lr: 0.100000, loss: 2.3615 +2023-09-11 01:43:43 - train: epoch 024, train_loss: 2.3034 +2023-09-11 01:45:09 - eval: epoch: 024, acc1: 48.802%, acc5: 74.336%, test_loss: 2.2428, per_image_load_time: 1.376ms, per_image_inference_time: 0.320ms +2023-09-11 01:45:09 - until epoch: 024, best_acc1: 52.596% +2023-09-11 01:45:09 - epoch 025 lr: 0.100000 +2023-09-11 01:45:53 - train: epoch 0025, iter [00100, 05004], lr: 0.100000, loss: 2.3539 +2023-09-11 01:46:29 - train: epoch 0025, iter [00200, 05004], lr: 0.100000, loss: 2.0301 +2023-09-11 01:47:04 - train: epoch 0025, iter [00300, 05004], lr: 0.100000, loss: 2.3589 +2023-09-11 01:47:43 - train: epoch 0025, iter [00400, 05004], lr: 0.100000, loss: 2.3125 +2023-09-11 01:48:18 - train: epoch 0025, iter [00500, 05004], lr: 0.100000, loss: 1.9392 +2023-09-11 01:48:53 - train: epoch 0025, iter [00600, 05004], lr: 0.100000, loss: 2.2034 +2023-09-11 01:49:31 - train: epoch 0025, iter [00700, 05004], lr: 0.100000, loss: 2.2831 +2023-09-11 01:50:06 - train: epoch 0025, iter [00800, 
05004], lr: 0.100000, loss: 2.2257 +2023-09-11 01:50:41 - train: epoch 0025, iter [00900, 05004], lr: 0.100000, loss: 2.1332 +2023-09-11 01:51:20 - train: epoch 0025, iter [01000, 05004], lr: 0.100000, loss: 2.2253 +2023-09-11 01:51:55 - train: epoch 0025, iter [01100, 05004], lr: 0.100000, loss: 2.3977 +2023-09-11 01:52:31 - train: epoch 0025, iter [01200, 05004], lr: 0.100000, loss: 2.4429 +2023-09-11 01:53:07 - train: epoch 0025, iter [01300, 05004], lr: 0.100000, loss: 2.3068 +2023-09-11 01:53:45 - train: epoch 0025, iter [01400, 05004], lr: 0.100000, loss: 2.2684 +2023-09-11 01:54:21 - train: epoch 0025, iter [01500, 05004], lr: 0.100000, loss: 2.3372 +2023-09-11 01:54:58 - train: epoch 0025, iter [01600, 05004], lr: 0.100000, loss: 2.0324 +2023-09-11 01:55:33 - train: epoch 0025, iter [01700, 05004], lr: 0.100000, loss: 2.2942 +2023-09-11 01:56:11 - train: epoch 0025, iter [01800, 05004], lr: 0.100000, loss: 2.1722 +2023-09-11 01:56:46 - train: epoch 0025, iter [01900, 05004], lr: 0.100000, loss: 2.1778 +2023-09-11 01:57:22 - train: epoch 0025, iter [02000, 05004], lr: 0.100000, loss: 2.2654 +2023-09-11 01:57:59 - train: epoch 0025, iter [02100, 05004], lr: 0.100000, loss: 2.0092 +2023-09-11 01:58:38 - train: epoch 0025, iter [02200, 05004], lr: 0.100000, loss: 2.0365 +2023-09-11 01:59:12 - train: epoch 0025, iter [02300, 05004], lr: 0.100000, loss: 2.2378 +2023-09-11 01:59:48 - train: epoch 0025, iter [02400, 05004], lr: 0.100000, loss: 2.3313 +2023-09-11 02:00:27 - train: epoch 0025, iter [02500, 05004], lr: 0.100000, loss: 2.4164 +2023-09-11 02:01:03 - train: epoch 0025, iter [02600, 05004], lr: 0.100000, loss: 2.4120 +2023-09-11 02:01:37 - train: epoch 0025, iter [02700, 05004], lr: 0.100000, loss: 2.3614 +2023-09-11 02:02:15 - train: epoch 0025, iter [02800, 05004], lr: 0.100000, loss: 2.2382 +2023-09-11 02:02:51 - train: epoch 0025, iter [02900, 05004], lr: 0.100000, loss: 2.3104 +2023-09-11 02:03:27 - train: epoch 0025, iter [03000, 05004], lr: 0.100000, loss: 2.5342 +2023-09-11 02:04:03 - train: epoch 0025, iter [03100, 05004], lr: 0.100000, loss: 2.2644 +2023-09-11 02:04:41 - train: epoch 0025, iter [03200, 05004], lr: 0.100000, loss: 2.1375 +2023-09-11 02:05:17 - train: epoch 0025, iter [03300, 05004], lr: 0.100000, loss: 2.2989 +2023-09-11 02:05:52 - train: epoch 0025, iter [03400, 05004], lr: 0.100000, loss: 2.3911 +2023-09-11 02:06:29 - train: epoch 0025, iter [03500, 05004], lr: 0.100000, loss: 2.2355 +2023-09-11 02:07:06 - train: epoch 0025, iter [03600, 05004], lr: 0.100000, loss: 2.1635 +2023-09-11 02:07:41 - train: epoch 0025, iter [03700, 05004], lr: 0.100000, loss: 2.3969 +2023-09-11 02:08:17 - train: epoch 0025, iter [03800, 05004], lr: 0.100000, loss: 2.4769 +2023-09-11 02:08:54 - train: epoch 0025, iter [03900, 05004], lr: 0.100000, loss: 2.4792 +2023-09-11 02:09:31 - train: epoch 0025, iter [04000, 05004], lr: 0.100000, loss: 2.3006 +2023-09-11 02:10:08 - train: epoch 0025, iter [04100, 05004], lr: 0.100000, loss: 2.4235 +2023-09-11 02:10:44 - train: epoch 0025, iter [04200, 05004], lr: 0.100000, loss: 2.2866 +2023-09-11 02:11:21 - train: epoch 0025, iter [04300, 05004], lr: 0.100000, loss: 2.2986 +2023-09-11 02:11:58 - train: epoch 0025, iter [04400, 05004], lr: 0.100000, loss: 2.2850 +2023-09-11 02:12:35 - train: epoch 0025, iter [04500, 05004], lr: 0.100000, loss: 2.5084 +2023-09-11 02:13:10 - train: epoch 0025, iter [04600, 05004], lr: 0.100000, loss: 1.8932 +2023-09-11 02:13:48 - train: epoch 0025, iter [04700, 05004], lr: 0.100000, loss: 2.3267 
+2023-09-11 02:14:25 - train: epoch 0025, iter [04800, 05004], lr: 0.100000, loss: 2.1328 +2023-09-11 02:14:59 - train: epoch 0025, iter [04900, 05004], lr: 0.100000, loss: 2.5047 +2023-09-11 02:15:34 - train: epoch 0025, iter [05000, 05004], lr: 0.100000, loss: 2.3722 +2023-09-11 02:15:35 - train: epoch 025, train_loss: 2.2992 +2023-09-11 02:16:59 - eval: epoch: 025, acc1: 51.208%, acc5: 76.890%, test_loss: 2.1025, per_image_load_time: 1.366ms, per_image_inference_time: 0.301ms +2023-09-11 02:16:59 - until epoch: 025, best_acc1: 52.596% +2023-09-11 02:16:59 - epoch 026 lr: 0.100000 +2023-09-11 02:17:41 - train: epoch 0026, iter [00100, 05004], lr: 0.100000, loss: 2.1907 +2023-09-11 02:18:18 - train: epoch 0026, iter [00200, 05004], lr: 0.100000, loss: 2.2917 +2023-09-11 02:18:55 - train: epoch 0026, iter [00300, 05004], lr: 0.100000, loss: 2.2608 +2023-09-11 02:19:32 - train: epoch 0026, iter [00400, 05004], lr: 0.100000, loss: 1.9937 +2023-09-11 02:20:08 - train: epoch 0026, iter [00500, 05004], lr: 0.100000, loss: 2.1947 +2023-09-11 02:20:44 - train: epoch 0026, iter [00600, 05004], lr: 0.100000, loss: 2.4234 +2023-09-11 02:21:20 - train: epoch 0026, iter [00700, 05004], lr: 0.100000, loss: 2.3633 +2023-09-11 02:21:56 - train: epoch 0026, iter [00800, 05004], lr: 0.100000, loss: 2.1436 +2023-09-11 02:22:32 - train: epoch 0026, iter [00900, 05004], lr: 0.100000, loss: 2.2467 +2023-09-11 02:23:11 - train: epoch 0026, iter [01000, 05004], lr: 0.100000, loss: 2.4040 +2023-09-11 02:23:46 - train: epoch 0026, iter [01100, 05004], lr: 0.100000, loss: 2.3255 +2023-09-11 02:24:22 - train: epoch 0026, iter [01200, 05004], lr: 0.100000, loss: 2.4377 +2023-09-11 02:24:57 - train: epoch 0026, iter [01300, 05004], lr: 0.100000, loss: 2.2490 +2023-09-11 02:25:34 - train: epoch 0026, iter [01400, 05004], lr: 0.100000, loss: 2.3878 +2023-09-11 02:26:10 - train: epoch 0026, iter [01500, 05004], lr: 0.100000, loss: 2.1488 +2023-09-11 02:26:47 - train: epoch 0026, iter [01600, 05004], lr: 0.100000, loss: 2.3208 +2023-09-11 02:27:23 - train: epoch 0026, iter [01700, 05004], lr: 0.100000, loss: 2.3038 +2023-09-11 02:28:02 - train: epoch 0026, iter [01800, 05004], lr: 0.100000, loss: 2.5302 +2023-09-11 02:28:38 - train: epoch 0026, iter [01900, 05004], lr: 0.100000, loss: 2.5451 +2023-09-11 02:29:14 - train: epoch 0026, iter [02000, 05004], lr: 0.100000, loss: 2.3326 +2023-09-11 02:29:51 - train: epoch 0026, iter [02100, 05004], lr: 0.100000, loss: 2.5367 +2023-09-11 02:30:28 - train: epoch 0026, iter [02200, 05004], lr: 0.100000, loss: 2.4151 +2023-09-11 02:31:02 - train: epoch 0026, iter [02300, 05004], lr: 0.100000, loss: 2.3379 +2023-09-11 02:31:39 - train: epoch 0026, iter [02400, 05004], lr: 0.100000, loss: 2.5016 +2023-09-11 02:32:15 - train: epoch 0026, iter [02500, 05004], lr: 0.100000, loss: 2.2323 +2023-09-11 02:32:53 - train: epoch 0026, iter [02600, 05004], lr: 0.100000, loss: 2.2933 +2023-09-11 02:33:28 - train: epoch 0026, iter [02700, 05004], lr: 0.100000, loss: 2.4022 +2023-09-11 02:34:04 - train: epoch 0026, iter [02800, 05004], lr: 0.100000, loss: 2.3101 +2023-09-11 02:34:42 - train: epoch 0026, iter [02900, 05004], lr: 0.100000, loss: 2.3443 +2023-09-11 02:35:17 - train: epoch 0026, iter [03000, 05004], lr: 0.100000, loss: 2.2890 +2023-09-11 02:35:54 - train: epoch 0026, iter [03100, 05004], lr: 0.100000, loss: 2.2440 +2023-09-11 02:36:32 - train: epoch 0026, iter [03200, 05004], lr: 0.100000, loss: 2.2769 +2023-09-11 02:37:08 - train: epoch 0026, iter [03300, 05004], lr: 0.100000, loss: 
2.4050 +2023-09-11 02:37:44 - train: epoch 0026, iter [03400, 05004], lr: 0.100000, loss: 2.3805 +2023-09-11 02:38:19 - train: epoch 0026, iter [03500, 05004], lr: 0.100000, loss: 2.4634 +2023-09-11 02:38:57 - train: epoch 0026, iter [03600, 05004], lr: 0.100000, loss: 2.4350 +2023-09-11 02:39:31 - train: epoch 0026, iter [03700, 05004], lr: 0.100000, loss: 2.3359 +2023-09-11 02:40:09 - train: epoch 0026, iter [03800, 05004], lr: 0.100000, loss: 2.5825 +2023-09-11 02:40:44 - train: epoch 0026, iter [03900, 05004], lr: 0.100000, loss: 2.5952 +2023-09-11 02:41:22 - train: epoch 0026, iter [04000, 05004], lr: 0.100000, loss: 2.3933 +2023-09-11 02:41:57 - train: epoch 0026, iter [04100, 05004], lr: 0.100000, loss: 2.2966 +2023-09-11 02:42:34 - train: epoch 0026, iter [04200, 05004], lr: 0.100000, loss: 2.3774 +2023-09-11 02:43:11 - train: epoch 0026, iter [04300, 05004], lr: 0.100000, loss: 2.4243 +2023-09-11 02:43:47 - train: epoch 0026, iter [04400, 05004], lr: 0.100000, loss: 2.3859 +2023-09-11 02:44:24 - train: epoch 0026, iter [04500, 05004], lr: 0.100000, loss: 2.4813 +2023-09-11 02:45:00 - train: epoch 0026, iter [04600, 05004], lr: 0.100000, loss: 2.3120 +2023-09-11 02:45:35 - train: epoch 0026, iter [04700, 05004], lr: 0.100000, loss: 2.2931 +2023-09-11 02:46:14 - train: epoch 0026, iter [04800, 05004], lr: 0.100000, loss: 2.2129 +2023-09-11 02:46:49 - train: epoch 0026, iter [04900, 05004], lr: 0.100000, loss: 2.4789 +2023-09-11 02:47:24 - train: epoch 0026, iter [05000, 05004], lr: 0.100000, loss: 2.3031 +2023-09-11 02:47:24 - train: epoch 026, train_loss: 2.2973 +2023-09-11 02:48:48 - eval: epoch: 026, acc1: 51.876%, acc5: 77.116%, test_loss: 2.0789, per_image_load_time: 1.343ms, per_image_inference_time: 0.321ms +2023-09-11 02:48:48 - until epoch: 026, best_acc1: 52.596% +2023-09-11 02:48:48 - epoch 027 lr: 0.100000 +2023-09-11 02:49:33 - train: epoch 0027, iter [00100, 05004], lr: 0.100000, loss: 2.3086 +2023-09-11 02:50:08 - train: epoch 0027, iter [00200, 05004], lr: 0.100000, loss: 2.2508 +2023-09-11 02:50:43 - train: epoch 0027, iter [00300, 05004], lr: 0.100000, loss: 2.3200 +2023-09-11 02:51:22 - train: epoch 0027, iter [00400, 05004], lr: 0.100000, loss: 2.2708 +2023-09-11 02:51:57 - train: epoch 0027, iter [00500, 05004], lr: 0.100000, loss: 2.3081 +2023-09-11 02:52:34 - train: epoch 0027, iter [00600, 05004], lr: 0.100000, loss: 2.4896 +2023-09-11 02:53:11 - train: epoch 0027, iter [00700, 05004], lr: 0.100000, loss: 2.2598 +2023-09-11 02:53:46 - train: epoch 0027, iter [00800, 05004], lr: 0.100000, loss: 2.1720 +2023-09-11 02:54:24 - train: epoch 0027, iter [00900, 05004], lr: 0.100000, loss: 2.2038 +2023-09-11 02:54:58 - train: epoch 0027, iter [01000, 05004], lr: 0.100000, loss: 2.4622 +2023-09-11 02:55:35 - train: epoch 0027, iter [01100, 05004], lr: 0.100000, loss: 2.0674 +2023-09-11 02:56:13 - train: epoch 0027, iter [01200, 05004], lr: 0.100000, loss: 2.2860 +2023-09-11 02:56:48 - train: epoch 0027, iter [01300, 05004], lr: 0.100000, loss: 2.4306 +2023-09-11 02:57:24 - train: epoch 0027, iter [01400, 05004], lr: 0.100000, loss: 2.3039 +2023-09-11 02:58:01 - train: epoch 0027, iter [01500, 05004], lr: 0.100000, loss: 2.4185 +2023-09-11 02:58:36 - train: epoch 0027, iter [01600, 05004], lr: 0.100000, loss: 2.3944 +2023-09-11 02:59:15 - train: epoch 0027, iter [01700, 05004], lr: 0.100000, loss: 2.0782 +2023-09-11 02:59:52 - train: epoch 0027, iter [01800, 05004], lr: 0.100000, loss: 2.2508 +2023-09-11 03:00:27 - train: epoch 0027, iter [01900, 05004], lr: 0.100000, 
loss: 2.3571 +2023-09-11 03:01:04 - train: epoch 0027, iter [02000, 05004], lr: 0.100000, loss: 2.0564 +2023-09-11 03:01:42 - train: epoch 0027, iter [02100, 05004], lr: 0.100000, loss: 2.3873 +2023-09-11 03:02:19 - train: epoch 0027, iter [02200, 05004], lr: 0.100000, loss: 2.3332 +2023-09-11 03:02:54 - train: epoch 0027, iter [02300, 05004], lr: 0.100000, loss: 2.5013 +2023-09-11 03:03:33 - train: epoch 0027, iter [02400, 05004], lr: 0.100000, loss: 2.1158 +2023-09-11 03:04:08 - train: epoch 0027, iter [02500, 05004], lr: 0.100000, loss: 2.3984 +2023-09-11 03:04:44 - train: epoch 0027, iter [02600, 05004], lr: 0.100000, loss: 2.2789 +2023-09-11 03:05:22 - train: epoch 0027, iter [02700, 05004], lr: 0.100000, loss: 2.2119 +2023-09-11 03:05:57 - train: epoch 0027, iter [02800, 05004], lr: 0.100000, loss: 2.3268 +2023-09-11 03:06:32 - train: epoch 0027, iter [02900, 05004], lr: 0.100000, loss: 2.5345 +2023-09-11 03:07:10 - train: epoch 0027, iter [03000, 05004], lr: 0.100000, loss: 2.4371 +2023-09-11 03:07:45 - train: epoch 0027, iter [03100, 05004], lr: 0.100000, loss: 2.3488 +2023-09-11 03:08:22 - train: epoch 0027, iter [03200, 05004], lr: 0.100000, loss: 2.3205 +2023-09-11 03:09:00 - train: epoch 0027, iter [03300, 05004], lr: 0.100000, loss: 2.4443 +2023-09-11 03:09:35 - train: epoch 0027, iter [03400, 05004], lr: 0.100000, loss: 2.1441 +2023-09-11 03:10:10 - train: epoch 0027, iter [03500, 05004], lr: 0.100000, loss: 2.5058 +2023-09-11 03:10:48 - train: epoch 0027, iter [03600, 05004], lr: 0.100000, loss: 2.4741 +2023-09-11 03:11:24 - train: epoch 0027, iter [03700, 05004], lr: 0.100000, loss: 2.4684 +2023-09-11 03:11:59 - train: epoch 0027, iter [03800, 05004], lr: 0.100000, loss: 2.2142 +2023-09-11 03:12:37 - train: epoch 0027, iter [03900, 05004], lr: 0.100000, loss: 2.3639 +2023-09-11 03:13:14 - train: epoch 0027, iter [04000, 05004], lr: 0.100000, loss: 2.1567 +2023-09-11 03:13:50 - train: epoch 0027, iter [04100, 05004], lr: 0.100000, loss: 2.3484 +2023-09-11 03:14:27 - train: epoch 0027, iter [04200, 05004], lr: 0.100000, loss: 2.1401 +2023-09-11 03:15:03 - train: epoch 0027, iter [04300, 05004], lr: 0.100000, loss: 2.4009 +2023-09-11 03:15:40 - train: epoch 0027, iter [04400, 05004], lr: 0.100000, loss: 2.3834 +2023-09-11 03:16:16 - train: epoch 0027, iter [04500, 05004], lr: 0.100000, loss: 2.0713 +2023-09-11 03:16:51 - train: epoch 0027, iter [04600, 05004], lr: 0.100000, loss: 2.2373 +2023-09-11 03:17:29 - train: epoch 0027, iter [04700, 05004], lr: 0.100000, loss: 2.5209 +2023-09-11 03:18:04 - train: epoch 0027, iter [04800, 05004], lr: 0.100000, loss: 2.3367 +2023-09-11 03:18:39 - train: epoch 0027, iter [04900, 05004], lr: 0.100000, loss: 2.2381 +2023-09-11 03:19:14 - train: epoch 0027, iter [05000, 05004], lr: 0.100000, loss: 2.2098 +2023-09-11 03:19:15 - train: epoch 027, train_loss: 2.2927 +2023-09-11 03:20:39 - eval: epoch: 027, acc1: 52.250%, acc5: 77.308%, test_loss: 2.0568, per_image_load_time: 1.350ms, per_image_inference_time: 0.322ms +2023-09-11 03:20:39 - until epoch: 027, best_acc1: 52.596% +2023-09-11 03:20:39 - epoch 028 lr: 0.100000 +2023-09-11 03:21:23 - train: epoch 0028, iter [00100, 05004], lr: 0.100000, loss: 2.0745 +2023-09-11 03:21:59 - train: epoch 0028, iter [00200, 05004], lr: 0.100000, loss: 2.4453 +2023-09-11 03:22:35 - train: epoch 0028, iter [00300, 05004], lr: 0.100000, loss: 2.2926 +2023-09-11 03:23:11 - train: epoch 0028, iter [00400, 05004], lr: 0.100000, loss: 2.4544 +2023-09-11 03:23:48 - train: epoch 0028, iter [00500, 05004], lr: 
0.100000, loss: 2.2257 +2023-09-11 03:24:24 - train: epoch 0028, iter [00600, 05004], lr: 0.100000, loss: 2.4472 +2023-09-11 03:24:59 - train: epoch 0028, iter [00700, 05004], lr: 0.100000, loss: 2.5293 +2023-09-11 03:25:38 - train: epoch 0028, iter [00800, 05004], lr: 0.100000, loss: 2.1242 +2023-09-11 03:26:12 - train: epoch 0028, iter [00900, 05004], lr: 0.100000, loss: 2.1964 +2023-09-11 03:26:48 - train: epoch 0028, iter [01000, 05004], lr: 0.100000, loss: 2.4032 +2023-09-11 03:27:27 - train: epoch 0028, iter [01100, 05004], lr: 0.100000, loss: 2.0586 +2023-09-11 03:28:03 - train: epoch 0028, iter [01200, 05004], lr: 0.100000, loss: 2.3784 +2023-09-11 03:28:37 - train: epoch 0028, iter [01300, 05004], lr: 0.100000, loss: 2.3267 +2023-09-11 03:29:13 - train: epoch 0028, iter [01400, 05004], lr: 0.100000, loss: 2.6002 +2023-09-11 03:29:50 - train: epoch 0028, iter [01500, 05004], lr: 0.100000, loss: 2.2161 +2023-09-11 03:30:26 - train: epoch 0028, iter [01600, 05004], lr: 0.100000, loss: 2.3310 +2023-09-11 03:31:02 - train: epoch 0028, iter [01700, 05004], lr: 0.100000, loss: 2.2040 +2023-09-11 03:31:39 - train: epoch 0028, iter [01800, 05004], lr: 0.100000, loss: 2.1853 +2023-09-11 03:32:16 - train: epoch 0028, iter [01900, 05004], lr: 0.100000, loss: 2.3415 +2023-09-11 03:32:51 - train: epoch 0028, iter [02000, 05004], lr: 0.100000, loss: 2.4448 +2023-09-11 03:33:28 - train: epoch 0028, iter [02100, 05004], lr: 0.100000, loss: 2.4212 +2023-09-11 03:34:04 - train: epoch 0028, iter [02200, 05004], lr: 0.100000, loss: 2.4221 +2023-09-11 03:34:40 - train: epoch 0028, iter [02300, 05004], lr: 0.100000, loss: 2.3060 +2023-09-11 03:35:17 - train: epoch 0028, iter [02400, 05004], lr: 0.100000, loss: 2.3060 +2023-09-11 03:35:55 - train: epoch 0028, iter [02500, 05004], lr: 0.100000, loss: 2.4083 +2023-09-11 03:36:30 - train: epoch 0028, iter [02600, 05004], lr: 0.100000, loss: 2.4236 +2023-09-11 03:37:06 - train: epoch 0028, iter [02700, 05004], lr: 0.100000, loss: 2.2695 +2023-09-11 03:37:45 - train: epoch 0028, iter [02800, 05004], lr: 0.100000, loss: 2.2732 +2023-09-11 03:38:21 - train: epoch 0028, iter [02900, 05004], lr: 0.100000, loss: 2.4118 +2023-09-11 03:38:56 - train: epoch 0028, iter [03000, 05004], lr: 0.100000, loss: 2.2851 +2023-09-11 03:39:35 - train: epoch 0028, iter [03100, 05004], lr: 0.100000, loss: 2.5574 +2023-09-11 03:40:10 - train: epoch 0028, iter [03200, 05004], lr: 0.100000, loss: 2.2971 +2023-09-11 03:40:45 - train: epoch 0028, iter [03300, 05004], lr: 0.100000, loss: 2.1917 +2023-09-11 03:41:24 - train: epoch 0028, iter [03400, 05004], lr: 0.100000, loss: 2.4012 +2023-09-11 03:41:59 - train: epoch 0028, iter [03500, 05004], lr: 0.100000, loss: 2.3876 +2023-09-11 03:42:35 - train: epoch 0028, iter [03600, 05004], lr: 0.100000, loss: 2.1753 +2023-09-11 03:43:13 - train: epoch 0028, iter [03700, 05004], lr: 0.100000, loss: 2.5998 +2023-09-11 03:43:48 - train: epoch 0028, iter [03800, 05004], lr: 0.100000, loss: 2.0177 +2023-09-11 03:44:26 - train: epoch 0028, iter [03900, 05004], lr: 0.100000, loss: 2.4323 +2023-09-11 03:45:00 - train: epoch 0028, iter [04000, 05004], lr: 0.100000, loss: 2.4038 +2023-09-11 03:45:35 - train: epoch 0028, iter [04100, 05004], lr: 0.100000, loss: 2.3284 +2023-09-11 03:46:12 - train: epoch 0028, iter [04200, 05004], lr: 0.100000, loss: 2.2253 +2023-09-11 03:46:51 - train: epoch 0028, iter [04300, 05004], lr: 0.100000, loss: 2.2885 +2023-09-11 03:47:26 - train: epoch 0028, iter [04400, 05004], lr: 0.100000, loss: 2.2938 +2023-09-11 03:48:00 
- train: epoch 0028, iter [04500, 05004], lr: 0.100000, loss: 2.4669 +2023-09-11 03:48:37 - train: epoch 0028, iter [04600, 05004], lr: 0.100000, loss: 2.2203 +2023-09-11 03:49:15 - train: epoch 0028, iter [04700, 05004], lr: 0.100000, loss: 2.4753 +2023-09-11 03:49:51 - train: epoch 0028, iter [04800, 05004], lr: 0.100000, loss: 2.3163 +2023-09-11 03:50:28 - train: epoch 0028, iter [04900, 05004], lr: 0.100000, loss: 2.1311 +2023-09-11 03:51:01 - train: epoch 0028, iter [05000, 05004], lr: 0.100000, loss: 2.1739 +2023-09-11 03:51:02 - train: epoch 028, train_loss: 2.2908 +2023-09-11 03:52:26 - eval: epoch: 028, acc1: 51.948%, acc5: 77.432%, test_loss: 2.0659, per_image_load_time: 1.352ms, per_image_inference_time: 0.314ms +2023-09-11 03:52:26 - until epoch: 028, best_acc1: 52.596% +2023-09-11 03:52:26 - epoch 029 lr: 0.100000 +2023-09-11 03:53:11 - train: epoch 0029, iter [00100, 05004], lr: 0.100000, loss: 2.2619 +2023-09-11 03:53:48 - train: epoch 0029, iter [00200, 05004], lr: 0.100000, loss: 2.1860 +2023-09-11 03:54:24 - train: epoch 0029, iter [00300, 05004], lr: 0.100000, loss: 2.3821 +2023-09-11 03:55:01 - train: epoch 0029, iter [00400, 05004], lr: 0.100000, loss: 2.3091 +2023-09-11 03:55:36 - train: epoch 0029, iter [00500, 05004], lr: 0.100000, loss: 2.2746 +2023-09-11 03:56:11 - train: epoch 0029, iter [00600, 05004], lr: 0.100000, loss: 2.1875 +2023-09-11 03:56:47 - train: epoch 0029, iter [00700, 05004], lr: 0.100000, loss: 2.0408 +2023-09-11 03:57:25 - train: epoch 0029, iter [00800, 05004], lr: 0.100000, loss: 2.2513 +2023-09-11 03:58:01 - train: epoch 0029, iter [00900, 05004], lr: 0.100000, loss: 2.3094 +2023-09-11 03:58:37 - train: epoch 0029, iter [01000, 05004], lr: 0.100000, loss: 2.1335 +2023-09-11 03:59:14 - train: epoch 0029, iter [01100, 05004], lr: 0.100000, loss: 2.2231 +2023-09-11 03:59:52 - train: epoch 0029, iter [01200, 05004], lr: 0.100000, loss: 2.2672 +2023-09-11 04:00:27 - train: epoch 0029, iter [01300, 05004], lr: 0.100000, loss: 2.2627 +2023-09-11 04:01:03 - train: epoch 0029, iter [01400, 05004], lr: 0.100000, loss: 2.2312 +2023-09-11 04:01:40 - train: epoch 0029, iter [01500, 05004], lr: 0.100000, loss: 2.4804 +2023-09-11 04:02:16 - train: epoch 0029, iter [01600, 05004], lr: 0.100000, loss: 2.3509 +2023-09-11 04:02:52 - train: epoch 0029, iter [01700, 05004], lr: 0.100000, loss: 2.2947 +2023-09-11 04:03:29 - train: epoch 0029, iter [01800, 05004], lr: 0.100000, loss: 2.2319 +2023-09-11 04:04:05 - train: epoch 0029, iter [01900, 05004], lr: 0.100000, loss: 2.2474 +2023-09-11 04:04:40 - train: epoch 0029, iter [02000, 05004], lr: 0.100000, loss: 2.2489 +2023-09-11 04:05:18 - train: epoch 0029, iter [02100, 05004], lr: 0.100000, loss: 2.2904 +2023-09-11 04:05:53 - train: epoch 0029, iter [02200, 05004], lr: 0.100000, loss: 2.4979 +2023-09-11 04:06:30 - train: epoch 0029, iter [02300, 05004], lr: 0.100000, loss: 2.1781 +2023-09-11 04:07:07 - train: epoch 0029, iter [02400, 05004], lr: 0.100000, loss: 2.2261 +2023-09-11 04:07:43 - train: epoch 0029, iter [02500, 05004], lr: 0.100000, loss: 2.5092 +2023-09-11 04:08:18 - train: epoch 0029, iter [02600, 05004], lr: 0.100000, loss: 2.4126 +2023-09-11 04:08:56 - train: epoch 0029, iter [02700, 05004], lr: 0.100000, loss: 2.1928 +2023-09-11 04:09:32 - train: epoch 0029, iter [02800, 05004], lr: 0.100000, loss: 2.2636 +2023-09-11 04:10:10 - train: epoch 0029, iter [02900, 05004], lr: 0.100000, loss: 2.2304 +2023-09-11 04:10:45 - train: epoch 0029, iter [03000, 05004], lr: 0.100000, loss: 2.2135 +2023-09-11 
04:11:21 - train: epoch 0029, iter [03100, 05004], lr: 0.100000, loss: 2.3423 +2023-09-11 04:12:00 - train: epoch 0029, iter [03200, 05004], lr: 0.100000, loss: 2.3370 +2023-09-11 04:12:34 - train: epoch 0029, iter [03300, 05004], lr: 0.100000, loss: 2.1531 +2023-09-11 04:13:10 - train: epoch 0029, iter [03400, 05004], lr: 0.100000, loss: 2.2606 +2023-09-11 04:13:47 - train: epoch 0029, iter [03500, 05004], lr: 0.100000, loss: 2.3929 +2023-09-11 04:14:23 - train: epoch 0029, iter [03600, 05004], lr: 0.100000, loss: 2.2367 +2023-09-11 04:15:02 - train: epoch 0029, iter [03700, 05004], lr: 0.100000, loss: 2.2377 +2023-09-11 04:15:36 - train: epoch 0029, iter [03800, 05004], lr: 0.100000, loss: 2.1351 +2023-09-11 04:16:15 - train: epoch 0029, iter [03900, 05004], lr: 0.100000, loss: 2.1449 +2023-09-11 04:16:54 - train: epoch 0029, iter [04000, 05004], lr: 0.100000, loss: 2.3518 +2023-09-11 04:17:30 - train: epoch 0029, iter [04100, 05004], lr: 0.100000, loss: 2.2753 +2023-09-11 04:18:04 - train: epoch 0029, iter [04200, 05004], lr: 0.100000, loss: 1.9666 +2023-09-11 04:18:41 - train: epoch 0029, iter [04300, 05004], lr: 0.100000, loss: 2.2737 +2023-09-11 04:19:17 - train: epoch 0029, iter [04400, 05004], lr: 0.100000, loss: 2.4059 +2023-09-11 04:19:54 - train: epoch 0029, iter [04500, 05004], lr: 0.100000, loss: 2.2866 +2023-09-11 04:20:30 - train: epoch 0029, iter [04600, 05004], lr: 0.100000, loss: 2.4490 +2023-09-11 04:21:06 - train: epoch 0029, iter [04700, 05004], lr: 0.100000, loss: 2.0783 +2023-09-11 04:21:43 - train: epoch 0029, iter [04800, 05004], lr: 0.100000, loss: 2.3509 +2023-09-11 04:22:18 - train: epoch 0029, iter [04900, 05004], lr: 0.100000, loss: 2.4915 +2023-09-11 04:22:53 - train: epoch 0029, iter [05000, 05004], lr: 0.100000, loss: 2.0904 +2023-09-11 04:22:54 - train: epoch 029, train_loss: 2.2888 +2023-09-11 04:24:18 - eval: epoch: 029, acc1: 51.292%, acc5: 76.884%, test_loss: 2.1036, per_image_load_time: 1.346ms, per_image_inference_time: 0.320ms +2023-09-11 04:24:18 - until epoch: 029, best_acc1: 52.596% +2023-09-11 04:24:18 - epoch 030 lr: 0.100000 +2023-09-11 04:25:03 - train: epoch 0030, iter [00100, 05004], lr: 0.100000, loss: 2.2349 +2023-09-11 04:25:39 - train: epoch 0030, iter [00200, 05004], lr: 0.100000, loss: 2.3125 +2023-09-11 04:26:14 - train: epoch 0030, iter [00300, 05004], lr: 0.100000, loss: 2.0678 +2023-09-11 04:26:52 - train: epoch 0030, iter [00400, 05004], lr: 0.100000, loss: 2.1112 +2023-09-11 04:27:26 - train: epoch 0030, iter [00500, 05004], lr: 0.100000, loss: 2.2956 +2023-09-11 04:28:02 - train: epoch 0030, iter [00600, 05004], lr: 0.100000, loss: 2.1483 +2023-09-11 04:28:38 - train: epoch 0030, iter [00700, 05004], lr: 0.100000, loss: 2.1545 +2023-09-11 04:29:16 - train: epoch 0030, iter [00800, 05004], lr: 0.100000, loss: 2.3985 +2023-09-11 04:29:53 - train: epoch 0030, iter [00900, 05004], lr: 0.100000, loss: 2.2799 +2023-09-11 04:30:27 - train: epoch 0030, iter [01000, 05004], lr: 0.100000, loss: 1.9555 +2023-09-11 04:31:05 - train: epoch 0030, iter [01100, 05004], lr: 0.100000, loss: 2.3490 +2023-09-11 04:31:41 - train: epoch 0030, iter [01200, 05004], lr: 0.100000, loss: 2.2953 +2023-09-11 04:32:17 - train: epoch 0030, iter [01300, 05004], lr: 0.100000, loss: 1.9886 +2023-09-11 04:32:53 - train: epoch 0030, iter [01400, 05004], lr: 0.100000, loss: 2.2530 +2023-09-11 04:33:33 - train: epoch 0030, iter [01500, 05004], lr: 0.100000, loss: 2.2081 +2023-09-11 04:34:09 - train: epoch 0030, iter [01600, 05004], lr: 0.100000, loss: 2.2305 
+2023-09-11 04:34:45 - train: epoch 0030, iter [01700, 05004], lr: 0.100000, loss: 2.6049 +2023-09-11 04:35:22 - train: epoch 0030, iter [01800, 05004], lr: 0.100000, loss: 2.3442 +2023-09-11 04:35:57 - train: epoch 0030, iter [01900, 05004], lr: 0.100000, loss: 2.3396 +2023-09-11 04:36:32 - train: epoch 0030, iter [02000, 05004], lr: 0.100000, loss: 2.1830 +2023-09-11 04:37:11 - train: epoch 0030, iter [02100, 05004], lr: 0.100000, loss: 2.3886 +2023-09-11 04:37:47 - train: epoch 0030, iter [02200, 05004], lr: 0.100000, loss: 2.2619 +2023-09-11 04:38:21 - train: epoch 0030, iter [02300, 05004], lr: 0.100000, loss: 2.3339 +2023-09-11 04:39:00 - train: epoch 0030, iter [02400, 05004], lr: 0.100000, loss: 2.3688 +2023-09-11 04:39:35 - train: epoch 0030, iter [02500, 05004], lr: 0.100000, loss: 2.4261 +2023-09-11 04:40:11 - train: epoch 0030, iter [02600, 05004], lr: 0.100000, loss: 2.2254 +2023-09-11 04:40:50 - train: epoch 0030, iter [02700, 05004], lr: 0.100000, loss: 2.2507 +2023-09-11 04:41:25 - train: epoch 0030, iter [02800, 05004], lr: 0.100000, loss: 2.0778 +2023-09-11 04:42:00 - train: epoch 0030, iter [02900, 05004], lr: 0.100000, loss: 2.2299 +2023-09-11 04:42:38 - train: epoch 0030, iter [03000, 05004], lr: 0.100000, loss: 2.1958 +2023-09-11 04:43:14 - train: epoch 0030, iter [03100, 05004], lr: 0.100000, loss: 2.3727 +2023-09-11 04:43:49 - train: epoch 0030, iter [03200, 05004], lr: 0.100000, loss: 2.3413 +2023-09-11 04:44:28 - train: epoch 0030, iter [03300, 05004], lr: 0.100000, loss: 2.4925 +2023-09-11 04:45:04 - train: epoch 0030, iter [03400, 05004], lr: 0.100000, loss: 2.0792 +2023-09-11 04:45:38 - train: epoch 0030, iter [03500, 05004], lr: 0.100000, loss: 2.2287 +2023-09-11 04:46:16 - train: epoch 0030, iter [03600, 05004], lr: 0.100000, loss: 2.2583 +2023-09-11 04:46:52 - train: epoch 0030, iter [03700, 05004], lr: 0.100000, loss: 2.3068 +2023-09-11 04:47:29 - train: epoch 0030, iter [03800, 05004], lr: 0.100000, loss: 2.4438 +2023-09-11 04:48:05 - train: epoch 0030, iter [03900, 05004], lr: 0.100000, loss: 2.3191 +2023-09-11 04:48:41 - train: epoch 0030, iter [04000, 05004], lr: 0.100000, loss: 2.2204 +2023-09-11 04:49:16 - train: epoch 0030, iter [04100, 05004], lr: 0.100000, loss: 2.2998 +2023-09-11 04:49:52 - train: epoch 0030, iter [04200, 05004], lr: 0.100000, loss: 2.5662 +2023-09-11 04:50:30 - train: epoch 0030, iter [04300, 05004], lr: 0.100000, loss: 2.3117 +2023-09-11 04:51:05 - train: epoch 0030, iter [04400, 05004], lr: 0.100000, loss: 2.3096 +2023-09-11 04:51:44 - train: epoch 0030, iter [04500, 05004], lr: 0.100000, loss: 2.4831 +2023-09-11 04:52:20 - train: epoch 0030, iter [04600, 05004], lr: 0.100000, loss: 2.0442 +2023-09-11 04:52:54 - train: epoch 0030, iter [04700, 05004], lr: 0.100000, loss: 2.2149 +2023-09-11 04:53:32 - train: epoch 0030, iter [04800, 05004], lr: 0.100000, loss: 2.2470 +2023-09-11 04:54:08 - train: epoch 0030, iter [04900, 05004], lr: 0.100000, loss: 2.3641 +2023-09-11 04:54:42 - train: epoch 0030, iter [05000, 05004], lr: 0.100000, loss: 2.4409 +2023-09-11 04:54:43 - train: epoch 030, train_loss: 2.2841 +2023-09-11 04:56:07 - eval: epoch: 030, acc1: 52.594%, acc5: 77.976%, test_loss: 2.0359, per_image_load_time: 1.338ms, per_image_inference_time: 0.324ms +2023-09-11 04:56:07 - until epoch: 030, best_acc1: 52.596% +2023-09-11 04:56:07 - epoch 031 lr: 0.010000 +2023-09-11 04:56:50 - train: epoch 0031, iter [00100, 05004], lr: 0.010000, loss: 2.0176 +2023-09-11 04:57:29 - train: epoch 0031, iter [00200, 05004], lr: 0.010000, loss: 
1.8949 +2023-09-11 04:58:05 - train: epoch 0031, iter [00300, 05004], lr: 0.010000, loss: 1.8486 +2023-09-11 04:58:39 - train: epoch 0031, iter [00400, 05004], lr: 0.010000, loss: 2.0315 +2023-09-11 04:59:17 - train: epoch 0031, iter [00500, 05004], lr: 0.010000, loss: 1.7459 +2023-09-11 04:59:52 - train: epoch 0031, iter [00600, 05004], lr: 0.010000, loss: 1.7622 +2023-09-11 05:00:28 - train: epoch 0031, iter [00700, 05004], lr: 0.010000, loss: 2.1448 +2023-09-11 05:01:06 - train: epoch 0031, iter [00800, 05004], lr: 0.010000, loss: 1.9847 +2023-09-11 05:01:41 - train: epoch 0031, iter [00900, 05004], lr: 0.010000, loss: 1.8643 +2023-09-11 05:02:17 - train: epoch 0031, iter [01000, 05004], lr: 0.010000, loss: 2.0739 +2023-09-11 05:02:56 - train: epoch 0031, iter [01100, 05004], lr: 0.010000, loss: 1.9468 +2023-09-11 05:03:30 - train: epoch 0031, iter [01200, 05004], lr: 0.010000, loss: 1.9695 +2023-09-11 05:04:06 - train: epoch 0031, iter [01300, 05004], lr: 0.010000, loss: 1.5061 +2023-09-11 05:04:45 - train: epoch 0031, iter [01400, 05004], lr: 0.010000, loss: 1.4986 +2023-09-11 05:05:19 - train: epoch 0031, iter [01500, 05004], lr: 0.010000, loss: 1.9720 +2023-09-11 05:05:55 - train: epoch 0031, iter [01600, 05004], lr: 0.010000, loss: 1.8365 +2023-09-11 05:06:32 - train: epoch 0031, iter [01700, 05004], lr: 0.010000, loss: 1.8063 +2023-09-11 05:07:07 - train: epoch 0031, iter [01800, 05004], lr: 0.010000, loss: 1.6761 +2023-09-11 05:07:42 - train: epoch 0031, iter [01900, 05004], lr: 0.010000, loss: 1.6961 +2023-09-11 05:08:20 - train: epoch 0031, iter [02000, 05004], lr: 0.010000, loss: 1.8631 +2023-09-11 05:08:55 - train: epoch 0031, iter [02100, 05004], lr: 0.010000, loss: 1.7113 +2023-09-11 05:09:31 - train: epoch 0031, iter [02200, 05004], lr: 0.010000, loss: 1.5011 +2023-09-11 05:10:08 - train: epoch 0031, iter [02300, 05004], lr: 0.010000, loss: 1.6204 +2023-09-11 05:10:46 - train: epoch 0031, iter [02400, 05004], lr: 0.010000, loss: 1.8314 +2023-09-11 05:11:21 - train: epoch 0031, iter [02500, 05004], lr: 0.010000, loss: 1.6902 +2023-09-11 05:11:56 - train: epoch 0031, iter [02600, 05004], lr: 0.010000, loss: 1.7518 +2023-09-11 05:12:35 - train: epoch 0031, iter [02700, 05004], lr: 0.010000, loss: 1.7811 +2023-09-11 05:13:09 - train: epoch 0031, iter [02800, 05004], lr: 0.010000, loss: 1.9807 +2023-09-11 05:13:46 - train: epoch 0031, iter [02900, 05004], lr: 0.010000, loss: 1.9862 +2023-09-11 05:14:22 - train: epoch 0031, iter [03000, 05004], lr: 0.010000, loss: 2.0954 +2023-09-11 05:14:57 - train: epoch 0031, iter [03100, 05004], lr: 0.010000, loss: 1.8918 +2023-09-11 05:15:36 - train: epoch 0031, iter [03200, 05004], lr: 0.010000, loss: 1.7271 +2023-09-11 05:16:11 - train: epoch 0031, iter [03300, 05004], lr: 0.010000, loss: 1.7745 +2023-09-11 05:16:47 - train: epoch 0031, iter [03400, 05004], lr: 0.010000, loss: 1.8562 +2023-09-11 05:17:25 - train: epoch 0031, iter [03500, 05004], lr: 0.010000, loss: 1.8396 +2023-09-11 05:18:01 - train: epoch 0031, iter [03600, 05004], lr: 0.010000, loss: 1.7407 +2023-09-11 05:18:36 - train: epoch 0031, iter [03700, 05004], lr: 0.010000, loss: 1.7867 +2023-09-11 05:19:13 - train: epoch 0031, iter [03800, 05004], lr: 0.010000, loss: 1.7303 +2023-09-11 05:19:51 - train: epoch 0031, iter [03900, 05004], lr: 0.010000, loss: 1.8037 +2023-09-11 05:20:26 - train: epoch 0031, iter [04000, 05004], lr: 0.010000, loss: 1.6429 +2023-09-11 05:21:01 - train: epoch 0031, iter [04100, 05004], lr: 0.010000, loss: 1.8684 +2023-09-11 05:21:40 - train: epoch 
0031, iter [04200, 05004], lr: 0.010000, loss: 1.8242 +2023-09-11 05:22:14 - train: epoch 0031, iter [04300, 05004], lr: 0.010000, loss: 1.6131 +2023-09-11 05:22:51 - train: epoch 0031, iter [04400, 05004], lr: 0.010000, loss: 1.7923 +2023-09-11 05:23:27 - train: epoch 0031, iter [04500, 05004], lr: 0.010000, loss: 1.6490 +2023-09-11 05:24:03 - train: epoch 0031, iter [04600, 05004], lr: 0.010000, loss: 1.8578 +2023-09-11 05:24:41 - train: epoch 0031, iter [04700, 05004], lr: 0.010000, loss: 1.5471 +2023-09-11 05:25:16 - train: epoch 0031, iter [04800, 05004], lr: 0.010000, loss: 1.7767 +2023-09-11 05:25:52 - train: epoch 0031, iter [04900, 05004], lr: 0.010000, loss: 1.7101 +2023-09-11 05:26:26 - train: epoch 0031, iter [05000, 05004], lr: 0.010000, loss: 1.7578 +2023-09-11 05:26:28 - train: epoch 031, train_loss: 1.8195 +2023-09-11 05:27:52 - eval: epoch: 031, acc1: 64.372%, acc5: 85.590%, test_loss: 1.4890, per_image_load_time: 1.345ms, per_image_inference_time: 0.325ms +2023-09-11 05:27:52 - until epoch: 031, best_acc1: 64.372% +2023-09-11 05:27:52 - epoch 032 lr: 0.010000 +2023-09-11 05:28:38 - train: epoch 0032, iter [00100, 05004], lr: 0.010000, loss: 1.7011 +2023-09-11 05:29:14 - train: epoch 0032, iter [00200, 05004], lr: 0.010000, loss: 1.6349 +2023-09-11 05:29:50 - train: epoch 0032, iter [00300, 05004], lr: 0.010000, loss: 1.4152 +2023-09-11 05:30:25 - train: epoch 0032, iter [00400, 05004], lr: 0.010000, loss: 1.8481 +2023-09-11 05:31:03 - train: epoch 0032, iter [00500, 05004], lr: 0.010000, loss: 1.5576 +2023-09-11 05:31:39 - train: epoch 0032, iter [00600, 05004], lr: 0.010000, loss: 1.6116 +2023-09-11 05:32:14 - train: epoch 0032, iter [00700, 05004], lr: 0.010000, loss: 1.6808 +2023-09-11 05:32:53 - train: epoch 0032, iter [00800, 05004], lr: 0.010000, loss: 1.5614 +2023-09-11 05:33:28 - train: epoch 0032, iter [00900, 05004], lr: 0.010000, loss: 1.7735 +2023-09-11 05:34:03 - train: epoch 0032, iter [01000, 05004], lr: 0.010000, loss: 1.9784 +2023-09-11 05:34:42 - train: epoch 0032, iter [01100, 05004], lr: 0.010000, loss: 1.8183 +2023-09-11 05:35:17 - train: epoch 0032, iter [01200, 05004], lr: 0.010000, loss: 1.7545 +2023-09-11 05:35:53 - train: epoch 0032, iter [01300, 05004], lr: 0.010000, loss: 1.6666 +2023-09-11 05:36:30 - train: epoch 0032, iter [01400, 05004], lr: 0.010000, loss: 1.7476 +2023-09-11 05:37:06 - train: epoch 0032, iter [01500, 05004], lr: 0.010000, loss: 1.6528 +2023-09-11 05:37:41 - train: epoch 0032, iter [01600, 05004], lr: 0.010000, loss: 1.7951 +2023-09-11 05:38:17 - train: epoch 0032, iter [01700, 05004], lr: 0.010000, loss: 1.7877 +2023-09-11 05:38:54 - train: epoch 0032, iter [01800, 05004], lr: 0.010000, loss: 1.9854 +2023-09-11 05:39:30 - train: epoch 0032, iter [01900, 05004], lr: 0.010000, loss: 1.4799 +2023-09-11 05:40:07 - train: epoch 0032, iter [02000, 05004], lr: 0.010000, loss: 1.7001 +2023-09-11 05:40:42 - train: epoch 0032, iter [02100, 05004], lr: 0.010000, loss: 1.5413 +2023-09-11 05:41:17 - train: epoch 0032, iter [02200, 05004], lr: 0.010000, loss: 1.6582 +2023-09-11 05:41:55 - train: epoch 0032, iter [02300, 05004], lr: 0.010000, loss: 1.6613 +2023-09-11 05:42:31 - train: epoch 0032, iter [02400, 05004], lr: 0.010000, loss: 1.6380 +2023-09-11 05:43:07 - train: epoch 0032, iter [02500, 05004], lr: 0.010000, loss: 1.6143 +2023-09-11 05:43:44 - train: epoch 0032, iter [02600, 05004], lr: 0.010000, loss: 1.4862 +2023-09-11 05:44:20 - train: epoch 0032, iter [02700, 05004], lr: 0.010000, loss: 1.5918 +2023-09-11 05:44:55 - train: 
epoch 0032, iter [02800, 05004], lr: 0.010000, loss: 1.6281 +2023-09-11 05:45:33 - train: epoch 0032, iter [02900, 05004], lr: 0.010000, loss: 1.7763 +2023-09-11 05:46:09 - train: epoch 0032, iter [03000, 05004], lr: 0.010000, loss: 1.6962 +2023-09-11 05:46:43 - train: epoch 0032, iter [03100, 05004], lr: 0.010000, loss: 1.8031 +2023-09-11 05:47:21 - train: epoch 0032, iter [03200, 05004], lr: 0.010000, loss: 1.8606 +2023-09-11 05:47:56 - train: epoch 0032, iter [03300, 05004], lr: 0.010000, loss: 1.4389 +2023-09-11 05:48:32 - train: epoch 0032, iter [03400, 05004], lr: 0.010000, loss: 1.4971 +2023-09-11 05:49:08 - train: epoch 0032, iter [03500, 05004], lr: 0.010000, loss: 1.7564 +2023-09-11 05:49:47 - train: epoch 0032, iter [03600, 05004], lr: 0.010000, loss: 1.7187 +2023-09-11 05:50:22 - train: epoch 0032, iter [03700, 05004], lr: 0.010000, loss: 1.6952 +2023-09-11 05:50:58 - train: epoch 0032, iter [03800, 05004], lr: 0.010000, loss: 1.5888 +2023-09-11 05:51:36 - train: epoch 0032, iter [03900, 05004], lr: 0.010000, loss: 1.6254 +2023-09-11 05:52:11 - train: epoch 0032, iter [04000, 05004], lr: 0.010000, loss: 1.6617 +2023-09-11 05:52:46 - train: epoch 0032, iter [04100, 05004], lr: 0.010000, loss: 1.6302 +2023-09-11 05:53:24 - train: epoch 0032, iter [04200, 05004], lr: 0.010000, loss: 1.5678 +2023-09-11 05:53:59 - train: epoch 0032, iter [04300, 05004], lr: 0.010000, loss: 1.8140 +2023-09-11 05:54:34 - train: epoch 0032, iter [04400, 05004], lr: 0.010000, loss: 1.5654 +2023-09-11 05:55:11 - train: epoch 0032, iter [04500, 05004], lr: 0.010000, loss: 1.8333 +2023-09-11 05:55:47 - train: epoch 0032, iter [04600, 05004], lr: 0.010000, loss: 1.5758 +2023-09-11 05:56:21 - train: epoch 0032, iter [04700, 05004], lr: 0.010000, loss: 1.6913 +2023-09-11 05:56:59 - train: epoch 0032, iter [04800, 05004], lr: 0.010000, loss: 1.6471 +2023-09-11 05:57:35 - train: epoch 0032, iter [04900, 05004], lr: 0.010000, loss: 1.5419 +2023-09-11 05:58:09 - train: epoch 0032, iter [05000, 05004], lr: 0.010000, loss: 1.7184 +2023-09-11 05:58:11 - train: epoch 032, train_loss: 1.7006 +2023-09-11 05:59:34 - eval: epoch: 032, acc1: 64.908%, acc5: 86.310%, test_loss: 1.4438, per_image_load_time: 1.323ms, per_image_inference_time: 0.324ms +2023-09-11 05:59:34 - until epoch: 032, best_acc1: 64.908% +2023-09-11 05:59:34 - epoch 033 lr: 0.010000 +2023-09-11 06:00:18 - train: epoch 0033, iter [00100, 05004], lr: 0.010000, loss: 1.4247 +2023-09-11 06:00:54 - train: epoch 0033, iter [00200, 05004], lr: 0.010000, loss: 1.8882 +2023-09-11 06:01:29 - train: epoch 0033, iter [00300, 05004], lr: 0.010000, loss: 1.6614 +2023-09-11 06:02:06 - train: epoch 0033, iter [00400, 05004], lr: 0.010000, loss: 1.6152 +2023-09-11 06:02:43 - train: epoch 0033, iter [00500, 05004], lr: 0.010000, loss: 1.7515 +2023-09-11 06:03:20 - train: epoch 0033, iter [00600, 05004], lr: 0.010000, loss: 1.5140 +2023-09-11 06:03:57 - train: epoch 0033, iter [00700, 05004], lr: 0.010000, loss: 1.8903 +2023-09-11 06:04:33 - train: epoch 0033, iter [00800, 05004], lr: 0.010000, loss: 1.8922 +2023-09-11 06:05:10 - train: epoch 0033, iter [00900, 05004], lr: 0.010000, loss: 1.7777 +2023-09-11 06:05:44 - train: epoch 0033, iter [01000, 05004], lr: 0.010000, loss: 1.7952 +2023-09-11 06:06:22 - train: epoch 0033, iter [01100, 05004], lr: 0.010000, loss: 1.5419 +2023-09-11 06:06:58 - train: epoch 0033, iter [01200, 05004], lr: 0.010000, loss: 1.7046 +2023-09-11 06:07:35 - train: epoch 0033, iter [01300, 05004], lr: 0.010000, loss: 1.6137 +2023-09-11 06:08:10 - 
train: epoch 0033, iter [01400, 05004], lr: 0.010000, loss: 1.6818 +2023-09-11 06:08:47 - train: epoch 0033, iter [01500, 05004], lr: 0.010000, loss: 1.7420 +2023-09-11 06:09:24 - train: epoch 0033, iter [01600, 05004], lr: 0.010000, loss: 1.9508 +2023-09-11 06:09:59 - train: epoch 0033, iter [01700, 05004], lr: 0.010000, loss: 1.6135 +2023-09-11 06:10:36 - train: epoch 0033, iter [01800, 05004], lr: 0.010000, loss: 1.8189 +2023-09-11 06:11:13 - train: epoch 0033, iter [01900, 05004], lr: 0.010000, loss: 1.6234 +2023-09-11 06:11:48 - train: epoch 0033, iter [02000, 05004], lr: 0.010000, loss: 1.6729 +2023-09-11 06:12:24 - train: epoch 0033, iter [02100, 05004], lr: 0.010000, loss: 1.7962 +2023-09-11 06:13:02 - train: epoch 0033, iter [02200, 05004], lr: 0.010000, loss: 1.6526 +2023-09-11 06:13:39 - train: epoch 0033, iter [02300, 05004], lr: 0.010000, loss: 1.5161 +2023-09-11 06:14:18 - train: epoch 0033, iter [02400, 05004], lr: 0.010000, loss: 1.6656 +2023-09-11 06:14:52 - train: epoch 0033, iter [02500, 05004], lr: 0.010000, loss: 1.7365 +2023-09-11 06:15:28 - train: epoch 0033, iter [02600, 05004], lr: 0.010000, loss: 1.4865 +2023-09-11 06:16:04 - train: epoch 0033, iter [02700, 05004], lr: 0.010000, loss: 1.6335 +2023-09-11 06:16:40 - train: epoch 0033, iter [02800, 05004], lr: 0.010000, loss: 1.7241 +2023-09-11 06:17:16 - train: epoch 0033, iter [02900, 05004], lr: 0.010000, loss: 1.6140 +2023-09-11 06:17:54 - train: epoch 0033, iter [03000, 05004], lr: 0.010000, loss: 1.6375 +2023-09-11 06:18:30 - train: epoch 0033, iter [03100, 05004], lr: 0.010000, loss: 1.5498 +2023-09-11 06:19:06 - train: epoch 0033, iter [03200, 05004], lr: 0.010000, loss: 1.8355 +2023-09-11 06:19:43 - train: epoch 0033, iter [03300, 05004], lr: 0.010000, loss: 1.7020 +2023-09-11 06:20:19 - train: epoch 0033, iter [03400, 05004], lr: 0.010000, loss: 1.5928 +2023-09-11 06:20:53 - train: epoch 0033, iter [03500, 05004], lr: 0.010000, loss: 1.6473 +2023-09-11 06:21:31 - train: epoch 0033, iter [03600, 05004], lr: 0.010000, loss: 1.7280 +2023-09-11 06:22:09 - train: epoch 0033, iter [03700, 05004], lr: 0.010000, loss: 1.5206 +2023-09-11 06:22:43 - train: epoch 0033, iter [03800, 05004], lr: 0.010000, loss: 1.7143 +2023-09-11 06:23:19 - train: epoch 0033, iter [03900, 05004], lr: 0.010000, loss: 1.8203 +2023-09-11 06:23:56 - train: epoch 0033, iter [04000, 05004], lr: 0.010000, loss: 1.7404 +2023-09-11 06:24:34 - train: epoch 0033, iter [04100, 05004], lr: 0.010000, loss: 1.5361 +2023-09-11 06:25:09 - train: epoch 0033, iter [04200, 05004], lr: 0.010000, loss: 1.5903 +2023-09-11 06:25:44 - train: epoch 0033, iter [04300, 05004], lr: 0.010000, loss: 1.7830 +2023-09-11 06:26:22 - train: epoch 0033, iter [04400, 05004], lr: 0.010000, loss: 1.6353 +2023-09-11 06:26:58 - train: epoch 0033, iter [04500, 05004], lr: 0.010000, loss: 1.8990 +2023-09-11 06:27:35 - train: epoch 0033, iter [04600, 05004], lr: 0.010000, loss: 1.8786 +2023-09-11 06:28:11 - train: epoch 0033, iter [04700, 05004], lr: 0.010000, loss: 1.5739 +2023-09-11 06:28:46 - train: epoch 0033, iter [04800, 05004], lr: 0.010000, loss: 1.9306 +2023-09-11 06:29:23 - train: epoch 0033, iter [04900, 05004], lr: 0.010000, loss: 1.4899 +2023-09-11 06:29:57 - train: epoch 0033, iter [05000, 05004], lr: 0.010000, loss: 1.7266 +2023-09-11 06:29:58 - train: epoch 033, train_loss: 1.6532 +2023-09-11 06:31:22 - eval: epoch: 033, acc1: 65.458%, acc5: 86.582%, test_loss: 1.4190, per_image_load_time: 1.359ms, per_image_inference_time: 0.294ms +2023-09-11 06:31:22 - until 
epoch: 033, best_acc1: 65.458% +2023-09-11 06:31:22 - epoch 034 lr: 0.010000 +2023-09-11 06:32:07 - train: epoch 0034, iter [00100, 05004], lr: 0.010000, loss: 1.4298 +2023-09-11 06:32:43 - train: epoch 0034, iter [00200, 05004], lr: 0.010000, loss: 1.6222 +2023-09-11 06:33:18 - train: epoch 0034, iter [00300, 05004], lr: 0.010000, loss: 1.5272 +2023-09-11 06:33:57 - train: epoch 0034, iter [00400, 05004], lr: 0.010000, loss: 1.3606 +2023-09-11 06:34:32 - train: epoch 0034, iter [00500, 05004], lr: 0.010000, loss: 1.5118 +2023-09-11 06:35:09 - train: epoch 0034, iter [00600, 05004], lr: 0.010000, loss: 1.8678 +2023-09-11 06:35:45 - train: epoch 0034, iter [00700, 05004], lr: 0.010000, loss: 1.4945 +2023-09-11 06:36:20 - train: epoch 0034, iter [00800, 05004], lr: 0.010000, loss: 1.8795 +2023-09-11 06:36:57 - train: epoch 0034, iter [00900, 05004], lr: 0.010000, loss: 1.6998 +2023-09-11 06:37:33 - train: epoch 0034, iter [01000, 05004], lr: 0.010000, loss: 1.6372 +2023-09-11 06:38:07 - train: epoch 0034, iter [01100, 05004], lr: 0.010000, loss: 1.5628 +2023-09-11 06:38:47 - train: epoch 0034, iter [01200, 05004], lr: 0.010000, loss: 1.6664 +2023-09-11 06:39:21 - train: epoch 0034, iter [01300, 05004], lr: 0.010000, loss: 1.4628 +2023-09-11 06:39:56 - train: epoch 0034, iter [01400, 05004], lr: 0.010000, loss: 1.5638 +2023-09-11 06:40:35 - train: epoch 0034, iter [01500, 05004], lr: 0.010000, loss: 1.5254 +2023-09-11 06:41:10 - train: epoch 0034, iter [01600, 05004], lr: 0.010000, loss: 1.6053 +2023-09-11 06:41:46 - train: epoch 0034, iter [01700, 05004], lr: 0.010000, loss: 1.6710 +2023-09-11 06:42:23 - train: epoch 0034, iter [01800, 05004], lr: 0.010000, loss: 1.5620 +2023-09-11 06:42:58 - train: epoch 0034, iter [01900, 05004], lr: 0.010000, loss: 1.7543 +2023-09-11 06:43:34 - train: epoch 0034, iter [02000, 05004], lr: 0.010000, loss: 1.6041 +2023-09-11 06:44:12 - train: epoch 0034, iter [02100, 05004], lr: 0.010000, loss: 1.7343 +2023-09-11 06:44:50 - train: epoch 0034, iter [02200, 05004], lr: 0.010000, loss: 1.3720 +2023-09-11 06:45:25 - train: epoch 0034, iter [02300, 05004], lr: 0.010000, loss: 1.7487 +2023-09-11 06:46:01 - train: epoch 0034, iter [02400, 05004], lr: 0.010000, loss: 1.3610 +2023-09-11 06:46:39 - train: epoch 0034, iter [02500, 05004], lr: 0.010000, loss: 1.6080 +2023-09-11 06:47:14 - train: epoch 0034, iter [02600, 05004], lr: 0.010000, loss: 1.8712 +2023-09-11 06:47:51 - train: epoch 0034, iter [02700, 05004], lr: 0.010000, loss: 1.6115 +2023-09-11 06:48:27 - train: epoch 0034, iter [02800, 05004], lr: 0.010000, loss: 1.3439 +2023-09-11 06:49:03 - train: epoch 0034, iter [02900, 05004], lr: 0.010000, loss: 1.4410 +2023-09-11 06:49:41 - train: epoch 0034, iter [03000, 05004], lr: 0.010000, loss: 1.6300 +2023-09-11 06:50:16 - train: epoch 0034, iter [03100, 05004], lr: 0.010000, loss: 1.6293 +2023-09-11 06:50:52 - train: epoch 0034, iter [03200, 05004], lr: 0.010000, loss: 1.7794 +2023-09-11 06:51:30 - train: epoch 0034, iter [03300, 05004], lr: 0.010000, loss: 1.5273 +2023-09-11 06:52:06 - train: epoch 0034, iter [03400, 05004], lr: 0.010000, loss: 1.6514 +2023-09-11 06:52:42 - train: epoch 0034, iter [03500, 05004], lr: 0.010000, loss: 1.4671 +2023-09-11 06:53:20 - train: epoch 0034, iter [03600, 05004], lr: 0.010000, loss: 1.3948 +2023-09-11 06:53:56 - train: epoch 0034, iter [03700, 05004], lr: 0.010000, loss: 1.5961 +2023-09-11 06:54:31 - train: epoch 0034, iter [03800, 05004], lr: 0.010000, loss: 1.6233 +2023-09-11 06:55:08 - train: epoch 0034, iter [03900, 
05004], lr: 0.010000, loss: 1.6838 +2023-09-11 06:55:42 - train: epoch 0034, iter [04000, 05004], lr: 0.010000, loss: 1.4474 +2023-09-11 06:56:20 - train: epoch 0034, iter [04100, 05004], lr: 0.010000, loss: 1.6634 +2023-09-11 06:56:56 - train: epoch 0034, iter [04200, 05004], lr: 0.010000, loss: 1.6580 +2023-09-11 06:57:32 - train: epoch 0034, iter [04300, 05004], lr: 0.010000, loss: 1.7624 +2023-09-11 06:58:11 - train: epoch 0034, iter [04400, 05004], lr: 0.010000, loss: 1.4170 +2023-09-11 06:58:46 - train: epoch 0034, iter [04500, 05004], lr: 0.010000, loss: 1.5127 +2023-09-11 06:59:21 - train: epoch 0034, iter [04600, 05004], lr: 0.010000, loss: 1.7978 +2023-09-11 07:00:01 - train: epoch 0034, iter [04700, 05004], lr: 0.010000, loss: 1.7037 +2023-09-11 07:00:36 - train: epoch 0034, iter [04800, 05004], lr: 0.010000, loss: 1.6346 +2023-09-11 07:01:11 - train: epoch 0034, iter [04900, 05004], lr: 0.010000, loss: 1.7461 +2023-09-11 07:01:45 - train: epoch 0034, iter [05000, 05004], lr: 0.010000, loss: 1.4619 +2023-09-11 07:01:46 - train: epoch 034, train_loss: 1.6227 +2023-09-11 07:03:10 - eval: epoch: 034, acc1: 65.794%, acc5: 86.874%, test_loss: 1.4035, per_image_load_time: 1.334ms, per_image_inference_time: 0.325ms +2023-09-11 07:03:10 - until epoch: 034, best_acc1: 65.794% +2023-09-11 07:03:10 - epoch 035 lr: 0.010000 +2023-09-11 07:03:56 - train: epoch 0035, iter [00100, 05004], lr: 0.010000, loss: 1.4846 +2023-09-11 07:04:31 - train: epoch 0035, iter [00200, 05004], lr: 0.010000, loss: 1.4863 +2023-09-11 07:05:07 - train: epoch 0035, iter [00300, 05004], lr: 0.010000, loss: 1.6138 +2023-09-11 07:05:43 - train: epoch 0035, iter [00400, 05004], lr: 0.010000, loss: 1.5205 +2023-09-11 07:06:18 - train: epoch 0035, iter [00500, 05004], lr: 0.010000, loss: 1.6271 +2023-09-11 07:06:53 - train: epoch 0035, iter [00600, 05004], lr: 0.010000, loss: 1.7645 +2023-09-11 07:07:32 - train: epoch 0035, iter [00700, 05004], lr: 0.010000, loss: 1.6908 +2023-09-11 07:08:07 - train: epoch 0035, iter [00800, 05004], lr: 0.010000, loss: 1.5240 +2023-09-11 07:08:42 - train: epoch 0035, iter [00900, 05004], lr: 0.010000, loss: 1.6521 +2023-09-11 07:09:21 - train: epoch 0035, iter [01000, 05004], lr: 0.010000, loss: 1.6756 +2023-09-11 07:09:56 - train: epoch 0035, iter [01100, 05004], lr: 0.010000, loss: 1.6879 +2023-09-11 07:10:32 - train: epoch 0035, iter [01200, 05004], lr: 0.010000, loss: 1.6054 +2023-09-11 07:11:08 - train: epoch 0035, iter [01300, 05004], lr: 0.010000, loss: 1.7757 +2023-09-11 07:11:45 - train: epoch 0035, iter [01400, 05004], lr: 0.010000, loss: 1.5822 +2023-09-11 07:12:22 - train: epoch 0035, iter [01500, 05004], lr: 0.010000, loss: 1.6686 +2023-09-11 07:12:57 - train: epoch 0035, iter [01600, 05004], lr: 0.010000, loss: 1.4938 +2023-09-11 07:13:35 - train: epoch 0035, iter [01700, 05004], lr: 0.010000, loss: 1.5803 +2023-09-11 07:14:11 - train: epoch 0035, iter [01800, 05004], lr: 0.010000, loss: 1.6877 +2023-09-11 07:14:46 - train: epoch 0035, iter [01900, 05004], lr: 0.010000, loss: 1.7343 +2023-09-11 07:15:24 - train: epoch 0035, iter [02000, 05004], lr: 0.010000, loss: 1.7609 +2023-09-11 07:16:00 - train: epoch 0035, iter [02100, 05004], lr: 0.010000, loss: 1.6851 +2023-09-11 07:16:36 - train: epoch 0035, iter [02200, 05004], lr: 0.010000, loss: 1.7657 +2023-09-11 07:17:13 - train: epoch 0035, iter [02300, 05004], lr: 0.010000, loss: 1.6191 +2023-09-11 07:17:49 - train: epoch 0035, iter [02400, 05004], lr: 0.010000, loss: 1.7576 +2023-09-11 07:18:25 - train: epoch 0035, iter 
[02500, 05004], lr: 0.010000, loss: 1.6475 +2023-09-11 07:19:01 - train: epoch 0035, iter [02600, 05004], lr: 0.010000, loss: 1.5692 +2023-09-11 07:19:39 - train: epoch 0035, iter [02700, 05004], lr: 0.010000, loss: 1.5856 +2023-09-11 07:20:15 - train: epoch 0035, iter [02800, 05004], lr: 0.010000, loss: 1.6156 +2023-09-11 07:20:51 - train: epoch 0035, iter [02900, 05004], lr: 0.010000, loss: 1.4081 +2023-09-11 07:21:29 - train: epoch 0035, iter [03000, 05004], lr: 0.010000, loss: 1.4099 +2023-09-11 07:22:05 - train: epoch 0035, iter [03100, 05004], lr: 0.010000, loss: 1.6381 +2023-09-11 07:22:40 - train: epoch 0035, iter [03200, 05004], lr: 0.010000, loss: 1.4803 +2023-09-11 07:23:17 - train: epoch 0035, iter [03300, 05004], lr: 0.010000, loss: 1.4147 +2023-09-11 07:23:55 - train: epoch 0035, iter [03400, 05004], lr: 0.010000, loss: 1.3990 +2023-09-11 07:24:31 - train: epoch 0035, iter [03500, 05004], lr: 0.010000, loss: 1.5026 +2023-09-11 07:25:08 - train: epoch 0035, iter [03600, 05004], lr: 0.010000, loss: 1.5777 +2023-09-11 07:25:44 - train: epoch 0035, iter [03700, 05004], lr: 0.010000, loss: 1.5385 +2023-09-11 07:26:19 - train: epoch 0035, iter [03800, 05004], lr: 0.010000, loss: 1.6141 +2023-09-11 07:26:57 - train: epoch 0035, iter [03900, 05004], lr: 0.010000, loss: 1.6895 +2023-09-11 07:27:32 - train: epoch 0035, iter [04000, 05004], lr: 0.010000, loss: 1.4304 +2023-09-11 07:28:12 - train: epoch 0035, iter [04100, 05004], lr: 0.010000, loss: 1.8622 +2023-09-11 07:28:47 - train: epoch 0035, iter [04200, 05004], lr: 0.010000, loss: 1.5559 +2023-09-11 07:29:23 - train: epoch 0035, iter [04300, 05004], lr: 0.010000, loss: 1.6690 +2023-09-11 07:30:01 - train: epoch 0035, iter [04400, 05004], lr: 0.010000, loss: 1.7188 +2023-09-11 07:30:36 - train: epoch 0035, iter [04500, 05004], lr: 0.010000, loss: 1.7723 +2023-09-11 07:31:11 - train: epoch 0035, iter [04600, 05004], lr: 0.010000, loss: 1.7472 +2023-09-11 07:31:48 - train: epoch 0035, iter [04700, 05004], lr: 0.010000, loss: 1.8058 +2023-09-11 07:32:23 - train: epoch 0035, iter [04800, 05004], lr: 0.010000, loss: 1.6836 +2023-09-11 07:33:02 - train: epoch 0035, iter [04900, 05004], lr: 0.010000, loss: 1.6348 +2023-09-11 07:33:34 - train: epoch 0035, iter [05000, 05004], lr: 0.010000, loss: 1.4026 +2023-09-11 07:33:35 - train: epoch 035, train_loss: 1.6002 +2023-09-11 07:34:59 - eval: epoch: 035, acc1: 66.222%, acc5: 86.820%, test_loss: 1.3983, per_image_load_time: 1.362ms, per_image_inference_time: 0.314ms +2023-09-11 07:34:59 - until epoch: 035, best_acc1: 66.222% +2023-09-11 07:34:59 - epoch 036 lr: 0.010000 +2023-09-11 07:35:44 - train: epoch 0036, iter [00100, 05004], lr: 0.010000, loss: 1.9488 +2023-09-11 07:36:20 - train: epoch 0036, iter [00200, 05004], lr: 0.010000, loss: 1.3928 +2023-09-11 07:36:56 - train: epoch 0036, iter [00300, 05004], lr: 0.010000, loss: 1.4088 +2023-09-11 07:37:34 - train: epoch 0036, iter [00400, 05004], lr: 0.010000, loss: 1.5183 +2023-09-11 07:38:08 - train: epoch 0036, iter [00500, 05004], lr: 0.010000, loss: 1.5754 +2023-09-11 07:38:44 - train: epoch 0036, iter [00600, 05004], lr: 0.010000, loss: 1.5318 +2023-09-11 07:39:23 - train: epoch 0036, iter [00700, 05004], lr: 0.010000, loss: 1.5900 +2023-09-11 07:39:58 - train: epoch 0036, iter [00800, 05004], lr: 0.010000, loss: 1.5579 +2023-09-11 07:40:33 - train: epoch 0036, iter [00900, 05004], lr: 0.010000, loss: 1.6006 +2023-09-11 07:41:10 - train: epoch 0036, iter [01000, 05004], lr: 0.010000, loss: 1.5688 +2023-09-11 07:41:46 - train: epoch 0036, 
iter [01100, 05004], lr: 0.010000, loss: 1.4742 +2023-09-11 07:42:21 - train: epoch 0036, iter [01200, 05004], lr: 0.010000, loss: 1.5740 +2023-09-11 07:43:00 - train: epoch 0036, iter [01300, 05004], lr: 0.010000, loss: 1.4742 +2023-09-11 07:43:35 - train: epoch 0036, iter [01400, 05004], lr: 0.010000, loss: 1.5837 +2023-09-11 07:44:11 - train: epoch 0036, iter [01500, 05004], lr: 0.010000, loss: 1.5112 +2023-09-11 07:44:50 - train: epoch 0036, iter [01600, 05004], lr: 0.010000, loss: 1.7156 +2023-09-11 07:45:25 - train: epoch 0036, iter [01700, 05004], lr: 0.010000, loss: 1.5606 +2023-09-11 07:46:00 - train: epoch 0036, iter [01800, 05004], lr: 0.010000, loss: 1.4487 +2023-09-11 07:46:39 - train: epoch 0036, iter [01900, 05004], lr: 0.010000, loss: 1.6489 +2023-09-11 07:47:15 - train: epoch 0036, iter [02000, 05004], lr: 0.010000, loss: 1.5725 +2023-09-11 07:47:49 - train: epoch 0036, iter [02100, 05004], lr: 0.010000, loss: 1.6206 +2023-09-11 07:48:27 - train: epoch 0036, iter [02200, 05004], lr: 0.010000, loss: 1.5665 +2023-09-11 07:49:03 - train: epoch 0036, iter [02300, 05004], lr: 0.010000, loss: 1.8033 +2023-09-11 07:49:39 - train: epoch 0036, iter [02400, 05004], lr: 0.010000, loss: 1.9039 +2023-09-11 07:50:16 - train: epoch 0036, iter [02500, 05004], lr: 0.010000, loss: 1.4221 +2023-09-11 07:50:52 - train: epoch 0036, iter [02600, 05004], lr: 0.010000, loss: 1.5974 +2023-09-11 07:51:27 - train: epoch 0036, iter [02700, 05004], lr: 0.010000, loss: 1.5957 +2023-09-11 07:52:07 - train: epoch 0036, iter [02800, 05004], lr: 0.010000, loss: 1.3333 +2023-09-11 07:52:42 - train: epoch 0036, iter [02900, 05004], lr: 0.010000, loss: 1.4038 +2023-09-11 07:53:18 - train: epoch 0036, iter [03000, 05004], lr: 0.010000, loss: 1.5346 +2023-09-11 07:53:56 - train: epoch 0036, iter [03100, 05004], lr: 0.010000, loss: 1.6728 +2023-09-11 07:54:30 - train: epoch 0036, iter [03200, 05004], lr: 0.010000, loss: 1.6550 +2023-09-11 07:55:05 - train: epoch 0036, iter [03300, 05004], lr: 0.010000, loss: 1.4748 +2023-09-11 07:55:44 - train: epoch 0036, iter [03400, 05004], lr: 0.010000, loss: 1.4026 +2023-09-11 07:56:20 - train: epoch 0036, iter [03500, 05004], lr: 0.010000, loss: 1.6012 +2023-09-11 07:56:55 - train: epoch 0036, iter [03600, 05004], lr: 0.010000, loss: 1.6488 +2023-09-11 07:57:32 - train: epoch 0036, iter [03700, 05004], lr: 0.010000, loss: 1.5677 +2023-09-11 07:58:10 - train: epoch 0036, iter [03800, 05004], lr: 0.010000, loss: 1.5833 +2023-09-11 07:58:45 - train: epoch 0036, iter [03900, 05004], lr: 0.010000, loss: 1.5970 +2023-09-11 07:59:20 - train: epoch 0036, iter [04000, 05004], lr: 0.010000, loss: 1.5918 +2023-09-11 07:59:58 - train: epoch 0036, iter [04100, 05004], lr: 0.010000, loss: 1.4754 +2023-09-11 08:00:33 - train: epoch 0036, iter [04200, 05004], lr: 0.010000, loss: 1.6625 +2023-09-11 08:01:09 - train: epoch 0036, iter [04300, 05004], lr: 0.010000, loss: 1.5751 +2023-09-11 08:01:46 - train: epoch 0036, iter [04400, 05004], lr: 0.010000, loss: 1.8058 +2023-09-11 08:02:21 - train: epoch 0036, iter [04500, 05004], lr: 0.010000, loss: 1.5672 +2023-09-11 08:02:59 - train: epoch 0036, iter [04600, 05004], lr: 0.010000, loss: 1.5050 +2023-09-11 08:03:35 - train: epoch 0036, iter [04700, 05004], lr: 0.010000, loss: 1.5463 +2023-09-11 08:04:11 - train: epoch 0036, iter [04800, 05004], lr: 0.010000, loss: 1.5581 +2023-09-11 08:04:48 - train: epoch 0036, iter [04900, 05004], lr: 0.010000, loss: 1.4056 +2023-09-11 08:05:20 - train: epoch 0036, iter [05000, 05004], lr: 0.010000, loss: 
1.4780 +2023-09-11 08:05:21 - train: epoch 036, train_loss: 1.5871 +2023-09-11 08:06:45 - eval: epoch: 036, acc1: 66.210%, acc5: 86.940%, test_loss: 1.3966, per_image_load_time: 1.352ms, per_image_inference_time: 0.314ms +2023-09-11 08:06:45 - until epoch: 036, best_acc1: 66.222% +2023-09-11 08:06:45 - epoch 037 lr: 0.010000 +2023-09-11 08:07:30 - train: epoch 0037, iter [00100, 05004], lr: 0.010000, loss: 1.5449 +2023-09-11 08:08:06 - train: epoch 0037, iter [00200, 05004], lr: 0.010000, loss: 1.4297 +2023-09-11 08:08:41 - train: epoch 0037, iter [00300, 05004], lr: 0.010000, loss: 1.4195 +2023-09-11 08:09:19 - train: epoch 0037, iter [00400, 05004], lr: 0.010000, loss: 1.5261 +2023-09-11 08:09:53 - train: epoch 0037, iter [00500, 05004], lr: 0.010000, loss: 1.7285 +2023-09-11 08:10:31 - train: epoch 0037, iter [00600, 05004], lr: 0.010000, loss: 1.6892 +2023-09-11 08:11:07 - train: epoch 0037, iter [00700, 05004], lr: 0.010000, loss: 1.6527 +2023-09-11 08:11:45 - train: epoch 0037, iter [00800, 05004], lr: 0.010000, loss: 1.4816 +2023-09-11 08:12:19 - train: epoch 0037, iter [00900, 05004], lr: 0.010000, loss: 1.8572 +2023-09-11 08:12:56 - train: epoch 0037, iter [01000, 05004], lr: 0.010000, loss: 1.7084 +2023-09-11 08:13:33 - train: epoch 0037, iter [01100, 05004], lr: 0.010000, loss: 1.6894 +2023-09-11 08:14:09 - train: epoch 0037, iter [01200, 05004], lr: 0.010000, loss: 1.7714 +2023-09-11 08:14:43 - train: epoch 0037, iter [01300, 05004], lr: 0.010000, loss: 1.6671 +2023-09-11 08:15:21 - train: epoch 0037, iter [01400, 05004], lr: 0.010000, loss: 1.9782 +2023-09-11 08:15:57 - train: epoch 0037, iter [01500, 05004], lr: 0.010000, loss: 1.5091 +2023-09-11 08:16:34 - train: epoch 0037, iter [01600, 05004], lr: 0.010000, loss: 1.4061 +2023-09-11 08:17:09 - train: epoch 0037, iter [01700, 05004], lr: 0.010000, loss: 1.6999 +2023-09-11 08:17:45 - train: epoch 0037, iter [01800, 05004], lr: 0.010000, loss: 1.4764 +2023-09-11 08:18:22 - train: epoch 0037, iter [01900, 05004], lr: 0.010000, loss: 1.6298 +2023-09-11 08:18:58 - train: epoch 0037, iter [02000, 05004], lr: 0.010000, loss: 1.5942 +2023-09-11 08:19:35 - train: epoch 0037, iter [02100, 05004], lr: 0.010000, loss: 1.4050 +2023-09-11 08:20:10 - train: epoch 0037, iter [02200, 05004], lr: 0.010000, loss: 1.4725 +2023-09-11 08:20:48 - train: epoch 0037, iter [02300, 05004], lr: 0.010000, loss: 1.4179 +2023-09-11 08:21:22 - train: epoch 0037, iter [02400, 05004], lr: 0.010000, loss: 1.5386 +2023-09-11 08:21:58 - train: epoch 0037, iter [02500, 05004], lr: 0.010000, loss: 1.6359 +2023-09-11 08:22:34 - train: epoch 0037, iter [02600, 05004], lr: 0.010000, loss: 1.8325 +2023-09-11 08:23:13 - train: epoch 0037, iter [02700, 05004], lr: 0.010000, loss: 1.9619 +2023-09-11 08:23:48 - train: epoch 0037, iter [02800, 05004], lr: 0.010000, loss: 1.7489 +2023-09-11 08:24:25 - train: epoch 0037, iter [02900, 05004], lr: 0.010000, loss: 1.5259 +2023-09-11 08:25:03 - train: epoch 0037, iter [03000, 05004], lr: 0.010000, loss: 1.8357 +2023-09-11 08:25:38 - train: epoch 0037, iter [03100, 05004], lr: 0.010000, loss: 1.5811 +2023-09-11 08:26:14 - train: epoch 0037, iter [03200, 05004], lr: 0.010000, loss: 1.4959 +2023-09-11 08:26:51 - train: epoch 0037, iter [03300, 05004], lr: 0.010000, loss: 1.6270 +2023-09-11 08:27:27 - train: epoch 0037, iter [03400, 05004], lr: 0.010000, loss: 1.4778 +2023-09-11 08:28:01 - train: epoch 0037, iter [03500, 05004], lr: 0.010000, loss: 1.7063 +2023-09-11 08:28:40 - train: epoch 0037, iter [03600, 05004], lr: 0.010000, 
loss: 1.7154 +2023-09-11 08:29:15 - train: epoch 0037, iter [03700, 05004], lr: 0.010000, loss: 1.5795 +2023-09-11 08:29:51 - train: epoch 0037, iter [03800, 05004], lr: 0.010000, loss: 1.6302 +2023-09-11 08:30:29 - train: epoch 0037, iter [03900, 05004], lr: 0.010000, loss: 1.6040 +2023-09-11 08:31:04 - train: epoch 0037, iter [04000, 05004], lr: 0.010000, loss: 1.6291 +2023-09-11 08:31:39 - train: epoch 0037, iter [04100, 05004], lr: 0.010000, loss: 1.4211 +2023-09-11 08:32:18 - train: epoch 0037, iter [04200, 05004], lr: 0.010000, loss: 1.7559 +2023-09-11 08:32:53 - train: epoch 0037, iter [04300, 05004], lr: 0.010000, loss: 1.6272 +2023-09-11 08:33:29 - train: epoch 0037, iter [04400, 05004], lr: 0.010000, loss: 1.4509 +2023-09-11 08:34:07 - train: epoch 0037, iter [04500, 05004], lr: 0.010000, loss: 1.4960 +2023-09-11 08:34:43 - train: epoch 0037, iter [04600, 05004], lr: 0.010000, loss: 1.5163 +2023-09-11 08:35:17 - train: epoch 0037, iter [04700, 05004], lr: 0.010000, loss: 1.6413 +2023-09-11 08:35:56 - train: epoch 0037, iter [04800, 05004], lr: 0.010000, loss: 1.3931 +2023-09-11 08:36:31 - train: epoch 0037, iter [04900, 05004], lr: 0.010000, loss: 1.7120 +2023-09-11 08:37:04 - train: epoch 0037, iter [05000, 05004], lr: 0.010000, loss: 1.4053 +2023-09-11 08:37:05 - train: epoch 037, train_loss: 1.5765 +2023-09-11 08:38:29 - eval: epoch: 037, acc1: 65.952%, acc5: 86.958%, test_loss: 1.4022, per_image_load_time: 1.354ms, per_image_inference_time: 0.316ms +2023-09-11 08:38:30 - until epoch: 037, best_acc1: 66.222% +2023-09-11 08:38:30 - epoch 038 lr: 0.010000 +2023-09-11 08:39:14 - train: epoch 0038, iter [00100, 05004], lr: 0.010000, loss: 1.5365 +2023-09-11 08:39:51 - train: epoch 0038, iter [00200, 05004], lr: 0.010000, loss: 1.4271 +2023-09-11 08:40:26 - train: epoch 0038, iter [00300, 05004], lr: 0.010000, loss: 1.4967 +2023-09-11 08:41:03 - train: epoch 0038, iter [00400, 05004], lr: 0.010000, loss: 1.4838 +2023-09-11 08:41:40 - train: epoch 0038, iter [00500, 05004], lr: 0.010000, loss: 1.3335 +2023-09-11 08:42:16 - train: epoch 0038, iter [00600, 05004], lr: 0.010000, loss: 1.6659 +2023-09-11 08:42:52 - train: epoch 0038, iter [00700, 05004], lr: 0.010000, loss: 1.4735 +2023-09-11 08:43:30 - train: epoch 0038, iter [00800, 05004], lr: 0.010000, loss: 1.6080 +2023-09-11 08:44:05 - train: epoch 0038, iter [00900, 05004], lr: 0.010000, loss: 1.6188 +2023-09-11 08:44:40 - train: epoch 0038, iter [01000, 05004], lr: 0.010000, loss: 1.5019 +2023-09-11 08:45:17 - train: epoch 0038, iter [01100, 05004], lr: 0.010000, loss: 1.5717 +2023-09-11 08:45:53 - train: epoch 0038, iter [01200, 05004], lr: 0.010000, loss: 1.7666 +2023-09-11 08:46:28 - train: epoch 0038, iter [01300, 05004], lr: 0.010000, loss: 1.4277 +2023-09-11 08:47:07 - train: epoch 0038, iter [01400, 05004], lr: 0.010000, loss: 1.6461 +2023-09-11 08:47:41 - train: epoch 0038, iter [01500, 05004], lr: 0.010000, loss: 1.5880 +2023-09-11 08:48:20 - train: epoch 0038, iter [01600, 05004], lr: 0.010000, loss: 1.7631 +2023-09-11 08:48:55 - train: epoch 0038, iter [01700, 05004], lr: 0.010000, loss: 1.7058 +2023-09-11 08:49:31 - train: epoch 0038, iter [01800, 05004], lr: 0.010000, loss: 1.5449 +2023-09-11 08:50:09 - train: epoch 0038, iter [01900, 05004], lr: 0.010000, loss: 1.6367 +2023-09-11 08:50:45 - train: epoch 0038, iter [02000, 05004], lr: 0.010000, loss: 1.5659 +2023-09-11 08:51:21 - train: epoch 0038, iter [02100, 05004], lr: 0.010000, loss: 1.6276 +2023-09-11 08:51:55 - train: epoch 0038, iter [02200, 05004], lr: 
0.010000, loss: 1.5085 +2023-09-11 08:52:34 - train: epoch 0038, iter [02300, 05004], lr: 0.010000, loss: 1.5967 +2023-09-11 08:53:11 - train: epoch 0038, iter [02400, 05004], lr: 0.010000, loss: 1.7077 +2023-09-11 08:53:50 - train: epoch 0038, iter [02500, 05004], lr: 0.010000, loss: 1.4539 +2023-09-11 08:54:25 - train: epoch 0038, iter [02600, 05004], lr: 0.010000, loss: 1.6637 +2023-09-11 08:55:03 - train: epoch 0038, iter [02700, 05004], lr: 0.010000, loss: 1.6544 +2023-09-11 08:55:39 - train: epoch 0038, iter [02800, 05004], lr: 0.010000, loss: 1.7031 +2023-09-11 08:56:17 - train: epoch 0038, iter [02900, 05004], lr: 0.010000, loss: 1.6019 +2023-09-11 08:56:52 - train: epoch 0038, iter [03000, 05004], lr: 0.010000, loss: 1.4385 +2023-09-11 08:57:27 - train: epoch 0038, iter [03100, 05004], lr: 0.010000, loss: 1.5372 +2023-09-11 08:58:05 - train: epoch 0038, iter [03200, 05004], lr: 0.010000, loss: 1.2907 +2023-09-11 08:58:41 - train: epoch 0038, iter [03300, 05004], lr: 0.010000, loss: 1.4195 +2023-09-11 08:59:18 - train: epoch 0038, iter [03400, 05004], lr: 0.010000, loss: 1.5290 +2023-09-11 08:59:56 - train: epoch 0038, iter [03500, 05004], lr: 0.010000, loss: 1.4935 +2023-09-11 09:00:32 - train: epoch 0038, iter [03600, 05004], lr: 0.010000, loss: 1.6127 +2023-09-11 09:01:08 - train: epoch 0038, iter [03700, 05004], lr: 0.010000, loss: 1.4560 +2023-09-11 09:01:44 - train: epoch 0038, iter [03800, 05004], lr: 0.010000, loss: 1.6613 +2023-09-11 09:02:21 - train: epoch 0038, iter [03900, 05004], lr: 0.010000, loss: 1.5276 +2023-09-11 09:02:58 - train: epoch 0038, iter [04000, 05004], lr: 0.010000, loss: 1.7295 +2023-09-11 09:03:33 - train: epoch 0038, iter [04100, 05004], lr: 0.010000, loss: 1.6363 +2023-09-11 09:04:13 - train: epoch 0038, iter [04200, 05004], lr: 0.010000, loss: 1.6113 +2023-09-11 09:04:46 - train: epoch 0038, iter [04300, 05004], lr: 0.010000, loss: 1.7018 +2023-09-11 09:05:23 - train: epoch 0038, iter [04400, 05004], lr: 0.010000, loss: 1.5131 +2023-09-11 09:06:01 - train: epoch 0038, iter [04500, 05004], lr: 0.010000, loss: 1.5997 +2023-09-11 09:06:37 - train: epoch 0038, iter [04600, 05004], lr: 0.010000, loss: 1.6046 +2023-09-11 09:07:11 - train: epoch 0038, iter [04700, 05004], lr: 0.010000, loss: 1.5142 +2023-09-11 09:07:51 - train: epoch 0038, iter [04800, 05004], lr: 0.010000, loss: 1.6358 +2023-09-11 09:08:26 - train: epoch 0038, iter [04900, 05004], lr: 0.010000, loss: 1.5108 +2023-09-11 09:08:59 - train: epoch 0038, iter [05000, 05004], lr: 0.010000, loss: 1.5553 +2023-09-11 09:09:01 - train: epoch 038, train_loss: 1.5690 +2023-09-11 09:10:26 - eval: epoch: 038, acc1: 66.160%, acc5: 86.904%, test_loss: 1.3922, per_image_load_time: 1.357ms, per_image_inference_time: 0.326ms +2023-09-11 09:10:26 - until epoch: 038, best_acc1: 66.222% +2023-09-11 09:10:26 - epoch 039 lr: 0.010000 +2023-09-11 09:11:10 - train: epoch 0039, iter [00100, 05004], lr: 0.010000, loss: 1.5524 +2023-09-11 09:11:47 - train: epoch 0039, iter [00200, 05004], lr: 0.010000, loss: 1.6312 +2023-09-11 09:12:24 - train: epoch 0039, iter [00300, 05004], lr: 0.010000, loss: 1.3878 +2023-09-11 09:12:59 - train: epoch 0039, iter [00400, 05004], lr: 0.010000, loss: 1.5589 +2023-09-11 09:13:38 - train: epoch 0039, iter [00500, 05004], lr: 0.010000, loss: 1.5008 +2023-09-11 09:14:15 - train: epoch 0039, iter [00600, 05004], lr: 0.010000, loss: 1.4875 +2023-09-11 09:14:50 - train: epoch 0039, iter [00700, 05004], lr: 0.010000, loss: 1.7428 +2023-09-11 09:15:28 - train: epoch 0039, iter [00800, 
05004], lr: 0.010000, loss: 1.4047 +2023-09-11 09:16:05 - train: epoch 0039, iter [00900, 05004], lr: 0.010000, loss: 1.5399 +2023-09-11 09:16:41 - train: epoch 0039, iter [01000, 05004], lr: 0.010000, loss: 1.4682 +2023-09-11 09:17:19 - train: epoch 0039, iter [01100, 05004], lr: 0.010000, loss: 1.5931 +2023-09-11 09:17:55 - train: epoch 0039, iter [01200, 05004], lr: 0.010000, loss: 1.5797 +2023-09-11 09:18:29 - train: epoch 0039, iter [01300, 05004], lr: 0.010000, loss: 1.6055 +2023-09-11 09:19:07 - train: epoch 0039, iter [01400, 05004], lr: 0.010000, loss: 1.6910 +2023-09-11 09:19:43 - train: epoch 0039, iter [01500, 05004], lr: 0.010000, loss: 1.6365 +2023-09-11 09:20:17 - train: epoch 0039, iter [01600, 05004], lr: 0.010000, loss: 1.5868 +2023-09-11 09:20:55 - train: epoch 0039, iter [01700, 05004], lr: 0.010000, loss: 1.4241 +2023-09-11 09:21:33 - train: epoch 0039, iter [01800, 05004], lr: 0.010000, loss: 1.5800 +2023-09-11 09:22:07 - train: epoch 0039, iter [01900, 05004], lr: 0.010000, loss: 1.3683 +2023-09-11 09:22:46 - train: epoch 0039, iter [02000, 05004], lr: 0.010000, loss: 1.5315 +2023-09-11 09:23:21 - train: epoch 0039, iter [02100, 05004], lr: 0.010000, loss: 1.5276 +2023-09-11 09:23:57 - train: epoch 0039, iter [02200, 05004], lr: 0.010000, loss: 1.4841 +2023-09-11 09:24:34 - train: epoch 0039, iter [02300, 05004], lr: 0.010000, loss: 1.9227 +2023-09-11 09:25:09 - train: epoch 0039, iter [02400, 05004], lr: 0.010000, loss: 1.8732 +2023-09-11 09:25:47 - train: epoch 0039, iter [02500, 05004], lr: 0.010000, loss: 1.5538 +2023-09-11 09:26:23 - train: epoch 0039, iter [02600, 05004], lr: 0.010000, loss: 1.3514 +2023-09-11 09:26:59 - train: epoch 0039, iter [02700, 05004], lr: 0.010000, loss: 1.6494 +2023-09-11 09:27:38 - train: epoch 0039, iter [02800, 05004], lr: 0.010000, loss: 1.4831 +2023-09-11 09:28:13 - train: epoch 0039, iter [02900, 05004], lr: 0.010000, loss: 1.3770 +2023-09-11 09:28:50 - train: epoch 0039, iter [03000, 05004], lr: 0.010000, loss: 1.5793 +2023-09-11 09:29:26 - train: epoch 0039, iter [03100, 05004], lr: 0.010000, loss: 1.5200 +2023-09-11 09:30:01 - train: epoch 0039, iter [03200, 05004], lr: 0.010000, loss: 1.7089 +2023-09-11 09:30:38 - train: epoch 0039, iter [03300, 05004], lr: 0.010000, loss: 1.6494 +2023-09-11 09:31:14 - train: epoch 0039, iter [03400, 05004], lr: 0.010000, loss: 1.5130 +2023-09-11 09:31:51 - train: epoch 0039, iter [03500, 05004], lr: 0.010000, loss: 1.8309 +2023-09-11 09:32:25 - train: epoch 0039, iter [03600, 05004], lr: 0.010000, loss: 1.7132 +2023-09-11 09:33:00 - train: epoch 0039, iter [03700, 05004], lr: 0.010000, loss: 1.5058 +2023-09-11 09:33:39 - train: epoch 0039, iter [03800, 05004], lr: 0.010000, loss: 1.5652 +2023-09-11 09:34:14 - train: epoch 0039, iter [03900, 05004], lr: 0.010000, loss: 1.5177 +2023-09-11 09:34:51 - train: epoch 0039, iter [04000, 05004], lr: 0.010000, loss: 1.4187 +2023-09-11 09:35:27 - train: epoch 0039, iter [04100, 05004], lr: 0.010000, loss: 1.7076 +2023-09-11 09:36:05 - train: epoch 0039, iter [04200, 05004], lr: 0.010000, loss: 1.6852 +2023-09-11 09:36:40 - train: epoch 0039, iter [04300, 05004], lr: 0.010000, loss: 1.3907 +2023-09-11 09:37:16 - train: epoch 0039, iter [04400, 05004], lr: 0.010000, loss: 1.5368 +2023-09-11 09:37:52 - train: epoch 0039, iter [04500, 05004], lr: 0.010000, loss: 1.4152 +2023-09-11 09:38:28 - train: epoch 0039, iter [04600, 05004], lr: 0.010000, loss: 1.6122 +2023-09-11 09:39:03 - train: epoch 0039, iter [04700, 05004], lr: 0.010000, loss: 1.4306 
+2023-09-11 09:39:40 - train: epoch 0039, iter [04800, 05004], lr: 0.010000, loss: 1.4856 +2023-09-11 09:40:15 - train: epoch 0039, iter [04900, 05004], lr: 0.010000, loss: 1.5162 +2023-09-11 09:40:49 - train: epoch 0039, iter [05000, 05004], lr: 0.010000, loss: 1.5508 +2023-09-11 09:40:50 - train: epoch 039, train_loss: 1.5651 +2023-09-11 09:42:14 - eval: epoch: 039, acc1: 65.838%, acc5: 87.032%, test_loss: 1.3924, per_image_load_time: 1.364ms, per_image_inference_time: 0.302ms +2023-09-11 09:42:14 - until epoch: 039, best_acc1: 66.222% +2023-09-11 09:42:14 - epoch 040 lr: 0.010000 +2023-09-11 09:42:59 - train: epoch 0040, iter [00100, 05004], lr: 0.010000, loss: 2.0150 +2023-09-11 09:43:35 - train: epoch 0040, iter [00200, 05004], lr: 0.010000, loss: 1.5021 +2023-09-11 09:44:10 - train: epoch 0040, iter [00300, 05004], lr: 0.010000, loss: 1.7042 +2023-09-11 09:44:47 - train: epoch 0040, iter [00400, 05004], lr: 0.010000, loss: 1.4736 +2023-09-11 09:45:22 - train: epoch 0040, iter [00500, 05004], lr: 0.010000, loss: 1.3455 +2023-09-11 09:45:57 - train: epoch 0040, iter [00600, 05004], lr: 0.010000, loss: 1.5174 +2023-09-11 09:46:34 - train: epoch 0040, iter [00700, 05004], lr: 0.010000, loss: 1.4635 +2023-09-11 09:47:10 - train: epoch 0040, iter [00800, 05004], lr: 0.010000, loss: 1.5551 +2023-09-11 09:47:45 - train: epoch 0040, iter [00900, 05004], lr: 0.010000, loss: 1.5838 +2023-09-11 09:48:22 - train: epoch 0040, iter [01000, 05004], lr: 0.010000, loss: 1.4040 +2023-09-11 09:48:57 - train: epoch 0040, iter [01100, 05004], lr: 0.010000, loss: 1.5310 +2023-09-11 09:49:31 - train: epoch 0040, iter [01200, 05004], lr: 0.010000, loss: 1.4640 +2023-09-11 09:50:10 - train: epoch 0040, iter [01300, 05004], lr: 0.010000, loss: 1.4557 +2023-09-11 09:50:44 - train: epoch 0040, iter [01400, 05004], lr: 0.010000, loss: 1.6927 +2023-09-11 09:51:19 - train: epoch 0040, iter [01500, 05004], lr: 0.010000, loss: 1.5197 +2023-09-11 09:51:55 - train: epoch 0040, iter [01600, 05004], lr: 0.010000, loss: 1.7622 +2023-09-11 09:52:32 - train: epoch 0040, iter [01700, 05004], lr: 0.010000, loss: 1.5624 +2023-09-11 09:53:08 - train: epoch 0040, iter [01800, 05004], lr: 0.010000, loss: 1.4153 +2023-09-11 09:53:43 - train: epoch 0040, iter [01900, 05004], lr: 0.010000, loss: 1.5160 +2023-09-11 09:54:20 - train: epoch 0040, iter [02000, 05004], lr: 0.010000, loss: 1.5502 +2023-09-11 09:54:54 - train: epoch 0040, iter [02100, 05004], lr: 0.010000, loss: 1.3435 +2023-09-11 09:55:31 - train: epoch 0040, iter [02200, 05004], lr: 0.010000, loss: 1.4328 +2023-09-11 09:56:07 - train: epoch 0040, iter [02300, 05004], lr: 0.010000, loss: 1.5457 +2023-09-11 09:56:41 - train: epoch 0040, iter [02400, 05004], lr: 0.010000, loss: 1.5600 +2023-09-11 09:57:16 - train: epoch 0040, iter [02500, 05004], lr: 0.010000, loss: 1.7164 +2023-09-11 09:57:52 - train: epoch 0040, iter [02600, 05004], lr: 0.010000, loss: 1.5860 +2023-09-11 09:58:29 - train: epoch 0040, iter [02700, 05004], lr: 0.010000, loss: 1.8240 +2023-09-11 09:59:05 - train: epoch 0040, iter [02800, 05004], lr: 0.010000, loss: 1.5815 +2023-09-11 09:59:40 - train: epoch 0040, iter [02900, 05004], lr: 0.010000, loss: 1.7422 +2023-09-11 10:00:17 - train: epoch 0040, iter [03000, 05004], lr: 0.010000, loss: 1.4809 +2023-09-11 10:00:51 - train: epoch 0040, iter [03100, 05004], lr: 0.010000, loss: 1.5993 +2023-09-11 10:01:28 - train: epoch 0040, iter [03200, 05004], lr: 0.010000, loss: 1.7582 +2023-09-11 10:02:03 - train: epoch 0040, iter [03300, 05004], lr: 0.010000, loss: 
1.6063 +2023-09-11 10:02:39 - train: epoch 0040, iter [03400, 05004], lr: 0.010000, loss: 1.6313 +2023-09-11 10:03:15 - train: epoch 0040, iter [03500, 05004], lr: 0.010000, loss: 1.5927 +2023-09-11 10:03:49 - train: epoch 0040, iter [03600, 05004], lr: 0.010000, loss: 1.5781 +2023-09-11 10:04:27 - train: epoch 0040, iter [03700, 05004], lr: 0.010000, loss: 1.4415 +2023-09-11 10:05:03 - train: epoch 0040, iter [03800, 05004], lr: 0.010000, loss: 1.3939 +2023-09-11 10:05:37 - train: epoch 0040, iter [03900, 05004], lr: 0.010000, loss: 1.7001 +2023-09-11 10:06:14 - train: epoch 0040, iter [04000, 05004], lr: 0.010000, loss: 1.5190 +2023-09-11 10:06:50 - train: epoch 0040, iter [04100, 05004], lr: 0.010000, loss: 1.4508 +2023-09-11 10:07:26 - train: epoch 0040, iter [04200, 05004], lr: 0.010000, loss: 1.5570 +2023-09-11 10:08:01 - train: epoch 0040, iter [04300, 05004], lr: 0.010000, loss: 1.4631 +2023-09-11 10:08:38 - train: epoch 0040, iter [04400, 05004], lr: 0.010000, loss: 1.5366 +2023-09-11 10:09:12 - train: epoch 0040, iter [04500, 05004], lr: 0.010000, loss: 1.3845 +2023-09-11 10:09:50 - train: epoch 0040, iter [04600, 05004], lr: 0.010000, loss: 1.5305 +2023-09-11 10:10:25 - train: epoch 0040, iter [04700, 05004], lr: 0.010000, loss: 1.6901 +2023-09-11 10:11:00 - train: epoch 0040, iter [04800, 05004], lr: 0.010000, loss: 1.5704 +2023-09-11 10:11:38 - train: epoch 0040, iter [04900, 05004], lr: 0.010000, loss: 1.5574 +2023-09-11 10:12:11 - train: epoch 0040, iter [05000, 05004], lr: 0.010000, loss: 1.5168 +2023-09-11 10:12:12 - train: epoch 040, train_loss: 1.5630 +2023-09-11 10:13:35 - eval: epoch: 040, acc1: 65.926%, acc5: 86.990%, test_loss: 1.3997, per_image_load_time: 1.354ms, per_image_inference_time: 0.299ms +2023-09-11 10:13:35 - until epoch: 040, best_acc1: 66.222% +2023-09-11 10:13:35 - epoch 041 lr: 0.010000 +2023-09-11 10:14:19 - train: epoch 0041, iter [00100, 05004], lr: 0.010000, loss: 1.6114 +2023-09-11 10:14:55 - train: epoch 0041, iter [00200, 05004], lr: 0.010000, loss: 1.7201 +2023-09-11 10:15:30 - train: epoch 0041, iter [00300, 05004], lr: 0.010000, loss: 1.7579 +2023-09-11 10:16:08 - train: epoch 0041, iter [00400, 05004], lr: 0.010000, loss: 1.5192 +2023-09-11 10:16:43 - train: epoch 0041, iter [00500, 05004], lr: 0.010000, loss: 1.4277 +2023-09-11 10:17:19 - train: epoch 0041, iter [00600, 05004], lr: 0.010000, loss: 1.5240 +2023-09-11 10:17:56 - train: epoch 0041, iter [00700, 05004], lr: 0.010000, loss: 1.5409 +2023-09-11 10:18:32 - train: epoch 0041, iter [00800, 05004], lr: 0.010000, loss: 1.5253 +2023-09-11 10:19:08 - train: epoch 0041, iter [00900, 05004], lr: 0.010000, loss: 1.4609 +2023-09-11 10:19:45 - train: epoch 0041, iter [01000, 05004], lr: 0.010000, loss: 1.8048 +2023-09-11 10:20:20 - train: epoch 0041, iter [01100, 05004], lr: 0.010000, loss: 1.5272 +2023-09-11 10:20:56 - train: epoch 0041, iter [01200, 05004], lr: 0.010000, loss: 1.3856 +2023-09-11 10:21:35 - train: epoch 0041, iter [01300, 05004], lr: 0.010000, loss: 1.3470 +2023-09-11 10:22:11 - train: epoch 0041, iter [01400, 05004], lr: 0.010000, loss: 1.7006 +2023-09-11 10:22:46 - train: epoch 0041, iter [01500, 05004], lr: 0.010000, loss: 1.7412 +2023-09-11 10:23:25 - train: epoch 0041, iter [01600, 05004], lr: 0.010000, loss: 1.6433 +2023-09-11 10:24:00 - train: epoch 0041, iter [01700, 05004], lr: 0.010000, loss: 1.5981 +2023-09-11 10:24:35 - train: epoch 0041, iter [01800, 05004], lr: 0.010000, loss: 1.5641 +2023-09-11 10:25:13 - train: epoch 0041, iter [01900, 05004], lr: 0.010000, 
loss: 1.6717 +2023-09-11 10:25:49 - train: epoch 0041, iter [02000, 05004], lr: 0.010000, loss: 1.5108 +2023-09-11 10:26:26 - train: epoch 0041, iter [02100, 05004], lr: 0.010000, loss: 1.3939 +2023-09-11 10:27:02 - train: epoch 0041, iter [02200, 05004], lr: 0.010000, loss: 1.4375 +2023-09-11 10:27:36 - train: epoch 0041, iter [02300, 05004], lr: 0.010000, loss: 1.5325 +2023-09-11 10:28:14 - train: epoch 0041, iter [02400, 05004], lr: 0.010000, loss: 1.6348 +2023-09-11 10:28:50 - train: epoch 0041, iter [02500, 05004], lr: 0.010000, loss: 1.5174 +2023-09-11 10:29:26 - train: epoch 0041, iter [02600, 05004], lr: 0.010000, loss: 1.6154 +2023-09-11 10:30:03 - train: epoch 0041, iter [02700, 05004], lr: 0.010000, loss: 1.5953 +2023-09-11 10:30:38 - train: epoch 0041, iter [02800, 05004], lr: 0.010000, loss: 1.5725 +2023-09-11 10:31:12 - train: epoch 0041, iter [02900, 05004], lr: 0.010000, loss: 1.5323 +2023-09-11 10:31:50 - train: epoch 0041, iter [03000, 05004], lr: 0.010000, loss: 1.5742 +2023-09-11 10:32:28 - train: epoch 0041, iter [03100, 05004], lr: 0.010000, loss: 1.6995 +2023-09-11 10:33:03 - train: epoch 0041, iter [03200, 05004], lr: 0.010000, loss: 1.4398 +2023-09-11 10:33:38 - train: epoch 0041, iter [03300, 05004], lr: 0.010000, loss: 1.5887 +2023-09-11 10:34:15 - train: epoch 0041, iter [03400, 05004], lr: 0.010000, loss: 1.4803 +2023-09-11 10:34:50 - train: epoch 0041, iter [03500, 05004], lr: 0.010000, loss: 1.4335 +2023-09-11 10:35:26 - train: epoch 0041, iter [03600, 05004], lr: 0.010000, loss: 1.6404 +2023-09-11 10:36:02 - train: epoch 0041, iter [03700, 05004], lr: 0.010000, loss: 1.5419 +2023-09-11 10:36:37 - train: epoch 0041, iter [03800, 05004], lr: 0.010000, loss: 1.7177 +2023-09-11 10:37:16 - train: epoch 0041, iter [03900, 05004], lr: 0.010000, loss: 1.5538 +2023-09-11 10:37:52 - train: epoch 0041, iter [04000, 05004], lr: 0.010000, loss: 1.4486 +2023-09-11 10:38:28 - train: epoch 0041, iter [04100, 05004], lr: 0.010000, loss: 1.6934 +2023-09-11 10:39:06 - train: epoch 0041, iter [04200, 05004], lr: 0.010000, loss: 1.2352 +2023-09-11 10:39:41 - train: epoch 0041, iter [04300, 05004], lr: 0.010000, loss: 1.4797 +2023-09-11 10:40:19 - train: epoch 0041, iter [04400, 05004], lr: 0.010000, loss: 1.6141 +2023-09-11 10:40:53 - train: epoch 0041, iter [04500, 05004], lr: 0.010000, loss: 1.7709 +2023-09-11 10:41:31 - train: epoch 0041, iter [04600, 05004], lr: 0.010000, loss: 1.7875 +2023-09-11 10:42:09 - train: epoch 0041, iter [04700, 05004], lr: 0.010000, loss: 1.5842 +2023-09-11 10:42:43 - train: epoch 0041, iter [04800, 05004], lr: 0.010000, loss: 1.6274 +2023-09-11 10:43:19 - train: epoch 0041, iter [04900, 05004], lr: 0.010000, loss: 1.7359 +2023-09-11 10:43:52 - train: epoch 0041, iter [05000, 05004], lr: 0.010000, loss: 1.6601 +2023-09-11 10:43:53 - train: epoch 041, train_loss: 1.5639 +2023-09-11 10:45:17 - eval: epoch: 041, acc1: 65.866%, acc5: 86.830%, test_loss: 1.3999, per_image_load_time: 1.344ms, per_image_inference_time: 0.308ms +2023-09-11 10:45:17 - until epoch: 041, best_acc1: 66.222% +2023-09-11 10:45:17 - epoch 042 lr: 0.010000 +2023-09-11 10:46:01 - train: epoch 0042, iter [00100, 05004], lr: 0.010000, loss: 1.2834 +2023-09-11 10:46:37 - train: epoch 0042, iter [00200, 05004], lr: 0.010000, loss: 1.6500 +2023-09-11 10:47:11 - train: epoch 0042, iter [00300, 05004], lr: 0.010000, loss: 1.7074 +2023-09-11 10:47:49 - train: epoch 0042, iter [00400, 05004], lr: 0.010000, loss: 1.5475 +2023-09-11 10:48:25 - train: epoch 0042, iter [00500, 05004], lr: 
0.010000, loss: 1.3276 +2023-09-11 10:49:01 - train: epoch 0042, iter [00600, 05004], lr: 0.010000, loss: 1.5808 +2023-09-11 10:49:37 - train: epoch 0042, iter [00700, 05004], lr: 0.010000, loss: 1.6534 +2023-09-11 10:50:14 - train: epoch 0042, iter [00800, 05004], lr: 0.010000, loss: 1.4812 +2023-09-11 10:50:49 - train: epoch 0042, iter [00900, 05004], lr: 0.010000, loss: 1.7235 +2023-09-11 10:51:27 - train: epoch 0042, iter [01000, 05004], lr: 0.010000, loss: 1.8725 +2023-09-11 10:52:03 - train: epoch 0042, iter [01100, 05004], lr: 0.010000, loss: 1.4064 +2023-09-11 10:52:39 - train: epoch 0042, iter [01200, 05004], lr: 0.010000, loss: 1.4304 +2023-09-11 10:53:16 - train: epoch 0042, iter [01300, 05004], lr: 0.010000, loss: 1.5126 +2023-09-11 10:53:52 - train: epoch 0042, iter [01400, 05004], lr: 0.010000, loss: 1.6495 +2023-09-11 10:54:28 - train: epoch 0042, iter [01500, 05004], lr: 0.010000, loss: 1.4354 +2023-09-11 10:55:06 - train: epoch 0042, iter [01600, 05004], lr: 0.010000, loss: 1.6589 +2023-09-11 10:55:41 - train: epoch 0042, iter [01700, 05004], lr: 0.010000, loss: 1.5150 +2023-09-11 10:56:16 - train: epoch 0042, iter [01800, 05004], lr: 0.010000, loss: 1.3850 +2023-09-11 10:56:53 - train: epoch 0042, iter [01900, 05004], lr: 0.010000, loss: 1.7773 +2023-09-11 10:57:29 - train: epoch 0042, iter [02000, 05004], lr: 0.010000, loss: 1.4935 +2023-09-11 10:58:04 - train: epoch 0042, iter [02100, 05004], lr: 0.010000, loss: 1.3047 +2023-09-11 10:58:42 - train: epoch 0042, iter [02200, 05004], lr: 0.010000, loss: 1.4645 +2023-09-11 10:59:18 - train: epoch 0042, iter [02300, 05004], lr: 0.010000, loss: 1.5716 +2023-09-11 10:59:54 - train: epoch 0042, iter [02400, 05004], lr: 0.010000, loss: 1.4613 +2023-09-11 11:00:32 - train: epoch 0042, iter [02500, 05004], lr: 0.010000, loss: 1.6783 +2023-09-11 11:01:08 - train: epoch 0042, iter [02600, 05004], lr: 0.010000, loss: 1.6597 +2023-09-11 11:01:44 - train: epoch 0042, iter [02700, 05004], lr: 0.010000, loss: 1.4374 +2023-09-11 11:02:21 - train: epoch 0042, iter [02800, 05004], lr: 0.010000, loss: 1.4478 +2023-09-11 11:02:57 - train: epoch 0042, iter [02900, 05004], lr: 0.010000, loss: 1.5649 +2023-09-11 11:03:33 - train: epoch 0042, iter [03000, 05004], lr: 0.010000, loss: 1.4961 +2023-09-11 11:04:11 - train: epoch 0042, iter [03100, 05004], lr: 0.010000, loss: 1.4939 +2023-09-11 11:04:46 - train: epoch 0042, iter [03200, 05004], lr: 0.010000, loss: 1.7619 +2023-09-11 11:05:21 - train: epoch 0042, iter [03300, 05004], lr: 0.010000, loss: 1.6582 +2023-09-11 11:06:00 - train: epoch 0042, iter [03400, 05004], lr: 0.010000, loss: 1.3826 +2023-09-11 11:06:35 - train: epoch 0042, iter [03500, 05004], lr: 0.010000, loss: 1.7253 +2023-09-11 11:07:10 - train: epoch 0042, iter [03600, 05004], lr: 0.010000, loss: 1.8312 +2023-09-11 11:07:48 - train: epoch 0042, iter [03700, 05004], lr: 0.010000, loss: 1.7268 +2023-09-11 11:08:23 - train: epoch 0042, iter [03800, 05004], lr: 0.010000, loss: 1.2329 +2023-09-11 11:08:59 - train: epoch 0042, iter [03900, 05004], lr: 0.010000, loss: 1.5243 +2023-09-11 11:09:38 - train: epoch 0042, iter [04000, 05004], lr: 0.010000, loss: 1.3577 +2023-09-11 11:10:12 - train: epoch 0042, iter [04100, 05004], lr: 0.010000, loss: 1.6246 +2023-09-11 11:10:49 - train: epoch 0042, iter [04200, 05004], lr: 0.010000, loss: 1.6025 +2023-09-11 11:11:25 - train: epoch 0042, iter [04300, 05004], lr: 0.010000, loss: 1.3477 +2023-09-11 11:12:03 - train: epoch 0042, iter [04400, 05004], lr: 0.010000, loss: 1.5888 +2023-09-11 11:12:38 
- train: epoch 0042, iter [04500, 05004], lr: 0.010000, loss: 1.5931 +2023-09-11 11:13:16 - train: epoch 0042, iter [04600, 05004], lr: 0.010000, loss: 1.5330 +2023-09-11 11:13:53 - train: epoch 0042, iter [04700, 05004], lr: 0.010000, loss: 1.4021 +2023-09-11 11:14:29 - train: epoch 0042, iter [04800, 05004], lr: 0.010000, loss: 1.5207 +2023-09-11 11:15:07 - train: epoch 0042, iter [04900, 05004], lr: 0.010000, loss: 1.3931 +2023-09-11 11:15:40 - train: epoch 0042, iter [05000, 05004], lr: 0.010000, loss: 1.6109 +2023-09-11 11:15:41 - train: epoch 042, train_loss: 1.5605 +2023-09-11 11:17:08 - eval: epoch: 042, acc1: 65.920%, acc5: 86.922%, test_loss: 1.3990, per_image_load_time: 1.390ms, per_image_inference_time: 0.324ms +2023-09-11 11:17:08 - until epoch: 042, best_acc1: 66.222% +2023-09-11 11:17:08 - epoch 043 lr: 0.010000 +2023-09-11 11:17:52 - train: epoch 0043, iter [00100, 05004], lr: 0.010000, loss: 1.5102 +2023-09-11 11:18:30 - train: epoch 0043, iter [00200, 05004], lr: 0.010000, loss: 1.5947 +2023-09-11 11:19:06 - train: epoch 0043, iter [00300, 05004], lr: 0.010000, loss: 1.3953 +2023-09-11 11:19:43 - train: epoch 0043, iter [00400, 05004], lr: 0.010000, loss: 1.4529 +2023-09-11 11:20:21 - train: epoch 0043, iter [00500, 05004], lr: 0.010000, loss: 1.3996 +2023-09-11 11:20:57 - train: epoch 0043, iter [00600, 05004], lr: 0.010000, loss: 1.6388 +2023-09-11 11:21:33 - train: epoch 0043, iter [00700, 05004], lr: 0.010000, loss: 1.6062 +2023-09-11 11:22:11 - train: epoch 0043, iter [00800, 05004], lr: 0.010000, loss: 1.5342 +2023-09-11 11:22:47 - train: epoch 0043, iter [00900, 05004], lr: 0.010000, loss: 1.4977 +2023-09-11 11:23:22 - train: epoch 0043, iter [01000, 05004], lr: 0.010000, loss: 1.7952 +2023-09-11 11:24:00 - train: epoch 0043, iter [01100, 05004], lr: 0.010000, loss: 1.6477 +2023-09-11 11:24:37 - train: epoch 0043, iter [01200, 05004], lr: 0.010000, loss: 1.4612 +2023-09-11 11:25:13 - train: epoch 0043, iter [01300, 05004], lr: 0.010000, loss: 1.7810 +2023-09-11 11:25:49 - train: epoch 0043, iter [01400, 05004], lr: 0.010000, loss: 1.5869 +2023-09-11 11:26:26 - train: epoch 0043, iter [01500, 05004], lr: 0.010000, loss: 1.3123 +2023-09-11 11:27:01 - train: epoch 0043, iter [01600, 05004], lr: 0.010000, loss: 1.4707 +2023-09-11 11:27:39 - train: epoch 0043, iter [01700, 05004], lr: 0.010000, loss: 1.6666 +2023-09-11 11:28:13 - train: epoch 0043, iter [01800, 05004], lr: 0.010000, loss: 1.7168 +2023-09-11 11:28:46 - train: epoch 0043, iter [01900, 05004], lr: 0.010000, loss: 1.6535 +2023-09-11 11:29:24 - train: epoch 0043, iter [02000, 05004], lr: 0.010000, loss: 1.6363 +2023-09-11 11:29:58 - train: epoch 0043, iter [02100, 05004], lr: 0.010000, loss: 1.4763 +2023-09-11 11:30:32 - train: epoch 0043, iter [02200, 05004], lr: 0.010000, loss: 1.5996 +2023-09-11 11:31:07 - train: epoch 0043, iter [02300, 05004], lr: 0.010000, loss: 1.8675 +2023-09-11 11:31:45 - train: epoch 0043, iter [02400, 05004], lr: 0.010000, loss: 1.3605 +2023-09-11 11:32:21 - train: epoch 0043, iter [02500, 05004], lr: 0.010000, loss: 1.6730 +2023-09-11 11:32:58 - train: epoch 0043, iter [02600, 05004], lr: 0.010000, loss: 1.4318 +2023-09-11 11:33:36 - train: epoch 0043, iter [02700, 05004], lr: 0.010000, loss: 1.6460 +2023-09-11 11:34:10 - train: epoch 0043, iter [02800, 05004], lr: 0.010000, loss: 1.4215 +2023-09-11 11:34:48 - train: epoch 0043, iter [02900, 05004], lr: 0.010000, loss: 1.5385 +2023-09-11 11:35:24 - train: epoch 0043, iter [03000, 05004], lr: 0.010000, loss: 1.8711 +2023-09-11 
11:35:58 - train: epoch 0043, iter [03100, 05004], lr: 0.010000, loss: 1.8229 +2023-09-11 11:36:36 - train: epoch 0043, iter [03200, 05004], lr: 0.010000, loss: 1.5682 +2023-09-11 11:37:13 - train: epoch 0043, iter [03300, 05004], lr: 0.010000, loss: 1.6132 +2023-09-11 11:37:46 - train: epoch 0043, iter [03400, 05004], lr: 0.010000, loss: 1.6347 +2023-09-11 11:38:24 - train: epoch 0043, iter [03500, 05004], lr: 0.010000, loss: 1.7414 +2023-09-11 11:39:01 - train: epoch 0043, iter [03600, 05004], lr: 0.010000, loss: 1.6940 +2023-09-11 11:39:36 - train: epoch 0043, iter [03700, 05004], lr: 0.010000, loss: 1.5295 +2023-09-11 11:40:13 - train: epoch 0043, iter [03800, 05004], lr: 0.010000, loss: 1.7606 +2023-09-11 11:40:49 - train: epoch 0043, iter [03900, 05004], lr: 0.010000, loss: 1.5236 +2023-09-11 11:41:24 - train: epoch 0043, iter [04000, 05004], lr: 0.010000, loss: 1.5763 +2023-09-11 11:42:02 - train: epoch 0043, iter [04100, 05004], lr: 0.010000, loss: 1.7440 +2023-09-11 11:42:38 - train: epoch 0043, iter [04200, 05004], lr: 0.010000, loss: 1.5988 +2023-09-11 11:43:14 - train: epoch 0043, iter [04300, 05004], lr: 0.010000, loss: 1.5053 +2023-09-11 11:43:52 - train: epoch 0043, iter [04400, 05004], lr: 0.010000, loss: 1.6048 +2023-09-11 11:44:27 - train: epoch 0043, iter [04500, 05004], lr: 0.010000, loss: 1.6520 +2023-09-11 11:45:02 - train: epoch 0043, iter [04600, 05004], lr: 0.010000, loss: 1.5923 +2023-09-11 11:45:41 - train: epoch 0043, iter [04700, 05004], lr: 0.010000, loss: 1.6029 +2023-09-11 11:46:16 - train: epoch 0043, iter [04800, 05004], lr: 0.010000, loss: 1.4870 +2023-09-11 11:46:51 - train: epoch 0043, iter [04900, 05004], lr: 0.010000, loss: 1.5264 +2023-09-11 11:47:25 - train: epoch 0043, iter [05000, 05004], lr: 0.010000, loss: 1.4849 +2023-09-11 11:47:26 - train: epoch 043, train_loss: 1.5592 +2023-09-11 11:48:50 - eval: epoch: 043, acc1: 65.580%, acc5: 86.846%, test_loss: 1.4100, per_image_load_time: 1.362ms, per_image_inference_time: 0.289ms +2023-09-11 11:48:50 - until epoch: 043, best_acc1: 66.222% +2023-09-11 11:48:50 - epoch 044 lr: 0.010000 +2023-09-11 11:49:36 - train: epoch 0044, iter [00100, 05004], lr: 0.010000, loss: 1.4206 +2023-09-11 11:50:12 - train: epoch 0044, iter [00200, 05004], lr: 0.010000, loss: 1.5058 +2023-09-11 11:50:47 - train: epoch 0044, iter [00300, 05004], lr: 0.010000, loss: 1.4373 +2023-09-11 11:51:25 - train: epoch 0044, iter [00400, 05004], lr: 0.010000, loss: 1.3399 +2023-09-11 11:52:00 - train: epoch 0044, iter [00500, 05004], lr: 0.010000, loss: 1.5578 +2023-09-11 11:52:38 - train: epoch 0044, iter [00600, 05004], lr: 0.010000, loss: 1.2355 +2023-09-11 11:53:14 - train: epoch 0044, iter [00700, 05004], lr: 0.010000, loss: 1.4762 +2023-09-11 11:53:50 - train: epoch 0044, iter [00800, 05004], lr: 0.010000, loss: 1.7001 +2023-09-11 11:54:27 - train: epoch 0044, iter [00900, 05004], lr: 0.010000, loss: 1.5788 +2023-09-11 11:55:05 - train: epoch 0044, iter [01000, 05004], lr: 0.010000, loss: 1.4596 +2023-09-11 11:55:40 - train: epoch 0044, iter [01100, 05004], lr: 0.010000, loss: 1.5061 +2023-09-11 11:56:18 - train: epoch 0044, iter [01200, 05004], lr: 0.010000, loss: 1.5142 +2023-09-11 11:56:54 - train: epoch 0044, iter [01300, 05004], lr: 0.010000, loss: 1.6898 +2023-09-11 11:57:30 - train: epoch 0044, iter [01400, 05004], lr: 0.010000, loss: 1.4828 +2023-09-11 11:58:08 - train: epoch 0044, iter [01500, 05004], lr: 0.010000, loss: 1.5968 +2023-09-11 11:58:45 - train: epoch 0044, iter [01600, 05004], lr: 0.010000, loss: 1.5405 
+2023-09-11 11:59:20 - train: epoch 0044, iter [01700, 05004], lr: 0.010000, loss: 1.5340 +2023-09-11 11:59:59 - train: epoch 0044, iter [01800, 05004], lr: 0.010000, loss: 1.5079 +2023-09-11 12:00:35 - train: epoch 0044, iter [01900, 05004], lr: 0.010000, loss: 1.6737 +2023-09-11 12:01:10 - train: epoch 0044, iter [02000, 05004], lr: 0.010000, loss: 1.4527 +2023-09-11 12:01:49 - train: epoch 0044, iter [02100, 05004], lr: 0.010000, loss: 1.7963 +2023-09-11 12:02:23 - train: epoch 0044, iter [02200, 05004], lr: 0.010000, loss: 1.6987 +2023-09-11 12:03:00 - train: epoch 0044, iter [02300, 05004], lr: 0.010000, loss: 1.6442 +2023-09-11 12:03:37 - train: epoch 0044, iter [02400, 05004], lr: 0.010000, loss: 1.6000 +2023-09-11 12:04:12 - train: epoch 0044, iter [02500, 05004], lr: 0.010000, loss: 1.5826 +2023-09-11 12:04:49 - train: epoch 0044, iter [02600, 05004], lr: 0.010000, loss: 1.4374 +2023-09-11 12:05:25 - train: epoch 0044, iter [02700, 05004], lr: 0.010000, loss: 1.6837 +2023-09-11 12:06:02 - train: epoch 0044, iter [02800, 05004], lr: 0.010000, loss: 1.2629 +2023-09-11 12:06:39 - train: epoch 0044, iter [02900, 05004], lr: 0.010000, loss: 1.3838 +2023-09-11 12:07:16 - train: epoch 0044, iter [03000, 05004], lr: 0.010000, loss: 1.2799 +2023-09-11 12:07:53 - train: epoch 0044, iter [03100, 05004], lr: 0.010000, loss: 1.7095 +2023-09-11 12:08:29 - train: epoch 0044, iter [03200, 05004], lr: 0.010000, loss: 1.5162 +2023-09-11 12:09:05 - train: epoch 0044, iter [03300, 05004], lr: 0.010000, loss: 1.5831 +2023-09-11 12:09:43 - train: epoch 0044, iter [03400, 05004], lr: 0.010000, loss: 1.7314 +2023-09-11 12:10:19 - train: epoch 0044, iter [03500, 05004], lr: 0.010000, loss: 1.7953 +2023-09-11 12:10:56 - train: epoch 0044, iter [03600, 05004], lr: 0.010000, loss: 1.7641 +2023-09-11 12:11:32 - train: epoch 0044, iter [03700, 05004], lr: 0.010000, loss: 1.5346 +2023-09-11 12:12:09 - train: epoch 0044, iter [03800, 05004], lr: 0.010000, loss: 1.3388 +2023-09-11 12:12:46 - train: epoch 0044, iter [03900, 05004], lr: 0.010000, loss: 1.4433 +2023-09-11 12:13:21 - train: epoch 0044, iter [04000, 05004], lr: 0.010000, loss: 1.6274 +2023-09-11 12:13:58 - train: epoch 0044, iter [04100, 05004], lr: 0.010000, loss: 1.3489 +2023-09-11 12:14:35 - train: epoch 0044, iter [04200, 05004], lr: 0.010000, loss: 1.7213 +2023-09-11 12:15:10 - train: epoch 0044, iter [04300, 05004], lr: 0.010000, loss: 1.6393 +2023-09-11 12:15:48 - train: epoch 0044, iter [04400, 05004], lr: 0.010000, loss: 1.5777 +2023-09-11 12:16:25 - train: epoch 0044, iter [04500, 05004], lr: 0.010000, loss: 1.6732 +2023-09-11 12:17:00 - train: epoch 0044, iter [04600, 05004], lr: 0.010000, loss: 1.4349 +2023-09-11 12:17:37 - train: epoch 0044, iter [04700, 05004], lr: 0.010000, loss: 1.8576 +2023-09-11 12:18:14 - train: epoch 0044, iter [04800, 05004], lr: 0.010000, loss: 1.6901 +2023-09-11 12:18:48 - train: epoch 0044, iter [04900, 05004], lr: 0.010000, loss: 1.4904 +2023-09-11 12:19:23 - train: epoch 0044, iter [05000, 05004], lr: 0.010000, loss: 1.5735 +2023-09-11 12:19:23 - train: epoch 044, train_loss: 1.5601 +2023-09-11 12:20:48 - eval: epoch: 044, acc1: 65.792%, acc5: 86.842%, test_loss: 1.4075, per_image_load_time: 1.407ms, per_image_inference_time: 0.272ms +2023-09-11 12:20:48 - until epoch: 044, best_acc1: 66.222% +2023-09-11 12:20:48 - epoch 045 lr: 0.010000 +2023-09-11 12:21:34 - train: epoch 0045, iter [00100, 05004], lr: 0.010000, loss: 1.5760 +2023-09-11 12:22:09 - train: epoch 0045, iter [00200, 05004], lr: 0.010000, loss: 
1.3808 +2023-09-11 12:22:45 - train: epoch 0045, iter [00300, 05004], lr: 0.010000, loss: 1.5800 +2023-09-11 12:23:22 - train: epoch 0045, iter [00400, 05004], lr: 0.010000, loss: 1.5131 +2023-09-11 12:23:57 - train: epoch 0045, iter [00500, 05004], lr: 0.010000, loss: 1.6090 +2023-09-11 12:24:33 - train: epoch 0045, iter [00600, 05004], lr: 0.010000, loss: 1.7942 +2023-09-11 12:25:11 - train: epoch 0045, iter [00700, 05004], lr: 0.010000, loss: 1.3857 +2023-09-11 12:25:49 - train: epoch 0045, iter [00800, 05004], lr: 0.010000, loss: 1.3668 +2023-09-11 12:26:26 - train: epoch 0045, iter [00900, 05004], lr: 0.010000, loss: 1.5217 +2023-09-11 12:27:01 - train: epoch 0045, iter [01000, 05004], lr: 0.010000, loss: 1.5893 +2023-09-11 12:27:40 - train: epoch 0045, iter [01100, 05004], lr: 0.010000, loss: 1.6223 +2023-09-11 12:28:16 - train: epoch 0045, iter [01200, 05004], lr: 0.010000, loss: 1.5036 +2023-09-11 12:28:52 - train: epoch 0045, iter [01300, 05004], lr: 0.010000, loss: 1.5784 +2023-09-11 12:29:30 - train: epoch 0045, iter [01400, 05004], lr: 0.010000, loss: 1.6772 +2023-09-11 12:30:05 - train: epoch 0045, iter [01500, 05004], lr: 0.010000, loss: 1.6617 +2023-09-11 12:30:41 - train: epoch 0045, iter [01600, 05004], lr: 0.010000, loss: 1.3287 +2023-09-11 12:31:20 - train: epoch 0045, iter [01700, 05004], lr: 0.010000, loss: 1.5133 +2023-09-11 12:31:56 - train: epoch 0045, iter [01800, 05004], lr: 0.010000, loss: 1.4463 +2023-09-11 12:32:31 - train: epoch 0045, iter [01900, 05004], lr: 0.010000, loss: 1.6029 +2023-09-11 12:33:10 - train: epoch 0045, iter [02000, 05004], lr: 0.010000, loss: 1.7346 +2023-09-11 12:33:45 - train: epoch 0045, iter [02100, 05004], lr: 0.010000, loss: 1.6589 +2023-09-11 12:34:22 - train: epoch 0045, iter [02200, 05004], lr: 0.010000, loss: 1.7575 +2023-09-11 12:34:58 - train: epoch 0045, iter [02300, 05004], lr: 0.010000, loss: 1.5991 +2023-09-11 12:35:35 - train: epoch 0045, iter [02400, 05004], lr: 0.010000, loss: 1.6832 +2023-09-11 12:36:11 - train: epoch 0045, iter [02500, 05004], lr: 0.010000, loss: 1.5112 +2023-09-11 12:36:47 - train: epoch 0045, iter [02600, 05004], lr: 0.010000, loss: 1.5685 +2023-09-11 12:37:25 - train: epoch 0045, iter [02700, 05004], lr: 0.010000, loss: 1.5211 +2023-09-11 12:38:01 - train: epoch 0045, iter [02800, 05004], lr: 0.010000, loss: 1.3624 +2023-09-11 12:38:36 - train: epoch 0045, iter [02900, 05004], lr: 0.010000, loss: 1.7795 +2023-09-11 12:39:13 - train: epoch 0045, iter [03000, 05004], lr: 0.010000, loss: 1.6020 +2023-09-11 12:39:51 - train: epoch 0045, iter [03100, 05004], lr: 0.010000, loss: 1.6181 +2023-09-11 12:40:26 - train: epoch 0045, iter [03200, 05004], lr: 0.010000, loss: 1.6633 +2023-09-11 12:41:02 - train: epoch 0045, iter [03300, 05004], lr: 0.010000, loss: 1.5402 +2023-09-11 12:41:39 - train: epoch 0045, iter [03400, 05004], lr: 0.010000, loss: 1.3757 +2023-09-11 12:42:15 - train: epoch 0045, iter [03500, 05004], lr: 0.010000, loss: 1.4677 +2023-09-11 12:42:51 - train: epoch 0045, iter [03600, 05004], lr: 0.010000, loss: 1.7086 +2023-09-11 12:43:28 - train: epoch 0045, iter [03700, 05004], lr: 0.010000, loss: 1.5570 +2023-09-11 12:44:04 - train: epoch 0045, iter [03800, 05004], lr: 0.010000, loss: 1.5608 +2023-09-11 12:44:40 - train: epoch 0045, iter [03900, 05004], lr: 0.010000, loss: 1.6194 +2023-09-11 12:45:15 - train: epoch 0045, iter [04000, 05004], lr: 0.010000, loss: 1.5289 +2023-09-11 12:45:53 - train: epoch 0045, iter [04100, 05004], lr: 0.010000, loss: 1.3966 +2023-09-11 12:46:28 - train: epoch 
0045, iter [04200, 05004], lr: 0.010000, loss: 1.6958 +2023-09-11 12:47:05 - train: epoch 0045, iter [04300, 05004], lr: 0.010000, loss: 1.6475 +2023-09-11 12:47:42 - train: epoch 0045, iter [04400, 05004], lr: 0.010000, loss: 1.7648 +2023-09-11 12:48:17 - train: epoch 0045, iter [04500, 05004], lr: 0.010000, loss: 1.4854 +2023-09-11 12:48:55 - train: epoch 0045, iter [04600, 05004], lr: 0.010000, loss: 1.7094 +2023-09-11 12:49:31 - train: epoch 0045, iter [04700, 05004], lr: 0.010000, loss: 1.6280 +2023-09-11 12:50:07 - train: epoch 0045, iter [04800, 05004], lr: 0.010000, loss: 1.5288 +2023-09-11 12:50:45 - train: epoch 0045, iter [04900, 05004], lr: 0.010000, loss: 1.6917 +2023-09-11 12:51:19 - train: epoch 0045, iter [05000, 05004], lr: 0.010000, loss: 1.5889 +2023-09-11 12:51:20 - train: epoch 045, train_loss: 1.5622 +2023-09-11 12:52:43 - eval: epoch: 045, acc1: 65.070%, acc5: 86.536%, test_loss: 1.4334, per_image_load_time: 1.350ms, per_image_inference_time: 0.296ms +2023-09-11 12:52:43 - until epoch: 045, best_acc1: 66.222% +2023-09-11 12:52:43 - epoch 046 lr: 0.010000 +2023-09-11 12:53:29 - train: epoch 0046, iter [00100, 05004], lr: 0.010000, loss: 1.5963 +2023-09-11 12:54:04 - train: epoch 0046, iter [00200, 05004], lr: 0.010000, loss: 1.3919 +2023-09-11 12:54:39 - train: epoch 0046, iter [00300, 05004], lr: 0.010000, loss: 1.5020 +2023-09-11 12:55:16 - train: epoch 0046, iter [00400, 05004], lr: 0.010000, loss: 1.4653 +2023-09-11 12:55:54 - train: epoch 0046, iter [00500, 05004], lr: 0.010000, loss: 1.6138 +2023-09-11 12:56:29 - train: epoch 0046, iter [00600, 05004], lr: 0.010000, loss: 1.4805 +2023-09-11 12:57:04 - train: epoch 0046, iter [00700, 05004], lr: 0.010000, loss: 1.4674 +2023-09-11 12:57:42 - train: epoch 0046, iter [00800, 05004], lr: 0.010000, loss: 1.5382 +2023-09-11 12:58:17 - train: epoch 0046, iter [00900, 05004], lr: 0.010000, loss: 1.5149 +2023-09-11 12:58:52 - train: epoch 0046, iter [01000, 05004], lr: 0.010000, loss: 1.6501 +2023-09-11 12:59:29 - train: epoch 0046, iter [01100, 05004], lr: 0.010000, loss: 1.3315 +2023-09-11 13:00:06 - train: epoch 0046, iter [01200, 05004], lr: 0.010000, loss: 1.3847 +2023-09-11 13:00:42 - train: epoch 0046, iter [01300, 05004], lr: 0.010000, loss: 1.7368 +2023-09-11 13:01:18 - train: epoch 0046, iter [01400, 05004], lr: 0.010000, loss: 1.6379 +2023-09-11 13:01:53 - train: epoch 0046, iter [01500, 05004], lr: 0.010000, loss: 1.4974 +2023-09-11 13:02:31 - train: epoch 0046, iter [01600, 05004], lr: 0.010000, loss: 1.7924 +2023-09-11 13:03:07 - train: epoch 0046, iter [01700, 05004], lr: 0.010000, loss: 1.7776 +2023-09-11 13:03:42 - train: epoch 0046, iter [01800, 05004], lr: 0.010000, loss: 1.6761 +2023-09-11 13:04:20 - train: epoch 0046, iter [01900, 05004], lr: 0.010000, loss: 1.4708 +2023-09-11 13:04:55 - train: epoch 0046, iter [02000, 05004], lr: 0.010000, loss: 1.4365 +2023-09-11 13:05:30 - train: epoch 0046, iter [02100, 05004], lr: 0.010000, loss: 1.6138 +2023-09-11 13:06:08 - train: epoch 0046, iter [02200, 05004], lr: 0.010000, loss: 1.3515 +2023-09-11 13:06:44 - train: epoch 0046, iter [02300, 05004], lr: 0.010000, loss: 1.6707 +2023-09-11 13:07:19 - train: epoch 0046, iter [02400, 05004], lr: 0.010000, loss: 1.5351 +2023-09-11 13:07:56 - train: epoch 0046, iter [02500, 05004], lr: 0.010000, loss: 1.4145 +2023-09-11 13:08:32 - train: epoch 0046, iter [02600, 05004], lr: 0.010000, loss: 1.6944 +2023-09-11 13:09:08 - train: epoch 0046, iter [02700, 05004], lr: 0.010000, loss: 1.4640 +2023-09-11 13:09:45 - train: 
epoch 0046, iter [02800, 05004], lr: 0.010000, loss: 1.4892 +2023-09-11 13:10:22 - train: epoch 0046, iter [02900, 05004], lr: 0.010000, loss: 1.6901 +2023-09-11 13:10:56 - train: epoch 0046, iter [03000, 05004], lr: 0.010000, loss: 1.2724 +2023-09-11 13:11:34 - train: epoch 0046, iter [03100, 05004], lr: 0.010000, loss: 1.2962 +2023-09-11 13:12:10 - train: epoch 0046, iter [03200, 05004], lr: 0.010000, loss: 1.5506 +2023-09-11 13:12:46 - train: epoch 0046, iter [03300, 05004], lr: 0.010000, loss: 1.7490 +2023-09-11 13:13:22 - train: epoch 0046, iter [03400, 05004], lr: 0.010000, loss: 1.6067 +2023-09-11 13:14:00 - train: epoch 0046, iter [03500, 05004], lr: 0.010000, loss: 1.6395 +2023-09-11 13:14:35 - train: epoch 0046, iter [03600, 05004], lr: 0.010000, loss: 1.6714 +2023-09-11 13:15:12 - train: epoch 0046, iter [03700, 05004], lr: 0.010000, loss: 1.4439 +2023-09-11 13:15:48 - train: epoch 0046, iter [03800, 05004], lr: 0.010000, loss: 1.5563 +2023-09-11 13:16:25 - train: epoch 0046, iter [03900, 05004], lr: 0.010000, loss: 1.6512 +2023-09-11 13:17:01 - train: epoch 0046, iter [04000, 05004], lr: 0.010000, loss: 1.4839 +2023-09-11 13:17:38 - train: epoch 0046, iter [04100, 05004], lr: 0.010000, loss: 1.7036 +2023-09-11 13:18:13 - train: epoch 0046, iter [04200, 05004], lr: 0.010000, loss: 1.5702 +2023-09-11 13:18:50 - train: epoch 0046, iter [04300, 05004], lr: 0.010000, loss: 1.6576 +2023-09-11 13:19:26 - train: epoch 0046, iter [04400, 05004], lr: 0.010000, loss: 1.6930 +2023-09-11 13:20:03 - train: epoch 0046, iter [04500, 05004], lr: 0.010000, loss: 1.5551 +2023-09-11 13:20:40 - train: epoch 0046, iter [04600, 05004], lr: 0.010000, loss: 1.6081 +2023-09-11 13:21:16 - train: epoch 0046, iter [04700, 05004], lr: 0.010000, loss: 1.4719 +2023-09-11 13:21:52 - train: epoch 0046, iter [04800, 05004], lr: 0.010000, loss: 1.4173 +2023-09-11 13:22:29 - train: epoch 0046, iter [04900, 05004], lr: 0.010000, loss: 1.7927 +2023-09-11 13:23:03 - train: epoch 0046, iter [05000, 05004], lr: 0.010000, loss: 1.5888 +2023-09-11 13:23:04 - train: epoch 046, train_loss: 1.5599 +2023-09-11 13:24:27 - eval: epoch: 046, acc1: 65.514%, acc5: 86.748%, test_loss: 1.4180, per_image_load_time: 1.352ms, per_image_inference_time: 0.292ms +2023-09-11 13:24:27 - until epoch: 046, best_acc1: 66.222% +2023-09-11 13:24:27 - epoch 047 lr: 0.010000 +2023-09-11 13:25:12 - train: epoch 0047, iter [00100, 05004], lr: 0.010000, loss: 1.4975 +2023-09-11 13:25:49 - train: epoch 0047, iter [00200, 05004], lr: 0.010000, loss: 1.6721 +2023-09-11 13:26:24 - train: epoch 0047, iter [00300, 05004], lr: 0.010000, loss: 1.3874 +2023-09-11 13:27:01 - train: epoch 0047, iter [00400, 05004], lr: 0.010000, loss: 1.4422 +2023-09-11 13:27:37 - train: epoch 0047, iter [00500, 05004], lr: 0.010000, loss: 1.4741 +2023-09-11 13:28:13 - train: epoch 0047, iter [00600, 05004], lr: 0.010000, loss: 1.3949 +2023-09-11 13:28:50 - train: epoch 0047, iter [00700, 05004], lr: 0.010000, loss: 1.5239 +2023-09-11 13:29:27 - train: epoch 0047, iter [00800, 05004], lr: 0.010000, loss: 1.4967 +2023-09-11 13:30:02 - train: epoch 0047, iter [00900, 05004], lr: 0.010000, loss: 1.6612 +2023-09-11 13:30:38 - train: epoch 0047, iter [01000, 05004], lr: 0.010000, loss: 1.5084 +2023-09-11 13:31:15 - train: epoch 0047, iter [01100, 05004], lr: 0.010000, loss: 1.7356 +2023-09-11 13:31:50 - train: epoch 0047, iter [01200, 05004], lr: 0.010000, loss: 1.8051 +2023-09-11 13:32:27 - train: epoch 0047, iter [01300, 05004], lr: 0.010000, loss: 1.6508 +2023-09-11 13:33:03 - 
train: epoch 0047, iter [01400, 05004], lr: 0.010000, loss: 1.5552 +2023-09-11 13:33:39 - train: epoch 0047, iter [01500, 05004], lr: 0.010000, loss: 1.5079 +2023-09-11 13:34:15 - train: epoch 0047, iter [01600, 05004], lr: 0.010000, loss: 1.6515 +2023-09-11 13:34:53 - train: epoch 0047, iter [01700, 05004], lr: 0.010000, loss: 1.6324 +2023-09-11 13:35:29 - train: epoch 0047, iter [01800, 05004], lr: 0.010000, loss: 1.5966 +2023-09-11 13:36:05 - train: epoch 0047, iter [01900, 05004], lr: 0.010000, loss: 1.4228 +2023-09-11 13:36:44 - train: epoch 0047, iter [02000, 05004], lr: 0.010000, loss: 1.4534 +2023-09-11 13:37:20 - train: epoch 0047, iter [02100, 05004], lr: 0.010000, loss: 1.7609 +2023-09-11 13:37:55 - train: epoch 0047, iter [02200, 05004], lr: 0.010000, loss: 1.7205 +2023-09-11 13:38:34 - train: epoch 0047, iter [02300, 05004], lr: 0.010000, loss: 1.6030 +2023-09-11 13:39:10 - train: epoch 0047, iter [02400, 05004], lr: 0.010000, loss: 1.4453 +2023-09-11 13:39:45 - train: epoch 0047, iter [02500, 05004], lr: 0.010000, loss: 1.7136 +2023-09-11 13:40:24 - train: epoch 0047, iter [02600, 05004], lr: 0.010000, loss: 1.8584 +2023-09-11 13:41:00 - train: epoch 0047, iter [02700, 05004], lr: 0.010000, loss: 1.4863 +2023-09-11 13:41:36 - train: epoch 0047, iter [02800, 05004], lr: 0.010000, loss: 1.6618 +2023-09-11 13:42:14 - train: epoch 0047, iter [02900, 05004], lr: 0.010000, loss: 1.4951 +2023-09-11 13:42:49 - train: epoch 0047, iter [03000, 05004], lr: 0.010000, loss: 1.6010 +2023-09-11 13:43:25 - train: epoch 0047, iter [03100, 05004], lr: 0.010000, loss: 1.5606 +2023-09-11 13:44:04 - train: epoch 0047, iter [03200, 05004], lr: 0.010000, loss: 1.6019 +2023-09-11 13:44:40 - train: epoch 0047, iter [03300, 05004], lr: 0.010000, loss: 1.5134 +2023-09-11 13:45:15 - train: epoch 0047, iter [03400, 05004], lr: 0.010000, loss: 1.3782 +2023-09-11 13:45:53 - train: epoch 0047, iter [03500, 05004], lr: 0.010000, loss: 1.7549 +2023-09-11 13:46:28 - train: epoch 0047, iter [03600, 05004], lr: 0.010000, loss: 1.6152 +2023-09-11 13:47:05 - train: epoch 0047, iter [03700, 05004], lr: 0.010000, loss: 1.5746 +2023-09-11 13:47:42 - train: epoch 0047, iter [03800, 05004], lr: 0.010000, loss: 1.5588 +2023-09-11 13:48:17 - train: epoch 0047, iter [03900, 05004], lr: 0.010000, loss: 1.5951 +2023-09-11 13:48:52 - train: epoch 0047, iter [04000, 05004], lr: 0.010000, loss: 1.5269 +2023-09-11 13:49:30 - train: epoch 0047, iter [04100, 05004], lr: 0.010000, loss: 1.6644 +2023-09-11 13:50:04 - train: epoch 0047, iter [04200, 05004], lr: 0.010000, loss: 1.6620 +2023-09-11 13:50:42 - train: epoch 0047, iter [04300, 05004], lr: 0.010000, loss: 1.3748 +2023-09-11 13:51:19 - train: epoch 0047, iter [04400, 05004], lr: 0.010000, loss: 1.5190 +2023-09-11 13:51:56 - train: epoch 0047, iter [04500, 05004], lr: 0.010000, loss: 1.4721 +2023-09-11 13:52:32 - train: epoch 0047, iter [04600, 05004], lr: 0.010000, loss: 1.6112 +2023-09-11 13:53:10 - train: epoch 0047, iter [04700, 05004], lr: 0.010000, loss: 1.7685 +2023-09-11 13:53:44 - train: epoch 0047, iter [04800, 05004], lr: 0.010000, loss: 1.4340 +2023-09-11 13:54:20 - train: epoch 0047, iter [04900, 05004], lr: 0.010000, loss: 1.6233 +2023-09-11 13:54:55 - train: epoch 0047, iter [05000, 05004], lr: 0.010000, loss: 1.7154 +2023-09-11 13:54:55 - train: epoch 047, train_loss: 1.5590 +2023-09-11 13:56:19 - eval: epoch: 047, acc1: 65.682%, acc5: 86.702%, test_loss: 1.4157, per_image_load_time: 1.368ms, per_image_inference_time: 0.276ms +2023-09-11 13:56:19 - until 
epoch: 047, best_acc1: 66.222% +2023-09-11 13:56:19 - epoch 048 lr: 0.010000 +2023-09-11 13:57:04 - train: epoch 0048, iter [00100, 05004], lr: 0.010000, loss: 1.6147 +2023-09-11 13:57:39 - train: epoch 0048, iter [00200, 05004], lr: 0.010000, loss: 1.8850 +2023-09-11 13:58:14 - train: epoch 0048, iter [00300, 05004], lr: 0.010000, loss: 1.6001 +2023-09-11 13:58:52 - train: epoch 0048, iter [00400, 05004], lr: 0.010000, loss: 1.6503 +2023-09-11 13:59:27 - train: epoch 0048, iter [00500, 05004], lr: 0.010000, loss: 1.7149 +2023-09-11 14:00:03 - train: epoch 0048, iter [00600, 05004], lr: 0.010000, loss: 1.5910 +2023-09-11 14:00:40 - train: epoch 0048, iter [00700, 05004], lr: 0.010000, loss: 1.4982 +2023-09-11 14:01:17 - train: epoch 0048, iter [00800, 05004], lr: 0.010000, loss: 1.7296 +2023-09-11 14:01:52 - train: epoch 0048, iter [00900, 05004], lr: 0.010000, loss: 1.7172 +2023-09-11 14:02:27 - train: epoch 0048, iter [01000, 05004], lr: 0.010000, loss: 1.5264 +2023-09-11 14:03:06 - train: epoch 0048, iter [01100, 05004], lr: 0.010000, loss: 1.7160 +2023-09-11 14:03:41 - train: epoch 0048, iter [01200, 05004], lr: 0.010000, loss: 1.4774 +2023-09-11 14:04:17 - train: epoch 0048, iter [01300, 05004], lr: 0.010000, loss: 1.2766 +2023-09-11 14:04:55 - train: epoch 0048, iter [01400, 05004], lr: 0.010000, loss: 1.5382 +2023-09-11 14:05:30 - train: epoch 0048, iter [01500, 05004], lr: 0.010000, loss: 1.4112 +2023-09-11 14:06:07 - train: epoch 0048, iter [01600, 05004], lr: 0.010000, loss: 1.4901 +2023-09-11 14:06:43 - train: epoch 0048, iter [01700, 05004], lr: 0.010000, loss: 1.5906 +2023-09-11 14:07:19 - train: epoch 0048, iter [01800, 05004], lr: 0.010000, loss: 1.3470 +2023-09-11 14:07:54 - train: epoch 0048, iter [01900, 05004], lr: 0.010000, loss: 1.5295 +2023-09-11 14:08:33 - train: epoch 0048, iter [02000, 05004], lr: 0.010000, loss: 1.6509 +2023-09-11 14:09:09 - train: epoch 0048, iter [02100, 05004], lr: 0.010000, loss: 1.4810 +2023-09-11 14:09:43 - train: epoch 0048, iter [02200, 05004], lr: 0.010000, loss: 1.6876 +2023-09-11 14:10:21 - train: epoch 0048, iter [02300, 05004], lr: 0.010000, loss: 1.2657 +2023-09-11 14:10:56 - train: epoch 0048, iter [02400, 05004], lr: 0.010000, loss: 1.5667 +2023-09-11 14:11:33 - train: epoch 0048, iter [02500, 05004], lr: 0.010000, loss: 1.7164 +2023-09-11 14:12:11 - train: epoch 0048, iter [02600, 05004], lr: 0.010000, loss: 1.4847 +2023-09-11 14:12:45 - train: epoch 0048, iter [02700, 05004], lr: 0.010000, loss: 1.7193 +2023-09-11 14:13:19 - train: epoch 0048, iter [02800, 05004], lr: 0.010000, loss: 1.4157 +2023-09-11 14:13:58 - train: epoch 0048, iter [02900, 05004], lr: 0.010000, loss: 1.5290 +2023-09-11 14:14:33 - train: epoch 0048, iter [03000, 05004], lr: 0.010000, loss: 1.7395 +2023-09-11 14:15:08 - train: epoch 0048, iter [03100, 05004], lr: 0.010000, loss: 1.7973 +2023-09-11 14:15:46 - train: epoch 0048, iter [03200, 05004], lr: 0.010000, loss: 1.4653 +2023-09-11 14:16:20 - train: epoch 0048, iter [03300, 05004], lr: 0.010000, loss: 1.6928 +2023-09-11 14:16:55 - train: epoch 0048, iter [03400, 05004], lr: 0.010000, loss: 1.8741 +2023-09-11 14:17:34 - train: epoch 0048, iter [03500, 05004], lr: 0.010000, loss: 1.8886 +2023-09-11 14:18:09 - train: epoch 0048, iter [03600, 05004], lr: 0.010000, loss: 1.5187 +2023-09-11 14:18:45 - train: epoch 0048, iter [03700, 05004], lr: 0.010000, loss: 1.5843 +2023-09-11 14:19:22 - train: epoch 0048, iter [03800, 05004], lr: 0.010000, loss: 1.5036 +2023-09-11 14:19:57 - train: epoch 0048, iter [03900, 
05004], lr: 0.010000, loss: 1.5948 +2023-09-11 14:20:33 - train: epoch 0048, iter [04000, 05004], lr: 0.010000, loss: 1.6138 +2023-09-11 14:21:12 - train: epoch 0048, iter [04100, 05004], lr: 0.010000, loss: 1.7598 +2023-09-11 14:21:46 - train: epoch 0048, iter [04200, 05004], lr: 0.010000, loss: 1.4015 +2023-09-11 14:22:21 - train: epoch 0048, iter [04300, 05004], lr: 0.010000, loss: 1.4662 +2023-09-11 14:22:59 - train: epoch 0048, iter [04400, 05004], lr: 0.010000, loss: 1.6728 +2023-09-11 14:23:34 - train: epoch 0048, iter [04500, 05004], lr: 0.010000, loss: 1.7041 +2023-09-11 14:24:10 - train: epoch 0048, iter [04600, 05004], lr: 0.010000, loss: 1.4714 +2023-09-11 14:24:48 - train: epoch 0048, iter [04700, 05004], lr: 0.010000, loss: 1.6263 +2023-09-11 14:25:24 - train: epoch 0048, iter [04800, 05004], lr: 0.010000, loss: 1.9745 +2023-09-11 14:26:01 - train: epoch 0048, iter [04900, 05004], lr: 0.010000, loss: 1.5437 +2023-09-11 14:26:35 - train: epoch 0048, iter [05000, 05004], lr: 0.010000, loss: 1.5755 +2023-09-11 14:26:36 - train: epoch 048, train_loss: 1.5588 +2023-09-11 14:28:00 - eval: epoch: 048, acc1: 65.630%, acc5: 86.744%, test_loss: 1.4205, per_image_load_time: 1.381ms, per_image_inference_time: 0.283ms +2023-09-11 14:28:00 - until epoch: 048, best_acc1: 66.222% +2023-09-11 14:28:00 - epoch 049 lr: 0.010000 +2023-09-11 14:28:45 - train: epoch 0049, iter [00100, 05004], lr: 0.010000, loss: 1.5904 +2023-09-11 14:29:22 - train: epoch 0049, iter [00200, 05004], lr: 0.010000, loss: 1.4569 +2023-09-11 14:29:58 - train: epoch 0049, iter [00300, 05004], lr: 0.010000, loss: 1.4202 +2023-09-11 14:30:33 - train: epoch 0049, iter [00400, 05004], lr: 0.010000, loss: 1.5691 +2023-09-11 14:31:11 - train: epoch 0049, iter [00500, 05004], lr: 0.010000, loss: 1.5642 +2023-09-11 14:31:46 - train: epoch 0049, iter [00600, 05004], lr: 0.010000, loss: 1.3030 +2023-09-11 14:32:21 - train: epoch 0049, iter [00700, 05004], lr: 0.010000, loss: 1.5959 +2023-09-11 14:33:01 - train: epoch 0049, iter [00800, 05004], lr: 0.010000, loss: 1.8427 +2023-09-11 14:33:36 - train: epoch 0049, iter [00900, 05004], lr: 0.010000, loss: 1.4978 +2023-09-11 14:34:12 - train: epoch 0049, iter [01000, 05004], lr: 0.010000, loss: 1.6691 +2023-09-11 14:34:50 - train: epoch 0049, iter [01100, 05004], lr: 0.010000, loss: 1.5754 +2023-09-11 14:35:25 - train: epoch 0049, iter [01200, 05004], lr: 0.010000, loss: 1.4069 +2023-09-11 14:36:01 - train: epoch 0049, iter [01300, 05004], lr: 0.010000, loss: 1.9145 +2023-09-11 14:36:39 - train: epoch 0049, iter [01400, 05004], lr: 0.010000, loss: 1.8784 +2023-09-11 14:37:14 - train: epoch 0049, iter [01500, 05004], lr: 0.010000, loss: 1.4826 +2023-09-11 14:37:51 - train: epoch 0049, iter [01600, 05004], lr: 0.010000, loss: 1.5936 +2023-09-11 14:38:26 - train: epoch 0049, iter [01700, 05004], lr: 0.010000, loss: 1.3621 +2023-09-11 14:39:02 - train: epoch 0049, iter [01800, 05004], lr: 0.010000, loss: 1.5334 +2023-09-11 14:39:39 - train: epoch 0049, iter [01900, 05004], lr: 0.010000, loss: 1.3111 +2023-09-11 14:40:15 - train: epoch 0049, iter [02000, 05004], lr: 0.010000, loss: 1.5081 +2023-09-11 14:40:51 - train: epoch 0049, iter [02100, 05004], lr: 0.010000, loss: 1.5197 +2023-09-11 14:41:28 - train: epoch 0049, iter [02200, 05004], lr: 0.010000, loss: 1.5508 +2023-09-11 14:42:04 - train: epoch 0049, iter [02300, 05004], lr: 0.010000, loss: 1.3142 +2023-09-11 14:42:39 - train: epoch 0049, iter [02400, 05004], lr: 0.010000, loss: 1.4894 +2023-09-11 14:43:18 - train: epoch 0049, iter 
[02500, 05004], lr: 0.010000, loss: 1.7354 +2023-09-11 14:43:55 - train: epoch 0049, iter [02600, 05004], lr: 0.010000, loss: 1.5642 +2023-09-11 14:44:30 - train: epoch 0049, iter [02700, 05004], lr: 0.010000, loss: 1.4156 +2023-09-11 14:45:08 - train: epoch 0049, iter [02800, 05004], lr: 0.010000, loss: 1.6001 +2023-09-11 14:45:45 - train: epoch 0049, iter [02900, 05004], lr: 0.010000, loss: 1.6425 +2023-09-11 14:46:20 - train: epoch 0049, iter [03000, 05004], lr: 0.010000, loss: 1.6214 +2023-09-11 14:46:55 - train: epoch 0049, iter [03100, 05004], lr: 0.010000, loss: 1.5493 +2023-09-11 14:47:33 - train: epoch 0049, iter [03200, 05004], lr: 0.010000, loss: 1.7573 +2023-09-11 14:48:09 - train: epoch 0049, iter [03300, 05004], lr: 0.010000, loss: 1.6451 +2023-09-11 14:48:44 - train: epoch 0049, iter [03400, 05004], lr: 0.010000, loss: 1.4073 +2023-09-11 14:49:21 - train: epoch 0049, iter [03500, 05004], lr: 0.010000, loss: 1.7279 +2023-09-11 14:49:58 - train: epoch 0049, iter [03600, 05004], lr: 0.010000, loss: 1.6505 +2023-09-11 14:50:33 - train: epoch 0049, iter [03700, 05004], lr: 0.010000, loss: 1.5807 +2023-09-11 14:51:12 - train: epoch 0049, iter [03800, 05004], lr: 0.010000, loss: 1.6904 +2023-09-11 14:51:47 - train: epoch 0049, iter [03900, 05004], lr: 0.010000, loss: 1.7220 +2023-09-11 14:52:22 - train: epoch 0049, iter [04000, 05004], lr: 0.010000, loss: 1.3875 +2023-09-11 14:52:59 - train: epoch 0049, iter [04100, 05004], lr: 0.010000, loss: 1.4507 +2023-09-11 14:53:36 - train: epoch 0049, iter [04200, 05004], lr: 0.010000, loss: 1.5296 +2023-09-11 14:54:12 - train: epoch 0049, iter [04300, 05004], lr: 0.010000, loss: 1.7516 +2023-09-11 14:54:51 - train: epoch 0049, iter [04400, 05004], lr: 0.010000, loss: 1.5456 +2023-09-11 14:55:25 - train: epoch 0049, iter [04500, 05004], lr: 0.010000, loss: 1.5557 +2023-09-11 14:56:00 - train: epoch 0049, iter [04600, 05004], lr: 0.010000, loss: 1.5202 +2023-09-11 14:56:39 - train: epoch 0049, iter [04700, 05004], lr: 0.010000, loss: 1.6926 +2023-09-11 14:57:14 - train: epoch 0049, iter [04800, 05004], lr: 0.010000, loss: 1.4365 +2023-09-11 14:57:49 - train: epoch 0049, iter [04900, 05004], lr: 0.010000, loss: 1.5074 +2023-09-11 14:58:23 - train: epoch 0049, iter [05000, 05004], lr: 0.010000, loss: 1.4711 +2023-09-11 14:58:24 - train: epoch 049, train_loss: 1.5597 +2023-09-11 14:59:47 - eval: epoch: 049, acc1: 65.674%, acc5: 86.690%, test_loss: 1.4212, per_image_load_time: 1.340ms, per_image_inference_time: 0.300ms +2023-09-11 14:59:47 - until epoch: 049, best_acc1: 66.222% +2023-09-11 14:59:47 - epoch 050 lr: 0.010000 +2023-09-11 15:00:28 - train: epoch 0050, iter [00100, 05004], lr: 0.010000, loss: 1.4447 +2023-09-11 15:01:06 - train: epoch 0050, iter [00200, 05004], lr: 0.010000, loss: 1.5017 +2023-09-11 15:01:40 - train: epoch 0050, iter [00300, 05004], lr: 0.010000, loss: 1.3996 +2023-09-11 15:02:17 - train: epoch 0050, iter [00400, 05004], lr: 0.010000, loss: 1.4546 +2023-09-11 15:02:51 - train: epoch 0050, iter [00500, 05004], lr: 0.010000, loss: 1.4604 +2023-09-11 15:03:27 - train: epoch 0050, iter [00600, 05004], lr: 0.010000, loss: 1.7904 +2023-09-11 15:04:03 - train: epoch 0050, iter [00700, 05004], lr: 0.010000, loss: 1.3962 +2023-09-11 15:04:40 - train: epoch 0050, iter [00800, 05004], lr: 0.010000, loss: 1.5037 +2023-09-11 15:05:18 - train: epoch 0050, iter [00900, 05004], lr: 0.010000, loss: 1.5351 +2023-09-11 15:05:54 - train: epoch 0050, iter [01000, 05004], lr: 0.010000, loss: 1.4153 +2023-09-11 15:06:29 - train: epoch 0050, 
iter [01100, 05004], lr: 0.010000, loss: 1.5634 +2023-09-11 15:07:07 - train: epoch 0050, iter [01200, 05004], lr: 0.010000, loss: 1.4924 +2023-09-11 15:07:41 - train: epoch 0050, iter [01300, 05004], lr: 0.010000, loss: 1.5790 +2023-09-11 15:08:16 - train: epoch 0050, iter [01400, 05004], lr: 0.010000, loss: 1.5310 +2023-09-11 15:08:55 - train: epoch 0050, iter [01500, 05004], lr: 0.010000, loss: 1.4475 +2023-09-11 15:09:29 - train: epoch 0050, iter [01600, 05004], lr: 0.010000, loss: 1.4580 +2023-09-11 15:10:03 - train: epoch 0050, iter [01700, 05004], lr: 0.010000, loss: 1.5423 +2023-09-11 15:10:40 - train: epoch 0050, iter [01800, 05004], lr: 0.010000, loss: 1.8243 +2023-09-11 15:11:17 - train: epoch 0050, iter [01900, 05004], lr: 0.010000, loss: 1.4829 +2023-09-11 15:11:53 - train: epoch 0050, iter [02000, 05004], lr: 0.010000, loss: 1.3427 +2023-09-11 15:12:28 - train: epoch 0050, iter [02100, 05004], lr: 0.010000, loss: 1.4053 +2023-09-11 15:13:03 - train: epoch 0050, iter [02200, 05004], lr: 0.010000, loss: 1.6745 +2023-09-11 15:13:39 - train: epoch 0050, iter [02300, 05004], lr: 0.010000, loss: 1.2860 +2023-09-11 15:14:15 - train: epoch 0050, iter [02400, 05004], lr: 0.010000, loss: 1.9468 +2023-09-11 15:14:53 - train: epoch 0050, iter [02500, 05004], lr: 0.010000, loss: 1.4796 +2023-09-11 15:15:28 - train: epoch 0050, iter [02600, 05004], lr: 0.010000, loss: 1.4432 +2023-09-11 15:16:04 - train: epoch 0050, iter [02700, 05004], lr: 0.010000, loss: 1.7262 +2023-09-11 15:16:38 - train: epoch 0050, iter [02800, 05004], lr: 0.010000, loss: 1.5327 +2023-09-11 15:17:15 - train: epoch 0050, iter [02900, 05004], lr: 0.010000, loss: 1.9402 +2023-09-11 15:17:52 - train: epoch 0050, iter [03000, 05004], lr: 0.010000, loss: 1.5392 +2023-09-11 15:18:27 - train: epoch 0050, iter [03100, 05004], lr: 0.010000, loss: 1.4814 +2023-09-11 15:19:05 - train: epoch 0050, iter [03200, 05004], lr: 0.010000, loss: 1.4981 +2023-09-11 15:19:40 - train: epoch 0050, iter [03300, 05004], lr: 0.010000, loss: 1.5591 +2023-09-11 15:20:14 - train: epoch 0050, iter [03400, 05004], lr: 0.010000, loss: 1.6640 +2023-09-11 15:20:52 - train: epoch 0050, iter [03500, 05004], lr: 0.010000, loss: 1.5840 +2023-09-11 15:21:27 - train: epoch 0050, iter [03600, 05004], lr: 0.010000, loss: 1.7241 +2023-09-11 15:22:03 - train: epoch 0050, iter [03700, 05004], lr: 0.010000, loss: 1.9158 +2023-09-11 15:22:40 - train: epoch 0050, iter [03800, 05004], lr: 0.010000, loss: 1.3439 +2023-09-11 15:23:16 - train: epoch 0050, iter [03900, 05004], lr: 0.010000, loss: 1.5630 +2023-09-11 15:23:52 - train: epoch 0050, iter [04000, 05004], lr: 0.010000, loss: 1.7252 +2023-09-11 15:24:30 - train: epoch 0050, iter [04100, 05004], lr: 0.010000, loss: 1.7308 +2023-09-11 15:25:06 - train: epoch 0050, iter [04200, 05004], lr: 0.010000, loss: 1.7426 +2023-09-11 15:25:40 - train: epoch 0050, iter [04300, 05004], lr: 0.010000, loss: 1.5485 +2023-09-11 15:26:19 - train: epoch 0050, iter [04400, 05004], lr: 0.010000, loss: 1.6665 +2023-09-11 15:26:53 - train: epoch 0050, iter [04500, 05004], lr: 0.010000, loss: 1.4054 +2023-09-11 15:27:29 - train: epoch 0050, iter [04600, 05004], lr: 0.010000, loss: 1.7192 +2023-09-11 15:28:07 - train: epoch 0050, iter [04700, 05004], lr: 0.010000, loss: 1.6412 +2023-09-11 15:28:43 - train: epoch 0050, iter [04800, 05004], lr: 0.010000, loss: 1.4651 +2023-09-11 15:29:18 - train: epoch 0050, iter [04900, 05004], lr: 0.010000, loss: 1.3998 +2023-09-11 15:29:53 - train: epoch 0050, iter [05000, 05004], lr: 0.010000, loss: 
1.3584 +2023-09-11 15:29:53 - train: epoch 050, train_loss: 1.5560 +2023-09-11 15:31:21 - eval: epoch: 050, acc1: 65.496%, acc5: 86.734%, test_loss: 1.4242, per_image_load_time: 1.440ms, per_image_inference_time: 0.287ms +2023-09-11 15:31:21 - until epoch: 050, best_acc1: 66.222% +2023-09-11 15:31:21 - epoch 051 lr: 0.010000 +2023-09-11 15:32:06 - train: epoch 0051, iter [00100, 05004], lr: 0.010000, loss: 1.6383 +2023-09-11 15:32:43 - train: epoch 0051, iter [00200, 05004], lr: 0.010000, loss: 1.7568 +2023-09-11 15:33:19 - train: epoch 0051, iter [00300, 05004], lr: 0.010000, loss: 1.4477 +2023-09-11 15:33:57 - train: epoch 0051, iter [00400, 05004], lr: 0.010000, loss: 1.3811 +2023-09-11 15:34:34 - train: epoch 0051, iter [00500, 05004], lr: 0.010000, loss: 1.5549 +2023-09-11 15:35:12 - train: epoch 0051, iter [00600, 05004], lr: 0.010000, loss: 1.5690 +2023-09-11 15:35:47 - train: epoch 0051, iter [00700, 05004], lr: 0.010000, loss: 1.6791 +2023-09-11 15:36:23 - train: epoch 0051, iter [00800, 05004], lr: 0.010000, loss: 1.6482 +2023-09-11 15:37:00 - train: epoch 0051, iter [00900, 05004], lr: 0.010000, loss: 1.5275 +2023-09-11 15:37:36 - train: epoch 0051, iter [01000, 05004], lr: 0.010000, loss: 1.7449 +2023-09-11 15:38:13 - train: epoch 0051, iter [01100, 05004], lr: 0.010000, loss: 1.5251 +2023-09-11 15:38:54 - train: epoch 0051, iter [01200, 05004], lr: 0.010000, loss: 1.3772 +2023-09-11 15:39:29 - train: epoch 0051, iter [01300, 05004], lr: 0.010000, loss: 1.2853 +2023-09-11 15:40:05 - train: epoch 0051, iter [01400, 05004], lr: 0.010000, loss: 1.6047 +2023-09-11 15:40:44 - train: epoch 0051, iter [01500, 05004], lr: 0.010000, loss: 1.5187 +2023-09-11 15:41:19 - train: epoch 0051, iter [01600, 05004], lr: 0.010000, loss: 1.4715 +2023-09-11 15:41:55 - train: epoch 0051, iter [01700, 05004], lr: 0.010000, loss: 1.6509 +2023-09-11 15:42:33 - train: epoch 0051, iter [01800, 05004], lr: 0.010000, loss: 1.5858 +2023-09-11 15:43:08 - train: epoch 0051, iter [01900, 05004], lr: 0.010000, loss: 1.7050 +2023-09-11 15:43:43 - train: epoch 0051, iter [02000, 05004], lr: 0.010000, loss: 1.3543 +2023-09-11 15:44:21 - train: epoch 0051, iter [02100, 05004], lr: 0.010000, loss: 1.5741 +2023-09-11 15:44:57 - train: epoch 0051, iter [02200, 05004], lr: 0.010000, loss: 1.5670 +2023-09-11 15:45:32 - train: epoch 0051, iter [02300, 05004], lr: 0.010000, loss: 1.6840 +2023-09-11 15:46:09 - train: epoch 0051, iter [02400, 05004], lr: 0.010000, loss: 1.6036 +2023-09-11 15:46:45 - train: epoch 0051, iter [02500, 05004], lr: 0.010000, loss: 1.6378 +2023-09-11 15:47:23 - train: epoch 0051, iter [02600, 05004], lr: 0.010000, loss: 1.4654 +2023-09-11 15:47:58 - train: epoch 0051, iter [02700, 05004], lr: 0.010000, loss: 1.5878 +2023-09-11 15:48:33 - train: epoch 0051, iter [02800, 05004], lr: 0.010000, loss: 1.5982 +2023-09-11 15:49:12 - train: epoch 0051, iter [02900, 05004], lr: 0.010000, loss: 1.5709 +2023-09-11 15:49:48 - train: epoch 0051, iter [03000, 05004], lr: 0.010000, loss: 1.5415 +2023-09-11 15:50:22 - train: epoch 0051, iter [03100, 05004], lr: 0.010000, loss: 1.4871 +2023-09-11 15:51:00 - train: epoch 0051, iter [03200, 05004], lr: 0.010000, loss: 1.5497 +2023-09-11 15:51:35 - train: epoch 0051, iter [03300, 05004], lr: 0.010000, loss: 1.6203 +2023-09-11 15:52:11 - train: epoch 0051, iter [03400, 05004], lr: 0.010000, loss: 1.5693 +2023-09-11 15:52:49 - train: epoch 0051, iter [03500, 05004], lr: 0.010000, loss: 1.4243 +2023-09-11 15:53:23 - train: epoch 0051, iter [03600, 05004], lr: 0.010000, 
loss: 1.5237 +2023-09-11 15:54:00 - train: epoch 0051, iter [03700, 05004], lr: 0.010000, loss: 1.7027 +2023-09-11 15:54:36 - train: epoch 0051, iter [03800, 05004], lr: 0.010000, loss: 1.4874 +2023-09-11 15:55:12 - train: epoch 0051, iter [03900, 05004], lr: 0.010000, loss: 1.7286 +2023-09-11 15:55:50 - train: epoch 0051, iter [04000, 05004], lr: 0.010000, loss: 1.6819 +2023-09-11 15:56:27 - train: epoch 0051, iter [04100, 05004], lr: 0.010000, loss: 1.6830 +2023-09-11 15:57:01 - train: epoch 0051, iter [04200, 05004], lr: 0.010000, loss: 1.6961 +2023-09-11 15:57:39 - train: epoch 0051, iter [04300, 05004], lr: 0.010000, loss: 1.4870 +2023-09-11 15:58:14 - train: epoch 0051, iter [04400, 05004], lr: 0.010000, loss: 1.6108 +2023-09-11 15:58:50 - train: epoch 0051, iter [04500, 05004], lr: 0.010000, loss: 1.5309 +2023-09-11 15:59:28 - train: epoch 0051, iter [04600, 05004], lr: 0.010000, loss: 1.8584 +2023-09-11 16:00:04 - train: epoch 0051, iter [04700, 05004], lr: 0.010000, loss: 1.4852 +2023-09-11 16:00:40 - train: epoch 0051, iter [04800, 05004], lr: 0.010000, loss: 1.5596 +2023-09-11 16:01:19 - train: epoch 0051, iter [04900, 05004], lr: 0.010000, loss: 1.6232 +2023-09-11 16:01:51 - train: epoch 0051, iter [05000, 05004], lr: 0.010000, loss: 1.6674 +2023-09-11 16:01:51 - train: epoch 051, train_loss: 1.5555 +2023-09-11 16:03:15 - eval: epoch: 051, acc1: 65.180%, acc5: 86.364%, test_loss: 1.4384, per_image_load_time: 1.362ms, per_image_inference_time: 0.280ms +2023-09-11 16:03:15 - until epoch: 051, best_acc1: 66.222% +2023-09-11 16:03:15 - epoch 052 lr: 0.010000 +2023-09-11 16:03:59 - train: epoch 0052, iter [00100, 05004], lr: 0.010000, loss: 1.4581 +2023-09-11 16:04:35 - train: epoch 0052, iter [00200, 05004], lr: 0.010000, loss: 1.6169 +2023-09-11 16:05:11 - train: epoch 0052, iter [00300, 05004], lr: 0.010000, loss: 1.5144 +2023-09-11 16:05:51 - train: epoch 0052, iter [00400, 05004], lr: 0.010000, loss: 1.5037 +2023-09-11 16:06:25 - train: epoch 0052, iter [00500, 05004], lr: 0.010000, loss: 1.6981 +2023-09-11 16:07:01 - train: epoch 0052, iter [00600, 05004], lr: 0.010000, loss: 1.3587 +2023-09-11 16:07:37 - train: epoch 0052, iter [00700, 05004], lr: 0.010000, loss: 1.5550 +2023-09-11 16:08:16 - train: epoch 0052, iter [00800, 05004], lr: 0.010000, loss: 1.4902 +2023-09-11 16:08:51 - train: epoch 0052, iter [00900, 05004], lr: 0.010000, loss: 1.6340 +2023-09-11 16:09:26 - train: epoch 0052, iter [01000, 05004], lr: 0.010000, loss: 1.6120 +2023-09-11 16:10:05 - train: epoch 0052, iter [01100, 05004], lr: 0.010000, loss: 1.4215 +2023-09-11 16:10:39 - train: epoch 0052, iter [01200, 05004], lr: 0.010000, loss: 1.5077 +2023-09-11 16:11:15 - train: epoch 0052, iter [01300, 05004], lr: 0.010000, loss: 1.4758 +2023-09-11 16:11:53 - train: epoch 0052, iter [01400, 05004], lr: 0.010000, loss: 1.6757 +2023-09-11 16:12:28 - train: epoch 0052, iter [01500, 05004], lr: 0.010000, loss: 1.3831 +2023-09-11 16:13:03 - train: epoch 0052, iter [01600, 05004], lr: 0.010000, loss: 1.3223 +2023-09-11 16:13:42 - train: epoch 0052, iter [01700, 05004], lr: 0.010000, loss: 1.2680 +2023-09-11 16:14:17 - train: epoch 0052, iter [01800, 05004], lr: 0.010000, loss: 1.5632 +2023-09-11 16:14:53 - train: epoch 0052, iter [01900, 05004], lr: 0.010000, loss: 1.4293 +2023-09-11 16:15:31 - train: epoch 0052, iter [02000, 05004], lr: 0.010000, loss: 1.6170 +2023-09-11 16:16:07 - train: epoch 0052, iter [02100, 05004], lr: 0.010000, loss: 1.3031 +2023-09-11 16:16:41 - train: epoch 0052, iter [02200, 05004], lr: 
0.010000, loss: 1.7783 +2023-09-11 16:17:20 - train: epoch 0052, iter [02300, 05004], lr: 0.010000, loss: 1.3295 +2023-09-11 16:17:55 - train: epoch 0052, iter [02400, 05004], lr: 0.010000, loss: 1.3861 +2023-09-11 16:18:31 - train: epoch 0052, iter [02500, 05004], lr: 0.010000, loss: 1.4389 +2023-09-11 16:19:10 - train: epoch 0052, iter [02600, 05004], lr: 0.010000, loss: 1.3705 +2023-09-11 16:19:46 - train: epoch 0052, iter [02700, 05004], lr: 0.010000, loss: 1.4274 +2023-09-11 16:20:22 - train: epoch 0052, iter [02800, 05004], lr: 0.010000, loss: 1.6863 +2023-09-11 16:21:02 - train: epoch 0052, iter [02900, 05004], lr: 0.010000, loss: 1.3742 +2023-09-11 16:21:36 - train: epoch 0052, iter [03000, 05004], lr: 0.010000, loss: 1.3511 +2023-09-11 16:22:11 - train: epoch 0052, iter [03100, 05004], lr: 0.010000, loss: 1.8009 +2023-09-11 16:22:49 - train: epoch 0052, iter [03200, 05004], lr: 0.010000, loss: 1.6042 +2023-09-11 16:23:24 - train: epoch 0052, iter [03300, 05004], lr: 0.010000, loss: 1.5597 +2023-09-11 16:24:01 - train: epoch 0052, iter [03400, 05004], lr: 0.010000, loss: 1.7601 +2023-09-11 16:24:38 - train: epoch 0052, iter [03500, 05004], lr: 0.010000, loss: 1.6138 +2023-09-11 16:25:14 - train: epoch 0052, iter [03600, 05004], lr: 0.010000, loss: 1.6697 +2023-09-11 16:25:52 - train: epoch 0052, iter [03700, 05004], lr: 0.010000, loss: 1.6876 +2023-09-11 16:26:27 - train: epoch 0052, iter [03800, 05004], lr: 0.010000, loss: 1.4328 +2023-09-11 16:27:04 - train: epoch 0052, iter [03900, 05004], lr: 0.010000, loss: 1.4303 +2023-09-11 16:27:40 - train: epoch 0052, iter [04000, 05004], lr: 0.010000, loss: 1.7602 +2023-09-11 16:28:17 - train: epoch 0052, iter [04100, 05004], lr: 0.010000, loss: 1.5457 +2023-09-11 16:28:52 - train: epoch 0052, iter [04200, 05004], lr: 0.010000, loss: 1.6621 +2023-09-11 16:29:30 - train: epoch 0052, iter [04300, 05004], lr: 0.010000, loss: 1.5678 +2023-09-11 16:30:06 - train: epoch 0052, iter [04400, 05004], lr: 0.010000, loss: 1.5412 +2023-09-11 16:30:40 - train: epoch 0052, iter [04500, 05004], lr: 0.010000, loss: 1.4255 +2023-09-11 16:31:19 - train: epoch 0052, iter [04600, 05004], lr: 0.010000, loss: 1.5921 +2023-09-11 16:31:54 - train: epoch 0052, iter [04700, 05004], lr: 0.010000, loss: 1.5920 +2023-09-11 16:32:31 - train: epoch 0052, iter [04800, 05004], lr: 0.010000, loss: 1.4198 +2023-09-11 16:33:08 - train: epoch 0052, iter [04900, 05004], lr: 0.010000, loss: 1.4872 +2023-09-11 16:33:41 - train: epoch 0052, iter [05000, 05004], lr: 0.010000, loss: 1.5997 +2023-09-11 16:33:42 - train: epoch 052, train_loss: 1.5506 +2023-09-11 16:35:06 - eval: epoch: 052, acc1: 64.568%, acc5: 86.164%, test_loss: 1.4579, per_image_load_time: 1.353ms, per_image_inference_time: 0.310ms +2023-09-11 16:35:06 - until epoch: 052, best_acc1: 66.222% +2023-09-11 16:35:06 - epoch 053 lr: 0.010000 +2023-09-11 16:35:50 - train: epoch 0053, iter [00100, 05004], lr: 0.010000, loss: 1.7793 +2023-09-11 16:36:27 - train: epoch 0053, iter [00200, 05004], lr: 0.010000, loss: 1.5884 +2023-09-11 16:37:02 - train: epoch 0053, iter [00300, 05004], lr: 0.010000, loss: 1.5358 +2023-09-11 16:37:40 - train: epoch 0053, iter [00400, 05004], lr: 0.010000, loss: 1.4726 +2023-09-11 16:38:15 - train: epoch 0053, iter [00500, 05004], lr: 0.010000, loss: 1.3477 +2023-09-11 16:38:50 - train: epoch 0053, iter [00600, 05004], lr: 0.010000, loss: 1.4521 +2023-09-11 16:39:28 - train: epoch 0053, iter [00700, 05004], lr: 0.010000, loss: 1.4111 +2023-09-11 16:40:03 - train: epoch 0053, iter [00800, 
05004], lr: 0.010000, loss: 1.4965 +2023-09-11 16:40:42 - train: epoch 0053, iter [00900, 05004], lr: 0.010000, loss: 1.5111 +2023-09-11 16:41:17 - train: epoch 0053, iter [01000, 05004], lr: 0.010000, loss: 1.6141 +2023-09-11 16:41:51 - train: epoch 0053, iter [01100, 05004], lr: 0.010000, loss: 1.5596 +2023-09-11 16:42:29 - train: epoch 0053, iter [01200, 05004], lr: 0.010000, loss: 1.2897 +2023-09-11 16:43:06 - train: epoch 0053, iter [01300, 05004], lr: 0.010000, loss: 1.5554 +2023-09-11 16:43:41 - train: epoch 0053, iter [01400, 05004], lr: 0.010000, loss: 1.8281 +2023-09-11 16:44:19 - train: epoch 0053, iter [01500, 05004], lr: 0.010000, loss: 1.5643 +2023-09-11 16:44:55 - train: epoch 0053, iter [01600, 05004], lr: 0.010000, loss: 1.8368 +2023-09-11 16:45:31 - train: epoch 0053, iter [01700, 05004], lr: 0.010000, loss: 1.5792 +2023-09-11 16:46:09 - train: epoch 0053, iter [01800, 05004], lr: 0.010000, loss: 1.6120 +2023-09-11 16:46:45 - train: epoch 0053, iter [01900, 05004], lr: 0.010000, loss: 1.5268 +2023-09-11 16:47:20 - train: epoch 0053, iter [02000, 05004], lr: 0.010000, loss: 1.4849 +2023-09-11 16:47:58 - train: epoch 0053, iter [02100, 05004], lr: 0.010000, loss: 1.6732 +2023-09-11 16:48:34 - train: epoch 0053, iter [02200, 05004], lr: 0.010000, loss: 1.5588 +2023-09-11 16:49:09 - train: epoch 0053, iter [02300, 05004], lr: 0.010000, loss: 1.5285 +2023-09-11 16:49:49 - train: epoch 0053, iter [02400, 05004], lr: 0.010000, loss: 1.5397 +2023-09-11 16:50:23 - train: epoch 0053, iter [02500, 05004], lr: 0.010000, loss: 1.7314 +2023-09-11 16:50:58 - train: epoch 0053, iter [02600, 05004], lr: 0.010000, loss: 1.6678 +2023-09-11 16:51:36 - train: epoch 0053, iter [02700, 05004], lr: 0.010000, loss: 1.5550 +2023-09-11 16:52:12 - train: epoch 0053, iter [02800, 05004], lr: 0.010000, loss: 1.6619 +2023-09-11 16:52:48 - train: epoch 0053, iter [02900, 05004], lr: 0.010000, loss: 1.4952 +2023-09-11 16:53:24 - train: epoch 0053, iter [03000, 05004], lr: 0.010000, loss: 1.5772 +2023-09-11 16:54:03 - train: epoch 0053, iter [03100, 05004], lr: 0.010000, loss: 1.6151 +2023-09-11 16:54:38 - train: epoch 0053, iter [03200, 05004], lr: 0.010000, loss: 1.6251 +2023-09-11 16:55:14 - train: epoch 0053, iter [03300, 05004], lr: 0.010000, loss: 1.5765 +2023-09-11 16:55:51 - train: epoch 0053, iter [03400, 05004], lr: 0.010000, loss: 1.5163 +2023-09-11 16:56:29 - train: epoch 0053, iter [03500, 05004], lr: 0.010000, loss: 1.6776 +2023-09-11 16:57:06 - train: epoch 0053, iter [03600, 05004], lr: 0.010000, loss: 1.5707 +2023-09-11 16:57:41 - train: epoch 0053, iter [03700, 05004], lr: 0.010000, loss: 1.6218 +2023-09-11 16:58:17 - train: epoch 0053, iter [03800, 05004], lr: 0.010000, loss: 1.4088 +2023-09-11 16:58:55 - train: epoch 0053, iter [03900, 05004], lr: 0.010000, loss: 1.9028 +2023-09-11 16:59:30 - train: epoch 0053, iter [04000, 05004], lr: 0.010000, loss: 1.6548 +2023-09-11 17:00:06 - train: epoch 0053, iter [04100, 05004], lr: 0.010000, loss: 1.8398 +2023-09-11 17:00:46 - train: epoch 0053, iter [04200, 05004], lr: 0.010000, loss: 1.6154 +2023-09-11 17:01:21 - train: epoch 0053, iter [04300, 05004], lr: 0.010000, loss: 1.8584 +2023-09-11 17:01:58 - train: epoch 0053, iter [04400, 05004], lr: 0.010000, loss: 1.5877 +2023-09-11 17:02:34 - train: epoch 0053, iter [04500, 05004], lr: 0.010000, loss: 1.7045 +2023-09-11 17:03:12 - train: epoch 0053, iter [04600, 05004], lr: 0.010000, loss: 1.5110 +2023-09-11 17:03:47 - train: epoch 0053, iter [04700, 05004], lr: 0.010000, loss: 1.4405 
+2023-09-11 17:04:24 - train: epoch 0053, iter [04800, 05004], lr: 0.010000, loss: 1.7805 +2023-09-11 17:05:01 - train: epoch 0053, iter [04900, 05004], lr: 0.010000, loss: 1.4346 +2023-09-11 17:05:34 - train: epoch 0053, iter [05000, 05004], lr: 0.010000, loss: 1.5044 +2023-09-11 17:05:35 - train: epoch 053, train_loss: 1.5527 +2023-09-11 17:07:01 - eval: epoch: 053, acc1: 65.572%, acc5: 86.782%, test_loss: 1.4168, per_image_load_time: 1.394ms, per_image_inference_time: 0.305ms +2023-09-11 17:07:01 - until epoch: 053, best_acc1: 66.222% +2023-09-11 17:07:01 - epoch 054 lr: 0.010000 +2023-09-11 17:07:46 - train: epoch 0054, iter [00100, 05004], lr: 0.010000, loss: 1.3956 +2023-09-11 17:08:24 - train: epoch 0054, iter [00200, 05004], lr: 0.010000, loss: 1.5917 +2023-09-11 17:08:59 - train: epoch 0054, iter [00300, 05004], lr: 0.010000, loss: 1.5664 +2023-09-11 17:09:36 - train: epoch 0054, iter [00400, 05004], lr: 0.010000, loss: 1.4313 +2023-09-11 17:10:12 - train: epoch 0054, iter [00500, 05004], lr: 0.010000, loss: 1.4019 +2023-09-11 17:10:49 - train: epoch 0054, iter [00600, 05004], lr: 0.010000, loss: 1.4019 +2023-09-11 17:11:25 - train: epoch 0054, iter [00700, 05004], lr: 0.010000, loss: 1.6145 +2023-09-11 17:12:02 - train: epoch 0054, iter [00800, 05004], lr: 0.010000, loss: 1.8457 +2023-09-11 17:12:38 - train: epoch 0054, iter [00900, 05004], lr: 0.010000, loss: 1.3279 +2023-09-11 17:13:13 - train: epoch 0054, iter [01000, 05004], lr: 0.010000, loss: 1.3430 +2023-09-11 17:13:52 - train: epoch 0054, iter [01100, 05004], lr: 0.010000, loss: 1.4344 +2023-09-11 17:14:28 - train: epoch 0054, iter [01200, 05004], lr: 0.010000, loss: 1.6175 +2023-09-11 17:15:03 - train: epoch 0054, iter [01300, 05004], lr: 0.010000, loss: 1.3262 +2023-09-11 17:15:43 - train: epoch 0054, iter [01400, 05004], lr: 0.010000, loss: 1.7864 +2023-09-11 17:16:18 - train: epoch 0054, iter [01500, 05004], lr: 0.010000, loss: 1.5321 +2023-09-11 17:16:53 - train: epoch 0054, iter [01600, 05004], lr: 0.010000, loss: 1.4025 +2023-09-11 17:17:31 - train: epoch 0054, iter [01700, 05004], lr: 0.010000, loss: 1.4692 +2023-09-11 17:18:07 - train: epoch 0054, iter [01800, 05004], lr: 0.010000, loss: 1.3175 +2023-09-11 17:18:41 - train: epoch 0054, iter [01900, 05004], lr: 0.010000, loss: 1.7254 +2023-09-11 17:19:19 - train: epoch 0054, iter [02000, 05004], lr: 0.010000, loss: 1.7421 +2023-09-11 17:19:54 - train: epoch 0054, iter [02100, 05004], lr: 0.010000, loss: 1.4453 +2023-09-11 17:20:31 - train: epoch 0054, iter [02200, 05004], lr: 0.010000, loss: 1.3886 +2023-09-11 17:21:08 - train: epoch 0054, iter [02300, 05004], lr: 0.010000, loss: 1.6706 +2023-09-11 17:21:45 - train: epoch 0054, iter [02400, 05004], lr: 0.010000, loss: 1.7259 +2023-09-11 17:22:20 - train: epoch 0054, iter [02500, 05004], lr: 0.010000, loss: 1.6257 +2023-09-11 17:22:57 - train: epoch 0054, iter [02600, 05004], lr: 0.010000, loss: 1.4916 +2023-09-11 17:23:34 - train: epoch 0054, iter [02700, 05004], lr: 0.010000, loss: 1.5892 +2023-09-11 17:24:10 - train: epoch 0054, iter [02800, 05004], lr: 0.010000, loss: 1.6579 +2023-09-11 17:24:47 - train: epoch 0054, iter [02900, 05004], lr: 0.010000, loss: 1.5024 +2023-09-11 17:25:24 - train: epoch 0054, iter [03000, 05004], lr: 0.010000, loss: 1.5346 +2023-09-11 17:26:01 - train: epoch 0054, iter [03100, 05004], lr: 0.010000, loss: 1.6158 +2023-09-11 17:26:37 - train: epoch 0054, iter [03200, 05004], lr: 0.010000, loss: 1.6636 +2023-09-11 17:27:16 - train: epoch 0054, iter [03300, 05004], lr: 0.010000, loss: 
1.6323 +2023-09-11 17:27:52 - train: epoch 0054, iter [03400, 05004], lr: 0.010000, loss: 1.3619 +2023-09-11 17:28:29 - train: epoch 0054, iter [03500, 05004], lr: 0.010000, loss: 1.6025 +2023-09-11 17:29:05 - train: epoch 0054, iter [03600, 05004], lr: 0.010000, loss: 1.6756 +2023-09-11 17:29:43 - train: epoch 0054, iter [03700, 05004], lr: 0.010000, loss: 1.4037 +2023-09-11 17:30:18 - train: epoch 0054, iter [03800, 05004], lr: 0.010000, loss: 1.4874 +2023-09-11 17:30:58 - train: epoch 0054, iter [03900, 05004], lr: 0.010000, loss: 1.6079 +2023-09-11 17:31:34 - train: epoch 0054, iter [04000, 05004], lr: 0.010000, loss: 1.5911 +2023-09-11 17:32:09 - train: epoch 0054, iter [04100, 05004], lr: 0.010000, loss: 1.6247 +2023-09-11 17:32:48 - train: epoch 0054, iter [04200, 05004], lr: 0.010000, loss: 1.5639 +2023-09-11 17:33:24 - train: epoch 0054, iter [04300, 05004], lr: 0.010000, loss: 1.5212 +2023-09-11 17:34:00 - train: epoch 0054, iter [04400, 05004], lr: 0.010000, loss: 1.3221 +2023-09-11 17:34:38 - train: epoch 0054, iter [04500, 05004], lr: 0.010000, loss: 1.4961 +2023-09-11 17:35:15 - train: epoch 0054, iter [04600, 05004], lr: 0.010000, loss: 1.8115 +2023-09-11 17:35:51 - train: epoch 0054, iter [04700, 05004], lr: 0.010000, loss: 1.8088 +2023-09-11 17:36:29 - train: epoch 0054, iter [04800, 05004], lr: 0.010000, loss: 1.6425 +2023-09-11 17:37:05 - train: epoch 0054, iter [04900, 05004], lr: 0.010000, loss: 1.5287 +2023-09-11 17:37:40 - train: epoch 0054, iter [05000, 05004], lr: 0.010000, loss: 1.5112 +2023-09-11 17:37:41 - train: epoch 054, train_loss: 1.5521 +2023-09-11 17:39:06 - eval: epoch: 054, acc1: 65.188%, acc5: 86.500%, test_loss: 1.4337, per_image_load_time: 1.369ms, per_image_inference_time: 0.315ms +2023-09-11 17:39:06 - until epoch: 054, best_acc1: 66.222% +2023-09-11 17:39:06 - epoch 055 lr: 0.010000 +2023-09-11 17:39:54 - train: epoch 0055, iter [00100, 05004], lr: 0.010000, loss: 1.3931 +2023-09-11 17:40:32 - train: epoch 0055, iter [00200, 05004], lr: 0.010000, loss: 1.5437 +2023-09-11 17:41:08 - train: epoch 0055, iter [00300, 05004], lr: 0.010000, loss: 1.4753 +2023-09-11 17:41:48 - train: epoch 0055, iter [00400, 05004], lr: 0.010000, loss: 1.3505 +2023-09-11 17:42:23 - train: epoch 0055, iter [00500, 05004], lr: 0.010000, loss: 1.3542 +2023-09-11 17:42:58 - train: epoch 0055, iter [00600, 05004], lr: 0.010000, loss: 1.6572 +2023-09-11 17:43:37 - train: epoch 0055, iter [00700, 05004], lr: 0.010000, loss: 1.4695 +2023-09-11 17:44:12 - train: epoch 0055, iter [00800, 05004], lr: 0.010000, loss: 1.4408 +2023-09-11 17:44:48 - train: epoch 0055, iter [00900, 05004], lr: 0.010000, loss: 1.6193 +2023-09-11 17:45:27 - train: epoch 0055, iter [01000, 05004], lr: 0.010000, loss: 1.7210 +2023-09-11 17:46:03 - train: epoch 0055, iter [01100, 05004], lr: 0.010000, loss: 1.6130 +2023-09-11 17:46:38 - train: epoch 0055, iter [01200, 05004], lr: 0.010000, loss: 1.5929 +2023-09-11 17:47:17 - train: epoch 0055, iter [01300, 05004], lr: 0.010000, loss: 1.6551 +2023-09-11 17:47:53 - train: epoch 0055, iter [01400, 05004], lr: 0.010000, loss: 1.5675 +2023-09-11 17:48:29 - train: epoch 0055, iter [01500, 05004], lr: 0.010000, loss: 1.3537 +2023-09-11 17:49:08 - train: epoch 0055, iter [01600, 05004], lr: 0.010000, loss: 1.6373 +2023-09-11 17:49:43 - train: epoch 0055, iter [01700, 05004], lr: 0.010000, loss: 1.7512 +2023-09-11 17:50:20 - train: epoch 0055, iter [01800, 05004], lr: 0.010000, loss: 1.4304 +2023-09-11 17:50:58 - train: epoch 0055, iter [01900, 05004], lr: 0.010000, 
loss: 1.6850 +2023-09-11 17:51:33 - train: epoch 0055, iter [02000, 05004], lr: 0.010000, loss: 1.6944 +2023-09-11 17:52:09 - train: epoch 0055, iter [02100, 05004], lr: 0.010000, loss: 1.3987 +2023-09-11 17:52:49 - train: epoch 0055, iter [02200, 05004], lr: 0.010000, loss: 1.7127 +2023-09-11 17:53:25 - train: epoch 0055, iter [02300, 05004], lr: 0.010000, loss: 1.5416 +2023-09-11 17:54:01 - train: epoch 0055, iter [02400, 05004], lr: 0.010000, loss: 1.5140 +2023-09-11 17:54:39 - train: epoch 0055, iter [02500, 05004], lr: 0.010000, loss: 1.6078 +2023-09-11 17:55:15 - train: epoch 0055, iter [02600, 05004], lr: 0.010000, loss: 1.4360 +2023-09-11 17:55:51 - train: epoch 0055, iter [02700, 05004], lr: 0.010000, loss: 1.5803 +2023-09-11 17:56:30 - train: epoch 0055, iter [02800, 05004], lr: 0.010000, loss: 1.6553 +2023-09-11 17:57:05 - train: epoch 0055, iter [02900, 05004], lr: 0.010000, loss: 1.5287 +2023-09-11 17:57:41 - train: epoch 0055, iter [03000, 05004], lr: 0.010000, loss: 1.6763 +2023-09-11 17:58:20 - train: epoch 0055, iter [03100, 05004], lr: 0.010000, loss: 1.6286 +2023-09-11 17:58:56 - train: epoch 0055, iter [03200, 05004], lr: 0.010000, loss: 1.5293 +2023-09-11 17:59:32 - train: epoch 0055, iter [03300, 05004], lr: 0.010000, loss: 1.4935 +2023-09-11 18:00:09 - train: epoch 0055, iter [03400, 05004], lr: 0.010000, loss: 1.4847 +2023-09-11 18:00:45 - train: epoch 0055, iter [03500, 05004], lr: 0.010000, loss: 1.5623 +2023-09-11 18:01:22 - train: epoch 0055, iter [03600, 05004], lr: 0.010000, loss: 1.6034 +2023-09-11 18:02:00 - train: epoch 0055, iter [03700, 05004], lr: 0.010000, loss: 1.3576 +2023-09-11 18:02:36 - train: epoch 0055, iter [03800, 05004], lr: 0.010000, loss: 1.5495 +2023-09-11 18:03:12 - train: epoch 0055, iter [03900, 05004], lr: 0.010000, loss: 1.7496 +2023-09-11 18:03:50 - train: epoch 0055, iter [04000, 05004], lr: 0.010000, loss: 1.4783 +2023-09-11 18:04:25 - train: epoch 0055, iter [04100, 05004], lr: 0.010000, loss: 1.5758 +2023-09-11 18:05:02 - train: epoch 0055, iter [04200, 05004], lr: 0.010000, loss: 1.6223 +2023-09-11 18:05:41 - train: epoch 0055, iter [04300, 05004], lr: 0.010000, loss: 1.6593 +2023-09-11 18:06:17 - train: epoch 0055, iter [04400, 05004], lr: 0.010000, loss: 1.5603 +2023-09-11 18:06:53 - train: epoch 0055, iter [04500, 05004], lr: 0.010000, loss: 1.5942 +2023-09-11 18:07:31 - train: epoch 0055, iter [04600, 05004], lr: 0.010000, loss: 1.6215 +2023-09-11 18:08:08 - train: epoch 0055, iter [04700, 05004], lr: 0.010000, loss: 1.6294 +2023-09-11 18:08:45 - train: epoch 0055, iter [04800, 05004], lr: 0.010000, loss: 1.5303 +2023-09-11 18:09:22 - train: epoch 0055, iter [04900, 05004], lr: 0.010000, loss: 1.5280 +2023-09-11 18:09:56 - train: epoch 0055, iter [05000, 05004], lr: 0.010000, loss: 1.5479 +2023-09-11 18:09:56 - train: epoch 055, train_loss: 1.5500 +2023-09-11 18:11:22 - eval: epoch: 055, acc1: 65.354%, acc5: 86.618%, test_loss: 1.4193, per_image_load_time: 1.418ms, per_image_inference_time: 0.284ms +2023-09-11 18:11:23 - until epoch: 055, best_acc1: 66.222% +2023-09-11 18:11:23 - epoch 056 lr: 0.010000 +2023-09-11 18:12:10 - train: epoch 0056, iter [00100, 05004], lr: 0.010000, loss: 1.5094 +2023-09-11 18:12:47 - train: epoch 0056, iter [00200, 05004], lr: 0.010000, loss: 1.5371 +2023-09-11 18:13:22 - train: epoch 0056, iter [00300, 05004], lr: 0.010000, loss: 1.5350 +2023-09-11 18:14:01 - train: epoch 0056, iter [00400, 05004], lr: 0.010000, loss: 1.4082 +2023-09-11 18:14:37 - train: epoch 0056, iter [00500, 05004], lr: 
0.010000, loss: 1.6451 +2023-09-11 18:15:12 - train: epoch 0056, iter [00600, 05004], lr: 0.010000, loss: 1.4810 +2023-09-11 18:15:52 - train: epoch 0056, iter [00700, 05004], lr: 0.010000, loss: 1.5959 +2023-09-11 18:16:28 - train: epoch 0056, iter [00800, 05004], lr: 0.010000, loss: 1.7142 +2023-09-11 18:17:03 - train: epoch 0056, iter [00900, 05004], lr: 0.010000, loss: 1.4298 +2023-09-11 18:17:42 - train: epoch 0056, iter [01000, 05004], lr: 0.010000, loss: 1.5812 +2023-09-11 18:18:18 - train: epoch 0056, iter [01100, 05004], lr: 0.010000, loss: 1.4015 +2023-09-11 18:18:55 - train: epoch 0056, iter [01200, 05004], lr: 0.010000, loss: 1.5590 +2023-09-11 18:19:34 - train: epoch 0056, iter [01300, 05004], lr: 0.010000, loss: 1.7134 +2023-09-11 18:20:09 - train: epoch 0056, iter [01400, 05004], lr: 0.010000, loss: 1.4702 +2023-09-11 18:20:45 - train: epoch 0056, iter [01500, 05004], lr: 0.010000, loss: 1.9976 +2023-09-11 18:21:24 - train: epoch 0056, iter [01600, 05004], lr: 0.010000, loss: 1.6072 +2023-09-11 18:22:01 - train: epoch 0056, iter [01700, 05004], lr: 0.010000, loss: 1.6555 +2023-09-11 18:22:36 - train: epoch 0056, iter [01800, 05004], lr: 0.010000, loss: 1.8281 +2023-09-11 18:23:15 - train: epoch 0056, iter [01900, 05004], lr: 0.010000, loss: 1.5276 +2023-09-11 18:23:51 - train: epoch 0056, iter [02000, 05004], lr: 0.010000, loss: 1.4992 +2023-09-11 18:24:27 - train: epoch 0056, iter [02100, 05004], lr: 0.010000, loss: 1.5910 +2023-09-11 18:25:06 - train: epoch 0056, iter [02200, 05004], lr: 0.010000, loss: 1.7321 +2023-09-11 18:25:43 - train: epoch 0056, iter [02300, 05004], lr: 0.010000, loss: 1.6637 +2023-09-11 18:26:18 - train: epoch 0056, iter [02400, 05004], lr: 0.010000, loss: 1.4340 +2023-09-11 18:26:55 - train: epoch 0056, iter [02500, 05004], lr: 0.010000, loss: 1.7524 +2023-09-11 18:27:34 - train: epoch 0056, iter [02600, 05004], lr: 0.010000, loss: 1.4910 +2023-09-11 18:28:10 - train: epoch 0056, iter [02700, 05004], lr: 0.010000, loss: 1.3746 +2023-09-11 18:28:48 - train: epoch 0056, iter [02800, 05004], lr: 0.010000, loss: 1.5972 +2023-09-11 18:29:27 - train: epoch 0056, iter [02900, 05004], lr: 0.010000, loss: 1.6908 +2023-09-11 18:30:02 - train: epoch 0056, iter [03000, 05004], lr: 0.010000, loss: 1.7832 +2023-09-11 18:30:39 - train: epoch 0056, iter [03100, 05004], lr: 0.010000, loss: 1.4287 +2023-09-11 18:31:18 - train: epoch 0056, iter [03200, 05004], lr: 0.010000, loss: 1.4293 +2023-09-11 18:31:54 - train: epoch 0056, iter [03300, 05004], lr: 0.010000, loss: 1.4974 +2023-09-11 18:32:30 - train: epoch 0056, iter [03400, 05004], lr: 0.010000, loss: 1.3824 +2023-09-11 18:33:08 - train: epoch 0056, iter [03500, 05004], lr: 0.010000, loss: 1.4884 +2023-09-11 18:33:44 - train: epoch 0056, iter [03600, 05004], lr: 0.010000, loss: 1.3626 +2023-09-11 18:34:19 - train: epoch 0056, iter [03700, 05004], lr: 0.010000, loss: 1.4858 +2023-09-11 18:34:59 - train: epoch 0056, iter [03800, 05004], lr: 0.010000, loss: 1.5668 +2023-09-11 18:35:35 - train: epoch 0056, iter [03900, 05004], lr: 0.010000, loss: 1.7733 +2023-09-11 18:36:11 - train: epoch 0056, iter [04000, 05004], lr: 0.010000, loss: 1.5877 +2023-09-11 18:36:50 - train: epoch 0056, iter [04100, 05004], lr: 0.010000, loss: 1.7009 +2023-09-11 18:37:26 - train: epoch 0056, iter [04200, 05004], lr: 0.010000, loss: 1.7711 +2023-09-11 18:38:03 - train: epoch 0056, iter [04300, 05004], lr: 0.010000, loss: 1.4535 +2023-09-11 18:38:42 - train: epoch 0056, iter [04400, 05004], lr: 0.010000, loss: 1.6397 +2023-09-11 18:39:17 
- train: epoch 0056, iter [04500, 05004], lr: 0.010000, loss: 1.6342 +2023-09-11 18:39:53 - train: epoch 0056, iter [04600, 05004], lr: 0.010000, loss: 1.5186 +2023-09-11 18:40:32 - train: epoch 0056, iter [04700, 05004], lr: 0.010000, loss: 1.4906 +2023-09-11 18:41:08 - train: epoch 0056, iter [04800, 05004], lr: 0.010000, loss: 1.4903 +2023-09-11 18:41:43 - train: epoch 0056, iter [04900, 05004], lr: 0.010000, loss: 1.6206 +2023-09-11 18:42:18 - train: epoch 0056, iter [05000, 05004], lr: 0.010000, loss: 1.4330 +2023-09-11 18:42:19 - train: epoch 056, train_loss: 1.5492 +2023-09-11 18:43:43 - eval: epoch: 056, acc1: 66.022%, acc5: 86.974%, test_loss: 1.4016, per_image_load_time: 1.350ms, per_image_inference_time: 0.308ms +2023-09-11 18:43:43 - until epoch: 056, best_acc1: 66.222% +2023-09-11 18:43:43 - epoch 057 lr: 0.010000 +2023-09-11 18:44:27 - train: epoch 0057, iter [00100, 05004], lr: 0.010000, loss: 1.4668 +2023-09-11 18:45:03 - train: epoch 0057, iter [00200, 05004], lr: 0.010000, loss: 1.4846 +2023-09-11 18:45:40 - train: epoch 0057, iter [00300, 05004], lr: 0.010000, loss: 1.4312 +2023-09-11 18:46:20 - train: epoch 0057, iter [00400, 05004], lr: 0.010000, loss: 1.4470 +2023-09-11 18:46:55 - train: epoch 0057, iter [00500, 05004], lr: 0.010000, loss: 1.4834 +2023-09-11 18:47:31 - train: epoch 0057, iter [00600, 05004], lr: 0.010000, loss: 1.6534 +2023-09-11 18:48:09 - train: epoch 0057, iter [00700, 05004], lr: 0.010000, loss: 1.2642 +2023-09-11 18:48:45 - train: epoch 0057, iter [00800, 05004], lr: 0.010000, loss: 1.4785 +2023-09-11 18:49:21 - train: epoch 0057, iter [00900, 05004], lr: 0.010000, loss: 1.4836 +2023-09-11 18:50:01 - train: epoch 0057, iter [01000, 05004], lr: 0.010000, loss: 1.4232 +2023-09-11 18:50:36 - train: epoch 0057, iter [01100, 05004], lr: 0.010000, loss: 1.2910 +2023-09-11 18:51:11 - train: epoch 0057, iter [01200, 05004], lr: 0.010000, loss: 1.4780 +2023-09-11 18:51:50 - train: epoch 0057, iter [01300, 05004], lr: 0.010000, loss: 1.6936 +2023-09-11 18:52:25 - train: epoch 0057, iter [01400, 05004], lr: 0.010000, loss: 1.4910 +2023-09-11 18:53:03 - train: epoch 0057, iter [01500, 05004], lr: 0.010000, loss: 1.7606 +2023-09-11 18:53:40 - train: epoch 0057, iter [01600, 05004], lr: 0.010000, loss: 1.6851 +2023-09-11 18:54:17 - train: epoch 0057, iter [01700, 05004], lr: 0.010000, loss: 1.5478 +2023-09-11 18:54:54 - train: epoch 0057, iter [01800, 05004], lr: 0.010000, loss: 1.7438 +2023-09-11 18:55:29 - train: epoch 0057, iter [01900, 05004], lr: 0.010000, loss: 1.5308 +2023-09-11 18:56:07 - train: epoch 0057, iter [02000, 05004], lr: 0.010000, loss: 1.6003 +2023-09-11 18:56:44 - train: epoch 0057, iter [02100, 05004], lr: 0.010000, loss: 1.5424 +2023-09-11 18:57:22 - train: epoch 0057, iter [02200, 05004], lr: 0.010000, loss: 1.6448 +2023-09-11 18:57:58 - train: epoch 0057, iter [02300, 05004], lr: 0.010000, loss: 1.5703 +2023-09-11 18:58:33 - train: epoch 0057, iter [02400, 05004], lr: 0.010000, loss: 1.4493 +2023-09-11 18:59:12 - train: epoch 0057, iter [02500, 05004], lr: 0.010000, loss: 1.7901 +2023-09-11 18:59:49 - train: epoch 0057, iter [02600, 05004], lr: 0.010000, loss: 1.5640 +2023-09-11 19:00:25 - train: epoch 0057, iter [02700, 05004], lr: 0.010000, loss: 1.3302 +2023-09-11 19:01:02 - train: epoch 0057, iter [02800, 05004], lr: 0.010000, loss: 1.5565 +2023-09-11 19:01:38 - train: epoch 0057, iter [02900, 05004], lr: 0.010000, loss: 1.4342 +2023-09-11 19:02:15 - train: epoch 0057, iter [03000, 05004], lr: 0.010000, loss: 1.7239 +2023-09-11 
19:02:51 - train: epoch 0057, iter [03100, 05004], lr: 0.010000, loss: 1.5720 +2023-09-11 19:03:28 - train: epoch 0057, iter [03200, 05004], lr: 0.010000, loss: 1.8016 +2023-09-11 19:04:03 - train: epoch 0057, iter [03300, 05004], lr: 0.010000, loss: 1.5430 +2023-09-11 19:04:41 - train: epoch 0057, iter [03400, 05004], lr: 0.010000, loss: 1.6083 +2023-09-11 19:05:17 - train: epoch 0057, iter [03500, 05004], lr: 0.010000, loss: 1.7207 +2023-09-11 19:05:54 - train: epoch 0057, iter [03600, 05004], lr: 0.010000, loss: 1.6542 +2023-09-11 19:06:32 - train: epoch 0057, iter [03700, 05004], lr: 0.010000, loss: 1.5624 +2023-09-11 19:07:08 - train: epoch 0057, iter [03800, 05004], lr: 0.010000, loss: 1.4954 +2023-09-11 19:07:44 - train: epoch 0057, iter [03900, 05004], lr: 0.010000, loss: 1.7666 +2023-09-11 19:08:24 - train: epoch 0057, iter [04000, 05004], lr: 0.010000, loss: 1.5351 +2023-09-11 19:08:59 - train: epoch 0057, iter [04100, 05004], lr: 0.010000, loss: 1.7013 +2023-09-11 19:09:35 - train: epoch 0057, iter [04200, 05004], lr: 0.010000, loss: 1.4428 +2023-09-11 19:10:14 - train: epoch 0057, iter [04300, 05004], lr: 0.010000, loss: 1.3878 +2023-09-11 19:10:48 - train: epoch 0057, iter [04400, 05004], lr: 0.010000, loss: 1.4389 +2023-09-11 19:11:23 - train: epoch 0057, iter [04500, 05004], lr: 0.010000, loss: 1.6176 +2023-09-11 19:11:59 - train: epoch 0057, iter [04600, 05004], lr: 0.010000, loss: 1.5409 +2023-09-11 19:12:36 - train: epoch 0057, iter [04700, 05004], lr: 0.010000, loss: 1.7255 +2023-09-11 19:13:12 - train: epoch 0057, iter [04800, 05004], lr: 0.010000, loss: 1.7089 +2023-09-11 19:13:50 - train: epoch 0057, iter [04900, 05004], lr: 0.010000, loss: 1.7339 +2023-09-11 19:14:22 - train: epoch 0057, iter [05000, 05004], lr: 0.010000, loss: 1.5448 +2023-09-11 19:14:23 - train: epoch 057, train_loss: 1.5496 +2023-09-11 19:15:45 - eval: epoch: 057, acc1: 65.946%, acc5: 86.842%, test_loss: 1.4074, per_image_load_time: 1.323ms, per_image_inference_time: 0.301ms +2023-09-11 19:15:45 - until epoch: 057, best_acc1: 66.222% +2023-09-11 19:15:45 - epoch 058 lr: 0.010000 +2023-09-11 19:16:28 - train: epoch 0058, iter [00100, 05004], lr: 0.010000, loss: 1.5804 +2023-09-11 19:17:03 - train: epoch 0058, iter [00200, 05004], lr: 0.010000, loss: 1.5204 +2023-09-11 19:17:38 - train: epoch 0058, iter [00300, 05004], lr: 0.010000, loss: 1.6236 +2023-09-11 19:18:15 - train: epoch 0058, iter [00400, 05004], lr: 0.010000, loss: 1.5637 +2023-09-11 19:18:51 - train: epoch 0058, iter [00500, 05004], lr: 0.010000, loss: 1.4486 +2023-09-11 19:19:26 - train: epoch 0058, iter [00600, 05004], lr: 0.010000, loss: 1.6807 +2023-09-11 19:20:03 - train: epoch 0058, iter [00700, 05004], lr: 0.010000, loss: 1.5738 +2023-09-11 19:20:39 - train: epoch 0058, iter [00800, 05004], lr: 0.010000, loss: 1.5587 +2023-09-11 19:21:15 - train: epoch 0058, iter [00900, 05004], lr: 0.010000, loss: 1.4211 +2023-09-11 19:21:53 - train: epoch 0058, iter [01000, 05004], lr: 0.010000, loss: 1.2893 +2023-09-11 19:22:28 - train: epoch 0058, iter [01100, 05004], lr: 0.010000, loss: 1.4487 +2023-09-11 19:23:04 - train: epoch 0058, iter [01200, 05004], lr: 0.010000, loss: 1.5073 +2023-09-11 19:23:43 - train: epoch 0058, iter [01300, 05004], lr: 0.010000, loss: 1.7849 +2023-09-11 19:24:18 - train: epoch 0058, iter [01400, 05004], lr: 0.010000, loss: 1.7159 +2023-09-11 19:24:53 - train: epoch 0058, iter [01500, 05004], lr: 0.010000, loss: 1.5172 +2023-09-11 19:25:28 - train: epoch 0058, iter [01600, 05004], lr: 0.010000, loss: 1.7238 
+2023-09-11 19:26:05 - train: epoch 0058, iter [01700, 05004], lr: 0.010000, loss: 1.6229
+2023-09-11 19:26:42 - train: epoch 0058, iter [01800, 05004], lr: 0.010000, loss: 1.7028
+2023-09-11 19:27:17 - train: epoch 0058, iter [01900, 05004], lr: 0.010000, loss: 1.7046
+2023-09-11 19:27:53 - train: epoch 0058, iter [02000, 05004], lr: 0.010000, loss: 1.5907
+2023-09-11 19:28:30 - train: epoch 0058, iter [02100, 05004], lr: 0.010000, loss: 1.3771
+2023-09-11 19:29:06 - train: epoch 0058, iter [02200, 05004], lr: 0.010000, loss: 1.2793
+2023-09-11 19:29:40 - train: epoch 0058, iter [02300, 05004], lr: 0.010000, loss: 1.4995
+2023-09-11 19:30:18 - train: epoch 0058, iter [02400, 05004], lr: 0.010000, loss: 1.7580
+2023-09-11 19:30:53 - train: epoch 0058, iter [02500, 05004], lr: 0.010000, loss: 1.5667
+2023-09-11 19:31:29 - train: epoch 0058, iter [02600, 05004], lr: 0.010000, loss: 1.4028
+2023-09-11 19:32:07 - train: epoch 0058, iter [02700, 05004], lr: 0.010000, loss: 1.9525
+2023-09-11 19:32:42 - train: epoch 0058, iter [02800, 05004], lr: 0.010000, loss: 1.5169
+2023-09-11 19:33:18 - train: epoch 0058, iter [02900, 05004], lr: 0.010000, loss: 1.4544
+2023-09-11 19:33:55 - train: epoch 0058, iter [03000, 05004], lr: 0.010000, loss: 1.6324
+2023-09-11 19:34:30 - train: epoch 0058, iter [03100, 05004], lr: 0.010000, loss: 1.4944
+2023-09-11 19:35:06 - train: epoch 0058, iter [03200, 05004], lr: 0.010000, loss: 1.2883
+2023-09-11 19:35:43 - train: epoch 0058, iter [03300, 05004], lr: 0.010000, loss: 1.3896
+2023-09-11 19:36:19 - train: epoch 0058, iter [03400, 05004], lr: 0.010000, loss: 1.5177
+2023-09-11 19:36:54 - train: epoch 0058, iter [03500, 05004], lr: 0.010000, loss: 1.6132
+2023-09-11 19:37:31 - train: epoch 0058, iter [03600, 05004], lr: 0.010000, loss: 1.5983
+2023-09-11 19:38:06 - train: epoch 0058, iter [03700, 05004], lr: 0.010000, loss: 1.4856
+2023-09-11 19:38:43 - train: epoch 0058, iter [03800, 05004], lr: 0.010000, loss: 1.5481
+2023-09-11 19:39:21 - train: epoch 0058, iter [03900, 05004], lr: 0.010000, loss: 1.5084
+2023-09-11 19:39:58 - train: epoch 0058, iter [04000, 05004], lr: 0.010000, loss: 1.5589
+2023-09-11 19:40:35 - train: epoch 0058, iter [04100, 05004], lr: 0.010000, loss: 1.5487
+2023-09-11 19:41:11 - train: epoch 0058, iter [04200, 05004], lr: 0.010000, loss: 1.4760
+2023-09-11 19:41:46 - train: epoch 0058, iter [04300, 05004], lr: 0.010000, loss: 1.6449
+2023-09-11 19:42:24 - train: epoch 0058, iter [04400, 05004], lr: 0.010000, loss: 1.4040
+2023-09-11 19:42:59 - train: epoch 0058, iter [04500, 05004], lr: 0.010000, loss: 1.3914
+2023-09-11 19:43:35 - train: epoch 0058, iter [04600, 05004], lr: 0.010000, loss: 1.5581
+2023-09-11 19:44:14 - train: epoch 0058, iter [04700, 05004], lr: 0.010000, loss: 1.6341
+2023-09-11 19:44:50 - train: epoch 0058, iter [04800, 05004], lr: 0.010000, loss: 1.6929
+2023-09-11 19:45:25 - train: epoch 0058, iter [04900, 05004], lr: 0.010000, loss: 1.4628
+2023-09-11 19:45:59 - train: epoch 0058, iter [05000, 05004], lr: 0.010000, loss: 1.5031
+2023-09-11 19:46:00 - train: epoch 058, train_loss: 1.5444
+2023-09-11 19:47:23 - eval: epoch: 058, acc1: 65.950%, acc5: 87.010%, test_loss: 1.3971, per_image_load_time: 1.335ms, per_image_inference_time: 0.308ms
+2023-09-11 19:47:23 - until epoch: 058, best_acc1: 66.222%
+2023-09-11 19:47:23 - epoch 059 lr: 0.010000
+2023-09-11 19:48:07 - train: epoch 0059, iter [00100, 05004], lr: 0.010000, loss: 1.6370
+2023-09-11 19:48:44 - train: epoch 0059, iter [00200, 05004], lr: 0.010000, loss: 1.5499
+2023-09-11 19:49:20 - train: epoch 0059, iter [00300, 05004], lr: 0.010000, loss: 1.3117
+2023-09-11 19:49:58 - train: epoch 0059, iter [00400, 05004], lr: 0.010000, loss: 1.6097
+2023-09-11 19:50:34 - train: epoch 0059, iter [00500, 05004], lr: 0.010000, loss: 1.5401
+2023-09-11 19:51:10 - train: epoch 0059, iter [00600, 05004], lr: 0.010000, loss: 1.4024
+2023-09-11 19:51:47 - train: epoch 0059, iter [00700, 05004], lr: 0.010000, loss: 1.4329
+2023-09-11 19:52:25 - train: epoch 0059, iter [00800, 05004], lr: 0.010000, loss: 1.3998
+2023-09-11 19:53:01 - train: epoch 0059, iter [00900, 05004], lr: 0.010000, loss: 1.3655
+2023-09-11 19:53:37 - train: epoch 0059, iter [01000, 05004], lr: 0.010000, loss: 1.5682
+2023-09-11 19:54:15 - train: epoch 0059, iter [01100, 05004], lr: 0.010000, loss: 1.8450
+2023-09-11 19:54:50 - train: epoch 0059, iter [01200, 05004], lr: 0.010000, loss: 1.2591
+2023-09-11 19:55:29 - train: epoch 0059, iter [01300, 05004], lr: 0.010000, loss: 1.7913
+2023-09-11 19:56:06 - train: epoch 0059, iter [01400, 05004], lr: 0.010000, loss: 1.7400
+2023-09-11 19:56:42 - train: epoch 0059, iter [01500, 05004], lr: 0.010000, loss: 1.5743
+2023-09-11 19:57:21 - train: epoch 0059, iter [01600, 05004], lr: 0.010000, loss: 1.3660
+2023-09-11 19:57:56 - train: epoch 0059, iter [01700, 05004], lr: 0.010000, loss: 1.7112
+2023-09-11 19:58:34 - train: epoch 0059, iter [01800, 05004], lr: 0.010000, loss: 1.4929
+2023-09-11 19:59:10 - train: epoch 0059, iter [01900, 05004], lr: 0.010000, loss: 1.2756
+2023-09-11 19:59:46 - train: epoch 0059, iter [02000, 05004], lr: 0.010000, loss: 1.4914
+2023-09-11 20:00:23 - train: epoch 0059, iter [02100, 05004], lr: 0.010000, loss: 1.6973
+2023-09-11 20:00:59 - train: epoch 0059, iter [02200, 05004], lr: 0.010000, loss: 1.5176
+2023-09-11 20:01:37 - train: epoch 0059, iter [02300, 05004], lr: 0.010000, loss: 1.4763
+2023-09-11 20:02:12 - train: epoch 0059, iter [02400, 05004], lr: 0.010000, loss: 1.5353
+2023-09-11 20:02:50 - train: epoch 0059, iter [02500, 05004], lr: 0.010000, loss: 1.5428
+2023-09-11 20:03:25 - train: epoch 0059, iter [02600, 05004], lr: 0.010000, loss: 1.5209
+2023-09-11 20:04:03 - train: epoch 0059, iter [02700, 05004], lr: 0.010000, loss: 1.4935
+2023-09-11 20:04:39 - train: epoch 0059, iter [02800, 05004], lr: 0.010000, loss: 1.6196
+2023-09-11 20:05:15 - train: epoch 0059, iter [02900, 05004], lr: 0.010000, loss: 1.5751
+2023-09-11 20:05:52 - train: epoch 0059, iter [03000, 05004], lr: 0.010000, loss: 1.6278
+2023-09-11 20:06:28 - train: epoch 0059, iter [03100, 05004], lr: 0.010000, loss: 1.4807
+2023-09-11 20:07:04 - train: epoch 0059, iter [03200, 05004], lr: 0.010000, loss: 1.6384
+2023-09-11 20:07:40 - train: epoch 0059, iter [03300, 05004], lr: 0.010000, loss: 1.5375
+2023-09-11 20:08:18 - train: epoch 0059, iter [03400, 05004], lr: 0.010000, loss: 1.8273
+2023-09-11 20:08:54 - train: epoch 0059, iter [03500, 05004], lr: 0.010000, loss: 1.3738
+2023-09-11 20:09:30 - train: epoch 0059, iter [03600, 05004], lr: 0.010000, loss: 1.5607
+2023-09-11 20:10:09 - train: epoch 0059, iter [03700, 05004], lr: 0.010000, loss: 1.7314
+2023-09-11 20:10:45 - train: epoch 0059, iter [03800, 05004], lr: 0.010000, loss: 1.4425
+2023-09-11 20:11:20 - train: epoch 0059, iter [03900, 05004], lr: 0.010000, loss: 1.4474
+2023-09-11 20:11:58 - train: epoch 0059, iter [04000, 05004], lr: 0.010000, loss: 1.7415
+2023-09-11 20:12:34 - train: epoch 0059, iter [04100, 05004], lr: 0.010000, loss: 1.3428
+2023-09-11 20:13:09 - train: epoch 0059, iter [04200, 05004], lr: 0.010000, loss: 1.5975
+2023-09-11 20:13:49 - train: epoch 0059, iter [04300, 05004], lr: 0.010000, loss: 1.5809
+2023-09-11 20:14:24 - train: epoch 0059, iter [04400, 05004], lr: 0.010000, loss: 1.5116
+2023-09-11 20:14:58 - train: epoch 0059, iter [04500, 05004], lr: 0.010000, loss: 1.7272
+2023-09-11 20:15:37 - train: epoch 0059, iter [04600, 05004], lr: 0.010000, loss: 1.5833
+2023-09-11 20:16:13 - train: epoch 0059, iter [04700, 05004], lr: 0.010000, loss: 1.7178
+2023-09-11 20:16:48 - train: epoch 0059, iter [04800, 05004], lr: 0.010000, loss: 1.4657
+2023-09-11 20:17:25 - train: epoch 0059, iter [04900, 05004], lr: 0.010000, loss: 1.6520
+2023-09-11 20:17:57 - train: epoch 0059, iter [05000, 05004], lr: 0.010000, loss: 1.8982
+2023-09-11 20:17:57 - train: epoch 059, train_loss: 1.5470
+2023-09-11 20:19:22 - eval: epoch: 059, acc1: 65.220%, acc5: 86.548%, test_loss: 1.4310, per_image_load_time: 1.387ms, per_image_inference_time: 0.288ms
+2023-09-11 20:19:22 - until epoch: 059, best_acc1: 66.222%
+2023-09-11 20:19:22 - epoch 060 lr: 0.010000
+2023-09-11 20:20:06 - train: epoch 0060, iter [00100, 05004], lr: 0.010000, loss: 1.5930
+2023-09-11 20:20:44 - train: epoch 0060, iter [00200, 05004], lr: 0.010000, loss: 1.3505
+2023-09-11 20:21:19 - train: epoch 0060, iter [00300, 05004], lr: 0.010000, loss: 1.4918
+2023-09-11 20:21:57 - train: epoch 0060, iter [00400, 05004], lr: 0.010000, loss: 1.5907
+2023-09-11 20:22:33 - train: epoch 0060, iter [00500, 05004], lr: 0.010000, loss: 1.7429
+2023-09-11 20:23:08 - train: epoch 0060, iter [00600, 05004], lr: 0.010000, loss: 1.6018
+2023-09-11 20:23:45 - train: epoch 0060, iter [00700, 05004], lr: 0.010000, loss: 1.4402
+2023-09-11 20:24:20 - train: epoch 0060, iter [00800, 05004], lr: 0.010000, loss: 1.7231
+2023-09-11 20:24:56 - train: epoch 0060, iter [00900, 05004], lr: 0.010000, loss: 1.3650
+2023-09-11 20:25:34 - train: epoch 0060, iter [01000, 05004], lr: 0.010000, loss: 1.2639
+2023-09-11 20:26:09 - train: epoch 0060, iter [01100, 05004], lr: 0.010000, loss: 1.3519
+2023-09-11 20:26:47 - train: epoch 0060, iter [01200, 05004], lr: 0.010000, loss: 1.5648
+2023-09-11 20:27:22 - train: epoch 0060, iter [01300, 05004], lr: 0.010000, loss: 1.5399
+2023-09-11 20:27:56 - train: epoch 0060, iter [01400, 05004], lr: 0.010000, loss: 1.5404
+2023-09-11 20:28:35 - train: epoch 0060, iter [01500, 05004], lr: 0.010000, loss: 1.4600
+2023-09-11 20:29:10 - train: epoch 0060, iter [01600, 05004], lr: 0.010000, loss: 1.4423
+2023-09-11 20:29:45 - train: epoch 0060, iter [01700, 05004], lr: 0.010000, loss: 1.4200
+2023-09-11 20:30:22 - train: epoch 0060, iter [01800, 05004], lr: 0.010000, loss: 1.7180
+2023-09-11 20:30:57 - train: epoch 0060, iter [01900, 05004], lr: 0.010000, loss: 1.6814
+2023-09-11 20:31:32 - train: epoch 0060, iter [02000, 05004], lr: 0.010000, loss: 1.5780
+2023-09-11 20:32:10 - train: epoch 0060, iter [02100, 05004], lr: 0.010000, loss: 1.5117
+2023-09-11 20:32:45 - train: epoch 0060, iter [02200, 05004], lr: 0.010000, loss: 1.6248
+2023-09-11 20:33:19 - train: epoch 0060, iter [02300, 05004], lr: 0.010000, loss: 1.4071
+2023-09-11 20:33:55 - train: epoch 0060, iter [02400, 05004], lr: 0.010000, loss: 1.4403
+2023-09-11 20:34:33 - train: epoch 0060, iter [02500, 05004], lr: 0.010000, loss: 1.4393
+2023-09-11 20:35:08 - train: epoch 0060, iter [02600, 05004], lr: 0.010000, loss: 1.5758
+2023-09-11 20:35:43 - train: epoch 0060, iter [02700, 05004], lr: 0.010000, loss: 1.5107
+2023-09-11 20:36:21 - train: epoch 0060, iter [02800, 05004], lr: 0.010000, loss: 1.6770
+2023-09-11 20:36:56 - train: epoch 0060, iter [02900, 05004], lr: 0.010000, loss: 1.6572
+2023-09-11 20:37:32 - train: epoch 0060, iter [03000, 05004], lr: 0.010000, loss: 1.5221
+2023-09-11 20:38:07 - train: epoch 0060, iter [03100, 05004], lr: 0.010000, loss: 1.5575
+2023-09-11 20:38:45 - train: epoch 0060, iter [03200, 05004], lr: 0.010000, loss: 1.4697
+2023-09-11 20:39:20 - train: epoch 0060, iter [03300, 05004], lr: 0.010000, loss: 1.4427
+2023-09-11 20:39:54 - train: epoch 0060, iter [03400, 05004], lr: 0.010000, loss: 1.6426
+2023-09-11 20:40:33 - train: epoch 0060, iter [03500, 05004], lr: 0.010000, loss: 1.5578
+2023-09-11 20:41:07 - train: epoch 0060, iter [03600, 05004], lr: 0.010000, loss: 1.5395
+2023-09-11 20:41:42 - train: epoch 0060, iter [03700, 05004], lr: 0.010000, loss: 1.8267
+2023-09-11 20:42:18 - train: epoch 0060, iter [03800, 05004], lr: 0.010000, loss: 1.5055
+2023-09-11 20:42:56 - train: epoch 0060, iter [03900, 05004], lr: 0.010000, loss: 1.5882
+2023-09-11 20:43:30 - train: epoch 0060, iter [04000, 05004], lr: 0.010000, loss: 1.4355
+2023-09-11 20:44:05 - train: epoch 0060, iter [04100, 05004], lr: 0.010000, loss: 1.7341
+2023-09-11 20:44:42 - train: epoch 0060, iter [04200, 05004], lr: 0.010000, loss: 1.5677
+2023-09-11 20:45:19 - train: epoch 0060, iter [04300, 05004], lr: 0.010000, loss: 1.5476
+2023-09-11 20:45:53 - train: epoch 0060, iter [04400, 05004], lr: 0.010000, loss: 1.6937
+2023-09-11 20:46:29 - train: epoch 0060, iter [04500, 05004], lr: 0.010000, loss: 1.6473
+2023-09-11 20:47:07 - train: epoch 0060, iter [04600, 05004], lr: 0.010000, loss: 1.4526
+2023-09-11 20:47:42 - train: epoch 0060, iter [04700, 05004], lr: 0.010000, loss: 1.4407
+2023-09-11 20:48:17 - train: epoch 0060, iter [04800, 05004], lr: 0.010000, loss: 1.6289
+2023-09-11 20:48:55 - train: epoch 0060, iter [04900, 05004], lr: 0.010000, loss: 1.6141
+2023-09-11 20:49:27 - train: epoch 0060, iter [05000, 05004], lr: 0.010000, loss: 1.5492
+2023-09-11 20:49:27 - train: epoch 060, train_loss: 1.5419
+2023-09-11 20:50:52 - eval: epoch: 060, acc1: 65.484%, acc5: 86.634%, test_loss: 1.4280, per_image_load_time: 1.370ms, per_image_inference_time: 0.305ms
+2023-09-11 20:50:52 - until epoch: 060, best_acc1: 66.222%
+2023-09-11 20:50:52 - epoch 061 lr: 0.001000
+2023-09-11 20:51:36 - train: epoch 0061, iter [00100, 05004], lr: 0.001000, loss: 1.3078
+2023-09-11 20:52:12 - train: epoch 0061, iter [00200, 05004], lr: 0.001000, loss: 1.3897
+2023-09-11 20:52:48 - train: epoch 0061, iter [00300, 05004], lr: 0.001000, loss: 1.3743
+2023-09-11 20:53:25 - train: epoch 0061, iter [00400, 05004], lr: 0.001000, loss: 1.5047
+2023-09-11 20:53:59 - train: epoch 0061, iter [00500, 05004], lr: 0.001000, loss: 1.3627
+2023-09-11 20:54:35 - train: epoch 0061, iter [00600, 05004], lr: 0.001000, loss: 1.4108
+2023-09-11 20:55:14 - train: epoch 0061, iter [00700, 05004], lr: 0.001000, loss: 1.4727
+2023-09-11 20:55:49 - train: epoch 0061, iter [00800, 05004], lr: 0.001000, loss: 1.4718
+2023-09-11 20:56:24 - train: epoch 0061, iter [00900, 05004], lr: 0.001000, loss: 1.4612
+2023-09-11 20:57:02 - train: epoch 0061, iter [01000, 05004], lr: 0.001000, loss: 1.3561
+2023-09-11 20:57:39 - train: epoch 0061, iter [01100, 05004], lr: 0.001000, loss: 1.1841
+2023-09-11 20:58:15 - train: epoch 0061, iter [01200, 05004], lr: 0.001000, loss: 1.3214
+2023-09-11 20:58:52 - train: epoch 0061, iter [01300, 05004], lr: 0.001000, loss: 1.2254
+2023-09-11 20:59:28 - train: epoch 0061, iter [01400, 05004], lr: 0.001000, loss: 1.2376
+2023-09-11 21:00:03 - train: epoch 0061, iter [01500, 05004], lr: 0.001000, loss: 1.3175
+2023-09-11 21:00:39 - train: epoch 0061, iter [01600, 05004], lr: 0.001000, loss: 1.3022
+2023-09-11 21:01:15 - train: epoch 0061, iter [01700, 05004], lr: 0.001000, loss: 1.3963
+2023-09-11 21:01:52 - train: epoch 0061, iter [01800, 05004], lr: 0.001000, loss: 1.2287
+2023-09-11 21:02:28 - train: epoch 0061, iter [01900, 05004], lr: 0.001000, loss: 1.4382
+2023-09-11 21:03:04 - train: epoch 0061, iter [02000, 05004], lr: 0.001000, loss: 1.4419
+2023-09-11 21:03:41 - train: epoch 0061, iter [02100, 05004], lr: 0.001000, loss: 1.7091
+2023-09-11 21:04:19 - train: epoch 0061, iter [02200, 05004], lr: 0.001000, loss: 1.2996
+2023-09-11 21:04:57 - train: epoch 0061, iter [02300, 05004], lr: 0.001000, loss: 1.5404
+2023-09-11 21:05:31 - train: epoch 0061, iter [02400, 05004], lr: 0.001000, loss: 1.1601
+2023-09-11 21:06:07 - train: epoch 0061, iter [02500, 05004], lr: 0.001000, loss: 1.3417
+2023-09-11 21:06:46 - train: epoch 0061, iter [02600, 05004], lr: 0.001000, loss: 1.1885
+2023-09-11 21:07:20 - train: epoch 0061, iter [02700, 05004], lr: 0.001000, loss: 1.5473
+2023-09-11 21:07:56 - train: epoch 0061, iter [02800, 05004], lr: 0.001000, loss: 1.3829
+2023-09-11 21:08:34 - train: epoch 0061, iter [02900, 05004], lr: 0.001000, loss: 1.4058
+2023-09-11 21:09:11 - train: epoch 0061, iter [03000, 05004], lr: 0.001000, loss: 1.3638
+2023-09-11 21:09:46 - train: epoch 0061, iter [03100, 05004], lr: 0.001000, loss: 1.3965
+2023-09-11 21:10:23 - train: epoch 0061, iter [03200, 05004], lr: 0.001000, loss: 1.2124
+2023-09-11 21:10:59 - train: epoch 0061, iter [03300, 05004], lr: 0.001000, loss: 1.3675
+2023-09-11 21:11:37 - train: epoch 0061, iter [03400, 05004], lr: 0.001000, loss: 1.1584
+2023-09-11 21:12:12 - train: epoch 0061, iter [03500, 05004], lr: 0.001000, loss: 1.5544
+2023-09-11 21:12:48 - train: epoch 0061, iter [03600, 05004], lr: 0.001000, loss: 1.3277
+2023-09-11 21:13:25 - train: epoch 0061, iter [03700, 05004], lr: 0.001000, loss: 1.2982
+2023-09-11 21:14:00 - train: epoch 0061, iter [03800, 05004], lr: 0.001000, loss: 1.2010
+2023-09-11 21:14:35 - train: epoch 0061, iter [03900, 05004], lr: 0.001000, loss: 1.3774
+2023-09-11 21:15:14 - train: epoch 0061, iter [04000, 05004], lr: 0.001000, loss: 1.3634
+2023-09-11 21:15:50 - train: epoch 0061, iter [04100, 05004], lr: 0.001000, loss: 1.4633
+2023-09-11 21:16:25 - train: epoch 0061, iter [04200, 05004], lr: 0.001000, loss: 1.4070
+2023-09-11 21:17:02 - train: epoch 0061, iter [04300, 05004], lr: 0.001000, loss: 1.3166
+2023-09-11 21:17:38 - train: epoch 0061, iter [04400, 05004], lr: 0.001000, loss: 1.3626
+2023-09-11 21:18:14 - train: epoch 0061, iter [04500, 05004], lr: 0.001000, loss: 1.4811
+2023-09-11 21:18:52 - train: epoch 0061, iter [04600, 05004], lr: 0.001000, loss: 1.3445
+2023-09-11 21:19:27 - train: epoch 0061, iter [04700, 05004], lr: 0.001000, loss: 1.3701
+2023-09-11 21:20:02 - train: epoch 0061, iter [04800, 05004], lr: 0.001000, loss: 1.4138
+2023-09-11 21:20:40 - train: epoch 0061, iter [04900, 05004], lr: 0.001000, loss: 1.3335
+2023-09-11 21:21:13 - train: epoch 0061, iter [05000, 05004], lr: 0.001000, loss: 1.4268
+2023-09-11 21:21:14 - train: epoch 061, train_loss: 1.3712
+2023-09-11 21:22:36 - eval: epoch: 061, acc1: 69.016%, acc5: 88.706%, test_loss: 1.2628, per_image_load_time: 1.329ms, per_image_inference_time: 0.301ms
+2023-09-11 21:22:37 - until epoch: 061, best_acc1: 69.016%
+2023-09-11 21:22:37 - epoch 062 lr: 0.001000
+2023-09-11 21:23:20 - train: epoch 0062, iter [00100, 05004], lr: 0.001000, loss: 1.4898
+2023-09-11 21:23:57 - train: epoch 0062, iter [00200, 05004], lr: 0.001000, loss: 1.4777
+2023-09-11 21:24:33 - train: epoch 0062, iter [00300, 05004], lr: 0.001000, loss: 1.4095
+2023-09-11 21:25:12 - train: epoch 0062, iter [00400, 05004], lr: 0.001000, loss: 1.1560
+2023-09-11 21:25:48 - train: epoch 0062, iter [00500, 05004], lr: 0.001000, loss: 1.2436
+2023-09-11 21:26:22 - train: epoch 0062, iter [00600, 05004], lr: 0.001000, loss: 1.2645
+2023-09-11 21:27:00 - train: epoch 0062, iter [00700, 05004], lr: 0.001000, loss: 1.3770
+2023-09-11 21:27:36 - train: epoch 0062, iter [00800, 05004], lr: 0.001000, loss: 1.3417
+2023-09-11 21:28:10 - train: epoch 0062, iter [00900, 05004], lr: 0.001000, loss: 1.3750
+2023-09-11 21:28:48 - train: epoch 0062, iter [01000, 05004], lr: 0.001000, loss: 1.4988
+2023-09-11 21:29:25 - train: epoch 0062, iter [01100, 05004], lr: 0.001000, loss: 1.1887
+2023-09-11 21:30:01 - train: epoch 0062, iter [01200, 05004], lr: 0.001000, loss: 1.5229
+2023-09-11 21:30:37 - train: epoch 0062, iter [01300, 05004], lr: 0.001000, loss: 1.3180
+2023-09-11 21:31:14 - train: epoch 0062, iter [01400, 05004], lr: 0.001000, loss: 1.2806
+2023-09-11 21:31:50 - train: epoch 0062, iter [01500, 05004], lr: 0.001000, loss: 1.3267
+2023-09-11 21:32:28 - train: epoch 0062, iter [01600, 05004], lr: 0.001000, loss: 1.5272
+2023-09-11 21:33:01 - train: epoch 0062, iter [01700, 05004], lr: 0.001000, loss: 1.3857
+2023-09-11 21:33:37 - train: epoch 0062, iter [01800, 05004], lr: 0.001000, loss: 1.2672
+2023-09-11 21:34:15 - train: epoch 0062, iter [01900, 05004], lr: 0.001000, loss: 1.2234
+2023-09-11 21:34:51 - train: epoch 0062, iter [02000, 05004], lr: 0.001000, loss: 1.3084
+2023-09-11 21:35:25 - train: epoch 0062, iter [02100, 05004], lr: 0.001000, loss: 1.5351
+2023-09-11 21:36:05 - train: epoch 0062, iter [02200, 05004], lr: 0.001000, loss: 1.2295
+2023-09-11 21:36:39 - train: epoch 0062, iter [02300, 05004], lr: 0.001000, loss: 1.2580
+2023-09-11 21:37:14 - train: epoch 0062, iter [02400, 05004], lr: 0.001000, loss: 1.2150
+2023-09-11 21:37:53 - train: epoch 0062, iter [02500, 05004], lr: 0.001000, loss: 1.4471
+2023-09-11 21:38:27 - train: epoch 0062, iter [02600, 05004], lr: 0.001000, loss: 1.1862
+2023-09-11 21:39:02 - train: epoch 0062, iter [02700, 05004], lr: 0.001000, loss: 1.3758
+2023-09-11 21:39:40 - train: epoch 0062, iter [02800, 05004], lr: 0.001000, loss: 1.4188
+2023-09-11 21:40:14 - train: epoch 0062, iter [02900, 05004], lr: 0.001000, loss: 1.2689
+2023-09-11 21:40:49 - train: epoch 0062, iter [03000, 05004], lr: 0.001000, loss: 1.5511
+2023-09-11 21:41:28 - train: epoch 0062, iter [03100, 05004], lr: 0.001000, loss: 1.2591
+2023-09-11 21:42:03 - train: epoch 0062, iter [03200, 05004], lr: 0.001000, loss: 1.3553
+2023-09-11 21:42:39 - train: epoch 0062, iter [03300, 05004], lr: 0.001000, loss: 1.2044
+2023-09-11 21:43:16 - train: epoch 0062, iter [03400, 05004], lr: 0.001000, loss: 1.2642
+2023-09-11 21:43:50 - train: epoch 0062, iter [03500, 05004], lr: 0.001000, loss: 1.4866
+2023-09-11 21:44:26 - train: epoch 0062, iter [03600, 05004], lr: 0.001000, loss: 1.2962
+2023-09-11 21:45:04 - train: epoch 0062, iter [03700, 05004], lr: 0.001000, loss: 1.4515
+2023-09-11 21:45:39 - train: epoch 0062, iter [03800, 05004], lr: 0.001000, loss: 1.2778
+2023-09-11 21:46:15 - train: epoch 0062, iter [03900, 05004], lr: 0.001000, loss: 1.4548
+2023-09-11 21:46:53 - train: epoch 0062, iter [04000, 05004], lr: 0.001000, loss: 1.1350
+2023-09-11 21:47:29 - train: epoch 0062, iter [04100, 05004], lr: 0.001000, loss: 1.6616
+2023-09-11 21:48:04 - train: epoch 0062, iter [04200, 05004], lr: 0.001000, loss: 1.2344
+2023-09-11 21:48:42 - train: epoch 0062, iter [04300, 05004], lr: 0.001000, loss: 1.2267
+2023-09-11 21:49:18 - train: epoch 0062, iter [04400, 05004], lr: 0.001000, loss: 1.1964
+2023-09-11 21:49:54 - train: epoch 0062, iter [04500, 05004], lr: 0.001000, loss: 1.1489
+2023-09-11 21:50:32 - train: epoch 0062, iter [04600, 05004], lr: 0.001000, loss: 1.2605
+2023-09-11 21:51:08 - train: epoch 0062, iter [04700, 05004], lr: 0.001000, loss: 1.3190
+2023-09-11 21:51:43 - train: epoch 0062, iter [04800, 05004], lr: 0.001000, loss: 1.2110
+2023-09-11 21:52:21 - train: epoch 0062, iter [04900, 05004], lr: 0.001000, loss: 1.2142
+2023-09-11 21:52:53 - train: epoch 0062, iter [05000, 05004], lr: 0.001000, loss: 1.3067
+2023-09-11 21:52:54 - train: epoch 062, train_loss: 1.3236
+2023-09-11 21:54:19 - eval: epoch: 062, acc1: 69.600%, acc5: 88.944%, test_loss: 1.2450, per_image_load_time: 1.366ms, per_image_inference_time: 0.308ms
+2023-09-11 21:54:19 - until epoch: 062, best_acc1: 69.600%
+2023-09-11 21:54:19 - epoch 063 lr: 0.001000
+2023-09-11 21:55:02 - train: epoch 0063, iter [00100, 05004], lr: 0.001000, loss: 1.2293
+2023-09-11 21:55:38 - train: epoch 0063, iter [00200, 05004], lr: 0.001000, loss: 1.1798
+2023-09-11 21:56:16 - train: epoch 0063, iter [00300, 05004], lr: 0.001000, loss: 1.2775
+2023-09-11 21:56:52 - train: epoch 0063, iter [00400, 05004], lr: 0.001000, loss: 1.3352
+2023-09-11 21:57:27 - train: epoch 0063, iter [00500, 05004], lr: 0.001000, loss: 1.1698
+2023-09-11 21:58:03 - train: epoch 0063, iter [00600, 05004], lr: 0.001000, loss: 1.3257
+2023-09-11 21:58:42 - train: epoch 0063, iter [00700, 05004], lr: 0.001000, loss: 1.2210
+2023-09-11 21:59:17 - train: epoch 0063, iter [00800, 05004], lr: 0.001000, loss: 1.2701
+2023-09-11 21:59:52 - train: epoch 0063, iter [00900, 05004], lr: 0.001000, loss: 1.3182
+2023-09-11 22:00:31 - train: epoch 0063, iter [01000, 05004], lr: 0.001000, loss: 1.3986
+2023-09-11 22:01:06 - train: epoch 0063, iter [01100, 05004], lr: 0.001000, loss: 1.3069
+2023-09-11 22:01:41 - train: epoch 0063, iter [01200, 05004], lr: 0.001000, loss: 1.4642
+2023-09-11 22:02:20 - train: epoch 0063, iter [01300, 05004], lr: 0.001000, loss: 1.0807
+2023-09-11 22:02:55 - train: epoch 0063, iter [01400, 05004], lr: 0.001000, loss: 1.6694
+2023-09-11 22:03:32 - train: epoch 0063, iter [01500, 05004], lr: 0.001000, loss: 1.3048
+2023-09-11 22:04:09 - train: epoch 0063, iter [01600, 05004], lr: 0.001000, loss: 1.3878
+2023-09-11 22:04:44 - train: epoch 0063, iter [01700, 05004], lr: 0.001000, loss: 1.0668
+2023-09-11 22:05:23 - train: epoch 0063, iter [01800, 05004], lr: 0.001000, loss: 1.2454
+2023-09-11 22:05:58 - train: epoch 0063, iter [01900, 05004], lr: 0.001000, loss: 1.5570
+2023-09-11 22:06:34 - train: epoch 0063, iter [02000, 05004], lr: 0.001000, loss: 1.1808
+2023-09-11 22:07:10 - train: epoch 0063, iter [02100, 05004], lr: 0.001000, loss: 1.1958
+2023-09-11 22:07:48 - train: epoch 0063, iter [02200, 05004], lr: 0.001000, loss: 1.4175
+2023-09-11 22:08:23 - train: epoch 0063, iter [02300, 05004], lr: 0.001000, loss: 1.3640
+2023-09-11 22:08:59 - train: epoch 0063, iter [02400, 05004], lr: 0.001000, loss: 1.3567
+2023-09-11 22:09:37 - train: epoch 0063, iter [02500, 05004], lr: 0.001000, loss: 1.2117
+2023-09-11 22:10:13 - train: epoch 0063, iter [02600, 05004], lr: 0.001000, loss: 1.2160
+2023-09-11 22:10:49 - train: epoch 0063, iter [02700, 05004], lr: 0.001000, loss: 1.4430
+2023-09-11 22:11:26 - train: epoch 0063, iter [02800, 05004], lr: 0.001000, loss: 1.2622
+2023-09-11 22:12:03 - train: epoch 0063, iter [02900, 05004], lr: 0.001000, loss: 1.4086
+2023-09-11 22:12:39 - train: epoch 0063, iter [03000, 05004], lr: 0.001000, loss: 1.3223
+2023-09-11 22:13:17 - train: epoch 0063, iter [03100, 05004], lr: 0.001000, loss: 1.5625
+2023-09-11 22:13:52 - train: epoch 0063, iter [03200, 05004], lr: 0.001000, loss: 1.4112
+2023-09-11 22:14:29 - train: epoch 0063, iter [03300, 05004], lr: 0.001000, loss: 1.3683
+2023-09-11 22:15:07 - train: epoch 0063, iter [03400, 05004], lr: 0.001000, loss: 1.2325
+2023-09-11 22:15:43 - train: epoch 0063, iter [03500, 05004], lr: 0.001000, loss: 1.5029
+2023-09-11 22:16:18 - train: epoch 0063, iter [03600, 05004], lr: 0.001000, loss: 1.2718
+2023-09-11 22:16:55 - train: epoch 0063, iter [03700, 05004], lr: 0.001000, loss: 1.2784
+2023-09-11 22:17:31 - train: epoch 0063, iter [03800, 05004], lr: 0.001000, loss: 1.3746
+2023-09-11 22:18:07 - train: epoch 0063, iter [03900, 05004], lr: 0.001000, loss: 1.1317
+2023-09-11 22:18:46 - train: epoch 0063, iter [04000, 05004], lr: 0.001000, loss: 1.3277
+2023-09-11 22:19:20 - train: epoch 0063, iter [04100, 05004], lr: 0.001000, loss: 1.4168
+2023-09-11 22:19:56 - train: epoch 0063, iter [04200, 05004], lr: 0.001000, loss: 1.3787
+2023-09-11 22:20:35 - train: epoch 0063, iter [04300, 05004], lr: 0.001000, loss: 1.1695
+2023-09-11 22:21:10 - train: epoch 0063, iter [04400, 05004], lr: 0.001000, loss: 1.2693
+2023-09-11 22:21:46 - train: epoch 0063, iter [04500, 05004], lr: 0.001000, loss: 1.3781
+2023-09-11 22:22:25 - train: epoch 0063, iter [04600, 05004], lr: 0.001000, loss: 1.3703
+2023-09-11 22:23:00 - train: epoch 0063, iter [04700, 05004], lr: 0.001000, loss: 1.1784
+2023-09-11 22:23:37 - train: epoch 0063, iter [04800, 05004], lr: 0.001000, loss: 1.2447
+2023-09-11 22:24:15 - train: epoch 0063, iter [04900, 05004], lr: 0.001000, loss: 1.2715
+2023-09-11 22:24:49 - train: epoch 0063, iter [05000, 05004], lr: 0.001000, loss: 1.3641
+2023-09-11 22:24:50 - train: epoch 063, train_loss: 1.3048
+2023-09-11 22:26:14 - eval: epoch: 063, acc1: 69.538%, acc5: 89.040%, test_loss: 1.2429, per_image_load_time: 1.354ms, per_image_inference_time: 0.308ms
+2023-09-11 22:26:14 - until epoch: 063, best_acc1: 69.600%
+2023-09-11 22:26:14 - epoch 064 lr: 0.001000
+2023-09-11 22:26:59 - train: epoch 0064, iter [00100, 05004], lr: 0.001000, loss: 1.2306
+2023-09-11 22:27:37 - train: epoch 0064, iter [00200, 05004], lr: 0.001000, loss: 1.2587
+2023-09-11 22:28:12 - train: epoch 0064, iter [00300, 05004], lr: 0.001000, loss: 1.1916
+2023-09-11 22:28:48 - train: epoch 0064, iter [00400, 05004], lr: 0.001000, loss: 1.2204
+2023-09-11 22:29:27 - train: epoch 0064, iter [00500, 05004], lr: 0.001000, loss: 1.2350
+2023-09-11 22:30:03 - train: epoch 0064, iter [00600, 05004], lr: 0.001000, loss: 1.2491
+2023-09-11 22:30:38 - train: epoch 0064, iter [00700, 05004], lr: 0.001000, loss: 1.4662
+2023-09-11 22:31:16 - train: epoch 0064, iter [00800, 05004], lr: 0.001000, loss: 1.1379
+2023-09-11 22:31:53 - train: epoch 0064, iter [00900, 05004], lr: 0.001000, loss: 1.0902
+2023-09-11 22:32:29 - train: epoch 0064, iter [01000, 05004], lr: 0.001000, loss: 1.2029
+2023-09-11 22:33:05 - train: epoch 0064, iter [01100, 05004], lr: 0.001000, loss: 1.2258
+2023-09-11 22:33:42 - train: epoch 0064, iter [01200, 05004], lr: 0.001000, loss: 1.2122
+2023-09-11 22:34:19 - train: epoch 0064, iter [01300, 05004], lr: 0.001000, loss: 1.3468
+2023-09-11 22:34:56 - train: epoch 0064, iter [01400, 05004], lr: 0.001000, loss: 1.4965
+2023-09-11 22:35:32 - train: epoch 0064, iter [01500, 05004], lr: 0.001000, loss: 1.4552
+2023-09-11 22:36:11 - train: epoch 0064, iter [01600, 05004], lr: 0.001000, loss: 1.2917
+2023-09-11 22:36:47 - train: epoch 0064, iter [01700, 05004], lr: 0.001000, loss: 1.1648
+2023-09-11 22:37:23 - train: epoch 0064, iter [01800, 05004], lr: 0.001000, loss: 1.0195
+2023-09-11 22:38:02 - train: epoch 0064, iter [01900, 05004], lr: 0.001000, loss: 1.5059
+2023-09-11 22:38:36 - train: epoch 0064, iter [02000, 05004], lr: 0.001000, loss: 1.2261
+2023-09-11 22:39:14 - train: epoch 0064, iter [02100, 05004], lr: 0.001000, loss: 1.2652
+2023-09-11 22:39:51 - train: epoch 0064, iter [02200, 05004], lr: 0.001000, loss: 1.3505
+2023-09-11 22:40:27 - train: epoch 0064, iter [02300, 05004], lr: 0.001000, loss: 1.3297
+2023-09-11 22:41:03 - train: epoch 0064, iter [02400, 05004], lr: 0.001000, loss: 1.2484
+2023-09-11 22:41:38 - train: epoch 0064, iter [02500, 05004], lr: 0.001000, loss: 1.2884
+2023-09-11 22:42:15 - train: epoch 0064, iter [02600, 05004], lr: 0.001000, loss: 1.1680
+2023-09-11 22:42:51 - train: epoch 0064, iter [02700, 05004], lr: 0.001000, loss: 1.2772
+2023-09-11 22:43:28 - train: epoch 0064, iter [02800, 05004], lr: 0.001000, loss: 1.4284
+2023-09-11 22:44:03 - train: epoch 0064, iter [02900, 05004], lr: 0.001000, loss: 1.3791
+2023-09-11 22:44:42 - train: epoch 0064, iter [03000, 05004], lr: 0.001000, loss: 1.2076
+2023-09-11 22:45:16 - train: epoch 0064, iter [03100, 05004], lr: 0.001000, loss: 1.3176
+2023-09-11 22:45:53 - train: epoch 0064, iter [03200, 05004], lr: 0.001000, loss: 1.1506
+2023-09-11 22:46:30 - train: epoch 0064, iter [03300, 05004], lr: 0.001000, loss: 1.2309
+2023-09-11 22:47:07 - train: epoch 0064, iter [03400, 05004], lr: 0.001000, loss: 1.1975
+2023-09-11 22:47:42 - train: epoch 0064, iter [03500, 05004], lr: 0.001000, loss: 1.2323
+2023-09-11 22:48:20 - train: epoch 0064, iter [03600, 05004], lr: 0.001000, loss: 1.3873
+2023-09-11 22:48:58 - train: epoch 0064, iter [03700, 05004], lr: 0.001000, loss: 0.9862
+2023-09-11 22:49:34 - train: epoch 0064, iter [03800, 05004], lr: 0.001000, loss: 1.3065
+2023-09-11 22:50:09 - train: epoch 0064, iter [03900, 05004], lr: 0.001000, loss: 1.3059
+2023-09-11 22:50:47 - train: epoch 0064, iter [04000, 05004], lr: 0.001000, loss: 1.2983
+2023-09-11 22:51:22 - train: epoch 0064, iter [04100, 05004], lr: 0.001000, loss: 1.1834
+2023-09-11 22:51:58 - train: epoch 0064, iter [04200, 05004], lr: 0.001000, loss: 1.1738
+2023-09-11 22:52:36 - train: epoch 0064, iter [04300, 05004], lr: 0.001000, loss: 1.3991
+2023-09-11 22:53:11 - train: epoch 0064, iter [04400, 05004], lr: 0.001000, loss: 1.1906
+2023-09-11 22:53:49 - train: epoch 0064, iter [04500, 05004], lr: 0.001000, loss: 1.2047
+2023-09-11 22:54:26 - train: epoch 0064, iter [04600, 05004], lr: 0.001000, loss: 1.4573
+2023-09-11 22:55:01 - train: epoch 0064, iter [04700, 05004], lr: 0.001000, loss: 1.6719
+2023-09-11 22:55:37 - train: epoch 0064, iter [04800, 05004], lr: 0.001000, loss: 1.3662
+2023-09-11 22:56:14 - train: epoch 0064, iter [04900, 05004], lr: 0.001000, loss: 1.4645
+2023-09-11 22:56:49 - train: epoch 0064, iter [05000, 05004], lr: 0.001000, loss: 0.9781
+2023-09-11 22:56:49 - train: epoch 064, train_loss: 1.2931
+2023-09-11 22:58:13 - eval: epoch: 064, acc1: 69.836%, acc5: 89.156%, test_loss: 1.2339, per_image_load_time: 1.355ms, per_image_inference_time: 0.306ms
+2023-09-11 22:58:13 - until epoch: 064, best_acc1: 69.836%
+2023-09-11 22:58:13 - epoch 065 lr: 0.001000
+2023-09-11 22:58:58 - train: epoch 0065, iter [00100, 05004], lr: 0.001000, loss: 1.1693
+2023-09-11 22:59:34 - train: epoch 0065, iter [00200, 05004], lr: 0.001000, loss: 1.3783
+2023-09-11 23:00:10 - train: epoch 0065, iter [00300, 05004], lr: 0.001000, loss: 1.1542
+2023-09-11 23:00:48 - train: epoch 0065, iter [00400, 05004], lr: 0.001000, loss: 1.3232
+2023-09-11 23:01:24 - train: epoch 0065, iter [00500, 05004], lr: 0.001000, loss: 1.2880
+2023-09-11 23:02:00 - train: epoch 0065, iter [00600, 05004], lr: 0.001000, loss: 1.4647
+2023-09-11 23:02:38 - train: epoch 0065, iter [00700, 05004], lr: 0.001000, loss: 1.4804
+2023-09-11 23:03:15 - train: epoch 0065, iter [00800, 05004], lr: 0.001000, loss: 1.1773
+2023-09-11 23:03:51 - train: epoch 0065, iter [00900, 05004], lr: 0.001000, loss: 1.3180
+2023-09-11 23:04:29 - train: epoch 0065, iter [01000, 05004], lr: 0.001000, loss: 1.2675
+2023-09-11 23:05:05 - train: epoch 0065, iter [01100, 05004], lr: 0.001000, loss: 1.1549
+2023-09-11 23:05:40 - train: epoch 0065, iter [01200, 05004], lr: 0.001000, loss: 1.3567
+2023-09-11 23:06:19 - train: epoch 0065, iter [01300, 05004], lr: 0.001000, loss: 1.3956
+2023-09-11 23:06:53 - train: epoch 0065, iter [01400, 05004], lr: 0.001000, loss: 1.2057
+2023-09-11 23:07:29 - train: epoch 0065, iter [01500, 05004], lr: 0.001000, loss: 1.2904
+2023-09-11 23:08:07 - train: epoch 0065, iter [01600, 05004], lr: 0.001000, loss: 1.2160
+2023-09-11 23:08:44 - train: epoch 0065, iter [01700, 05004], lr: 0.001000, loss: 1.3564
+2023-09-11 23:09:22 - train: epoch 0065, iter [01800, 05004], lr: 0.001000, loss: 1.1596
+2023-09-11 23:09:58 - train: epoch 0065, iter [01900, 05004], lr: 0.001000, loss: 1.1272
+2023-09-11 23:10:35 - train: epoch 0065, iter [02000, 05004], lr: 0.001000, loss: 1.2282
+2023-09-11 23:11:11 - train: epoch 0065, iter [02100, 05004], lr: 0.001000, loss: 1.1620
+2023-09-11 23:11:49 - train: epoch 0065, iter [02200, 05004], lr: 0.001000, loss: 1.2477
+2023-09-11 23:12:25 - train: epoch 0065, iter [02300, 05004], lr: 0.001000, loss: 1.3645
+2023-09-11 23:13:05 - train: epoch 0065, iter [02400, 05004], lr: 0.001000, loss: 1.2708
+2023-09-11 23:13:41 - train: epoch 0065, iter [02500, 05004], lr: 0.001000, loss: 1.2359
+2023-09-11 23:14:17 - train: epoch 0065, iter [02600, 05004], lr: 0.001000, loss: 1.4234
+2023-09-11 23:14:58 - train: epoch 0065, iter [02700, 05004], lr: 0.001000, loss: 1.2547
+2023-09-11 23:15:33 - train: epoch 0065, iter [02800, 05004], lr: 0.001000, loss: 1.3589
+2023-09-11 23:16:10 - train: epoch 0065, iter [02900, 05004], lr: 0.001000, loss: 1.2872
+2023-09-11 23:16:45 - train: epoch 0065, iter [03000, 05004], lr: 0.001000, loss: 1.2652
+2023-09-11 23:17:24 - train: epoch 0065, iter [03100, 05004], lr: 0.001000, loss: 1.1771
+2023-09-11 23:18:01 - train: epoch 0065, iter [03200, 05004], lr: 0.001000, loss: 1.4360
+2023-09-11 23:18:37 - train: epoch 0065, iter [03300, 05004], lr: 0.001000, loss: 1.2247
+2023-09-11 23:19:15 - train: epoch 0065, iter [03400, 05004], lr: 0.001000, loss: 1.1752
+2023-09-11 23:19:52 - train: epoch 0065, iter [03500, 05004], lr: 0.001000, loss: 1.4981
+2023-09-11 23:20:28 - train: epoch 0065, iter [03600, 05004], lr: 0.001000, loss: 1.3164
+2023-09-11 23:21:06 - train: epoch 0065, iter [03700, 05004], lr: 0.001000, loss: 1.2142
+2023-09-11 23:21:41 - train: epoch 0065, iter [03800, 05004], lr: 0.001000, loss: 1.5006
+2023-09-11 23:22:21 - train: epoch 0065, iter [03900, 05004], lr: 0.001000, loss: 1.4423
+2023-09-11 23:22:56 - train: epoch 0065, iter [04000, 05004], lr: 0.001000, loss: 1.5095
+2023-09-11 23:23:32 - train: epoch 0065, iter [04100, 05004], lr: 0.001000, loss: 1.3397
+2023-09-11 23:24:11 - train: epoch 0065, iter [04200, 05004], lr: 0.001000, loss: 1.3260
+2023-09-11 23:24:46 - train: epoch 0065, iter [04300, 05004], lr: 0.001000, loss: 1.3117
+2023-09-11 23:25:22 - train: epoch 0065, iter [04400, 05004], lr: 0.001000, loss: 1.2532
+2023-09-11 23:26:00 - train: epoch 0065, iter [04500, 05004], lr: 0.001000, loss: 1.2928
+2023-09-11 23:26:36 - train: epoch 0065, iter [04600, 05004], lr: 0.001000, loss: 1.3931
+2023-09-11 23:27:13 - train: epoch 0065, iter [04700, 05004], lr: 0.001000, loss: 1.3526
+2023-09-11 23:27:51 - train: epoch 0065, iter [04800, 05004], lr: 0.001000, loss: 1.2342
+2023-09-11 23:28:27 - train: epoch 0065, iter [04900, 05004], lr: 0.001000, loss: 1.4158
+2023-09-11 23:29:00 - train: epoch 0065, iter [05000, 05004], lr: 0.001000, loss: 1.5687
+2023-09-11 23:29:01 - train: epoch 065, train_loss: 1.2834
+2023-09-11 23:30:26 - eval: epoch: 065, acc1: 69.872%, acc5: 89.126%, test_loss: 1.2333, per_image_load_time: 1.362ms, per_image_inference_time: 0.319ms
+2023-09-11 23:30:26 - until epoch: 065, best_acc1: 69.872%
+2023-09-11 23:30:26 - epoch 066 lr: 0.001000
+2023-09-11 23:31:12 - train: epoch 0066, iter [00100, 05004], lr: 0.001000, loss: 1.1765
+2023-09-11 23:31:48 - train: epoch 0066, iter [00200, 05004], lr: 0.001000, loss: 1.4486
+2023-09-11 23:32:23 - train: epoch 0066, iter [00300, 05004], lr: 0.001000, loss: 1.2661
+2023-09-11 23:33:00 - train: epoch 0066, iter [00400, 05004], lr: 0.001000, loss: 1.1668
+2023-09-11 23:33:37 - train: epoch 0066, iter [00500, 05004], lr: 0.001000, loss: 1.3120
+2023-09-11 23:34:16 - train: epoch 0066, iter [00600, 05004], lr: 0.001000, loss: 1.1196
+2023-09-11 23:34:52 - train: epoch 0066, iter [00700, 05004], lr: 0.001000, loss: 1.3979
+2023-09-11 23:35:26 - train: epoch 0066, iter [00800, 05004], lr: 0.001000, loss: 1.5702
+2023-09-11 23:36:05 - train: epoch 0066, iter [00900, 05004], lr: 0.001000, loss: 1.1103
+2023-09-11 23:36:42 - train: epoch 0066, iter [01000, 05004], lr: 0.001000, loss: 1.0184
+2023-09-11 23:37:17 - train: epoch 0066, iter [01100, 05004], lr: 0.001000, loss: 1.3817
+2023-09-11 23:37:54 - train: epoch 0066, iter [01200, 05004], lr: 0.001000, loss: 1.4714
+2023-09-11 23:38:32 - train: epoch 0066, iter [01300, 05004], lr: 0.001000, loss: 1.2766
+2023-09-11 23:39:08 - train: epoch 0066, iter [01400, 05004], lr: 0.001000, loss: 1.1585
+2023-09-11 23:39:44 - train: epoch 0066, iter [01500, 05004], lr: 0.001000, loss: 1.3236
+2023-09-11 23:40:23 - train: epoch 0066, iter [01600, 05004], lr: 0.001000, loss: 1.2215
+2023-09-11 23:40:59 - train: epoch 0066, iter [01700, 05004], lr: 0.001000, loss: 1.3704
+2023-09-11 23:41:34 - train: epoch 0066, iter [01800, 05004], lr: 0.001000, loss: 1.1977
+2023-09-11 23:42:13 - train: epoch 0066, iter [01900, 05004], lr: 0.001000, loss: 1.4346
+2023-09-11 23:42:48 - train: epoch 0066, iter [02000, 05004], lr: 0.001000, loss: 1.0916
+2023-09-11 23:43:26 - train: epoch 0066, iter [02100, 05004], lr: 0.001000, loss: 1.2452
+2023-09-11 23:44:04 - train: epoch 0066, iter [02200, 05004], lr: 0.001000, loss: 1.2994
+2023-09-11 23:44:40 - train: epoch 0066, iter [02300, 05004], lr: 0.001000, loss: 1.3036
+2023-09-11 23:45:18 - train: epoch 0066, iter [02400, 05004], lr: 0.001000, loss: 1.2753
+2023-09-11 23:45:54 - train: epoch 0066, iter [02500, 05004], lr: 0.001000, loss: 1.3193
+2023-09-11 23:46:29 - train: epoch 0066, iter [02600, 05004], lr: 0.001000, loss: 1.1366
+2023-09-11 23:47:08 - train: epoch 0066, iter [02700, 05004], lr: 0.001000, loss: 1.4151
+2023-09-11 23:47:43 - train: epoch 0066, iter [02800, 05004], lr: 0.001000, loss: 1.1283
+2023-09-11 23:48:19 - train: epoch 0066, iter [02900, 05004], lr: 0.001000, loss: 1.1475
+2023-09-11 23:48:58 - train: epoch 0066, iter [03000, 05004], lr: 0.001000, loss: 1.3431
+2023-09-11 23:49:35 - train: epoch 0066, iter [03100, 05004], lr: 0.001000, loss: 1.3585
+2023-09-11 23:50:11 - train: epoch 0066, iter [03200, 05004], lr: 0.001000, loss: 1.2393
+2023-09-11 23:50:46 - train: epoch 0066, iter [03300, 05004], lr: 0.001000, loss: 1.0997
+2023-09-11 23:51:25 - train: epoch 0066, iter [03400, 05004], lr: 0.001000, loss: 1.2828
+2023-09-11 23:52:02 - train: epoch 0066, iter [03500, 05004], lr: 0.001000, loss: 1.2560
+2023-09-11 23:52:38 - train: epoch 0066, iter [03600, 05004], lr: 0.001000, loss: 1.2916
+2023-09-11 23:53:15 - train: epoch 0066, iter [03700, 05004], lr: 0.001000, loss: 1.2703
+2023-09-11 23:53:53 - train: epoch 0066, iter [03800, 05004], lr: 0.001000, loss: 1.3513
+2023-09-11 23:54:29 - train: epoch 0066, iter [03900, 05004], lr: 0.001000, loss: 1.2364
+2023-09-11 23:55:05 - train: epoch 0066, iter [04000, 05004], lr: 0.001000, loss: 1.2672
+2023-09-11 23:55:43 - train: epoch 0066, iter [04100, 05004], lr: 0.001000, loss: 1.0790
+2023-09-11 23:56:20 - train: epoch 0066, iter [04200, 05004], lr: 0.001000, loss: 1.1089
+2023-09-11 23:56:56 - train: epoch 0066, iter [04300, 05004], lr: 0.001000, loss: 1.1601
+2023-09-11 23:57:32 - train: epoch 0066, iter [04400, 05004], lr: 0.001000, loss: 1.1286
+2023-09-11 23:58:09 - train: epoch 0066, iter [04500, 05004], lr: 0.001000, loss: 1.4338
+2023-09-11 23:58:45 - train: epoch 0066, iter [04600, 05004], lr: 0.001000, loss: 1.2591
+2023-09-11 23:59:22 - train: epoch 0066, iter [04700, 05004], lr: 0.001000, loss: 1.1884
+2023-09-11 23:59:58 - train: epoch 0066, iter [04800, 05004], lr: 0.001000, loss: 1.3470