Upload folder using huggingface_hub
Browse files
- 2026-04-12-183000-multi-frame-predictor/__pycache__/predict.cpython-311.pyc +0 -0
- 2026-04-12-183000-multi-frame-predictor/config.json +1 -0
- 2026-04-12-183000-multi-frame-predictor/model.pt +3 -0
- 2026-04-12-183000-multi-frame-predictor/predict.py +100 -0
- 2026-04-12-183000-multi-frame-predictor/train.log +190 -0
2026-04-12-183000-multi-frame-predictor/__pycache__/predict.cpython-311.pyc
ADDED
|
Binary file (6.55 kB). View file
|
|
|
2026-04-12-183000-multi-frame-predictor/config.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"in_channels": 24, "channels": [32, 64, 128, 256], "context_len": 8, "num_future": 8, "model_class": "MultiFrameFlowWarpUNet"}
|
2026-04-12-183000-multi-frame-predictor/model.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3662b2dda8d32d836c748d064856e854b413401a26e394ce46af98c4d1b9d737
|
| 3 |
+
size 15226058
|
2026-04-12-183000-multi-frame-predictor/predict.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Inference for multi-frame predictor with caching + TTA."""
|
| 2 |
+
import json
|
| 3 |
+
import numpy as np
|
| 4 |
+
import torch
|
| 5 |
+
import sys
|
| 6 |
+
sys.path.insert(0, "/home/coder/code")
|
| 7 |
+
from multi_frame_model import MultiFrameFlowWarpUNet
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def load_model(model_dir: str):
    """Load a trained MultiFrameFlowWarpUNet and return an inference bundle.

    The returned dict carries the model and device plus the mutable
    per-rollout prediction-cache state consumed by predict_next_frame.
    """
    with open(f"{model_dir}/config.json") as cfg_file:
        cfg = json.load(cfg_file)

    net = MultiFrameFlowWarpUNet(
        in_channels=cfg["in_channels"],
        channels=cfg["channels"],
        num_future=cfg.get("num_future", 8),
    )

    # Checkpoint tensors may not be float32 (e.g. saved in half precision);
    # cast every entry before loading.
    state = torch.load(f"{model_dir}/model.pt", map_location="cpu", weights_only=True)
    net.load_state_dict({name: tensor.float() for name, tensor in state.items()})
    net.eval()

    dev = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    net = net.to(dev)

    return {
        "model": net,
        "device": dev,
        "context_len": cfg["context_len"],
        "num_future": cfg.get("num_future", 8),
        # Once populated: (context_hash, predictions_list, next_step_index).
        "cache": None,
        "call_count": 0,
    }
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _context_hash(context_frames):
|
| 35 |
+
"""Hash first frame to identify a rollout."""
|
| 36 |
+
return context_frames[0].tobytes()[:1024]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _prepare_input(context_frames, context_len):
|
| 40 |
+
N = len(context_frames)
|
| 41 |
+
if N >= context_len:
|
| 42 |
+
frames = context_frames[-context_len:]
|
| 43 |
+
else:
|
| 44 |
+
pad = np.repeat(context_frames[:1], context_len - N, axis=0)
|
| 45 |
+
frames = np.concatenate([pad, context_frames], axis=0)
|
| 46 |
+
frames_f = frames.astype(np.float32) / 255.0
|
| 47 |
+
frames_f = np.transpose(frames_f, (0, 3, 1, 2))
|
| 48 |
+
context = frames_f.reshape(1, -1, 64, 64)
|
| 49 |
+
last_frame = frames_f[-1:]
|
| 50 |
+
return context, last_frame
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _run_model_with_tta(model, device, context_frames, context_len):
    """Predict future frames, averaging plain and horizontally-flipped passes.

    Returns a list of uint8 HWC frames, one per predicted future step.
    """

    def _forward(frames):
        # One no-grad forward pass over a single context window.
        ctx_np, last_np = _prepare_input(frames, context_len)
        with torch.no_grad():
            preds, _ = model(
                torch.from_numpy(ctx_np).to(device),
                torch.from_numpy(last_np).to(device),
            )
        return preds

    preds_plain = _forward(context_frames)
    # Horizontal-flip TTA: mirror the width axis of the (N, H, W, C) input.
    preds_mirror = _forward(context_frames[:, :, ::-1, :].copy())

    outputs = []
    for plain, mirrored in zip(preds_plain, preds_mirror):
        # Un-mirror the flipped prediction (width is the last tensor dim),
        # then average the two views.
        merged = (plain + mirrored.flip(-1)) / 2.0
        frame = np.transpose(merged[0].cpu().numpy(), (1, 2, 0))
        outputs.append((frame * 255.0).clip(0, 255).astype(np.uint8))
    return outputs
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def predict_next_frame(model_dict, context_frames: np.ndarray) -> np.ndarray:
    """Return the predicted next frame, serving from the per-rollout cache.

    On a cache miss the model runs once (with TTA) over the original context
    window and all future frames are cached; subsequent calls for the same
    rollout pop successive cached predictions until they are exhausted.
    """
    net = model_dict["model"]
    dev = model_dict["device"]
    ctx_len = model_dict["context_len"]

    rollout_id = _context_hash(context_frames)

    # Serve from cache while this rollout still has unused predictions.
    cache = model_dict["cache"]
    if cache is not None:
        saved_id, saved_preds, step = cache
        if saved_id == rollout_id and step < len(saved_preds):
            model_dict["cache"] = (saved_id, saved_preds, step + 1)
            return saved_preds[step]

    # Cache miss: predict from the original (first ctx_len) frames only,
    # not from any frames the model itself produced earlier.
    if len(context_frames) >= ctx_len:
        base = context_frames[:ctx_len]
    else:
        base = context_frames
    future = _run_model_with_tta(net, dev, base, ctx_len)

    # Hand back the first frame and remember the rest for later calls.
    model_dict["cache"] = (rollout_id, future, 1)
    return future[0]
|
2026-04-12-183000-multi-frame-predictor/train.log
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[18:34:04] Model: MultiFrameFlowWarpUNet, 7,598,128 params, 8 future steps
|
| 2 |
+
[18:34:04] === Training Multi-Frame Predictor ===
|
| 3 |
+
[18:34:09] Train: 5549, Val: 682
|
| 4 |
+
[18:34:28] Ep 1/150 | Train: 0.270987 | Val: 0.265299 | LR: 3.00e-04
|
| 5 |
+
[18:34:28] -> Saved (val=0.265299)
|
| 6 |
+
[18:34:45] Ep 2/150 | Train: 0.238669 | Val: 0.249943 | LR: 3.00e-04
|
| 7 |
+
[18:34:45] -> Saved (val=0.249943)
|
| 8 |
+
[18:35:02] Ep 3/150 | Train: 0.227459 | Val: 0.245221 | LR: 3.00e-04
|
| 9 |
+
[18:35:02] -> Saved (val=0.245221)
|
| 10 |
+
[18:35:19] Ep 4/150 | Train: 0.219248 | Val: 0.233318 | LR: 2.99e-04
|
| 11 |
+
[18:35:19] -> Saved (val=0.233318)
|
| 12 |
+
[18:35:37] Ep 5/150 | Train: 0.213438 | Val: 0.228646 | LR: 2.99e-04
|
| 13 |
+
[18:35:37] -> Saved (val=0.228646)
|
| 14 |
+
[18:35:54] Ep 6/150 | Train: 0.209102 | Val: 0.227927 | LR: 2.99e-04
|
| 15 |
+
[18:35:54] -> Saved (val=0.227927)
|
| 16 |
+
[18:36:11] Ep 7/150 | Train: 0.205559 | Val: 0.226358 | LR: 2.98e-04
|
| 17 |
+
[18:36:11] -> Saved (val=0.226358)
|
| 18 |
+
[18:36:28] Ep 8/150 | Train: 0.202078 | Val: 0.219953 | LR: 2.98e-04
|
| 19 |
+
[18:36:28] -> Saved (val=0.219953)
|
| 20 |
+
[18:36:46] Ep 9/150 | Train: 0.199415 | Val: 0.217367 | LR: 2.97e-04
|
| 21 |
+
[18:36:46] -> Saved (val=0.217367)
|
| 22 |
+
[18:37:03] Ep 10/150 | Train: 0.196532 | Val: 0.214279 | LR: 2.97e-04
|
| 23 |
+
[18:37:03] -> Saved (val=0.214279)
|
| 24 |
+
[18:37:21] Ep 11/150 | Train: 0.193131 | Val: 0.213072 | LR: 2.96e-04
|
| 25 |
+
[18:37:21] -> Saved (val=0.213072)
|
| 26 |
+
[18:37:38] Ep 12/150 | Train: 0.190797 | Val: 0.208535 | LR: 2.95e-04
|
| 27 |
+
[18:37:38] -> Saved (val=0.208535)
|
| 28 |
+
[18:37:55] Ep 13/150 | Train: 0.188919 | Val: 0.209274 | LR: 2.94e-04
|
| 29 |
+
[18:38:13] Ep 14/150 | Train: 0.185595 | Val: 0.208103 | LR: 2.94e-04
|
| 30 |
+
[18:38:13] -> Saved (val=0.208103)
|
| 31 |
+
[18:38:29] Ep 15/150 | Train: 0.183514 | Val: 0.203595 | LR: 2.93e-04
|
| 32 |
+
[18:38:29] -> Saved (val=0.203595)
|
| 33 |
+
[18:38:46] Ep 16/150 | Train: 0.180417 | Val: 0.203265 | LR: 2.92e-04
|
| 34 |
+
[18:38:46] -> Saved (val=0.203265)
|
| 35 |
+
[18:39:02] Ep 17/150 | Train: 0.178071 | Val: 0.201084 | LR: 2.91e-04
|
| 36 |
+
[18:39:02] -> Saved (val=0.201084)
|
| 37 |
+
[18:39:20] Ep 18/150 | Train: 0.175347 | Val: 0.200966 | LR: 2.90e-04
|
| 38 |
+
[18:39:20] -> Saved (val=0.200966)
|
| 39 |
+
[18:39:36] Ep 19/150 | Train: 0.173549 | Val: 0.197991 | LR: 2.88e-04
|
| 40 |
+
[18:39:36] -> Saved (val=0.197991)
|
| 41 |
+
[18:39:53] Ep 20/150 | Train: 0.171005 | Val: 0.196150 | LR: 2.87e-04
|
| 42 |
+
[18:39:53] -> Saved (val=0.196150)
|
| 43 |
+
[18:40:11] Ep 21/150 | Train: 0.168353 | Val: 0.196155 | LR: 2.86e-04
|
| 44 |
+
[18:40:27] Ep 22/150 | Train: 0.166678 | Val: 0.195909 | LR: 2.84e-04
|
| 45 |
+
[18:40:27] -> Saved (val=0.195909)
|
| 46 |
+
[18:40:43] Ep 23/150 | Train: 0.164730 | Val: 0.193184 | LR: 2.83e-04
|
| 47 |
+
[18:40:43] -> Saved (val=0.193184)
|
| 48 |
+
[18:41:01] Ep 24/150 | Train: 0.162152 | Val: 0.192711 | LR: 2.82e-04
|
| 49 |
+
[18:41:01] -> Saved (val=0.192711)
|
| 50 |
+
[18:41:19] Ep 25/150 | Train: 0.160128 | Val: 0.191434 | LR: 2.80e-04
|
| 51 |
+
[18:41:19] -> Saved (val=0.191434)
|
| 52 |
+
[18:41:37] Ep 26/150 | Train: 0.158279 | Val: 0.189010 | LR: 2.78e-04
|
| 53 |
+
[18:41:37] -> Saved (val=0.189010)
|
| 54 |
+
[18:41:55] Ep 27/150 | Train: 0.155953 | Val: 0.189318 | LR: 2.77e-04
|
| 55 |
+
[18:42:11] Ep 28/150 | Train: 0.154951 | Val: 0.187958 | LR: 2.75e-04
|
| 56 |
+
[18:42:11] -> Saved (val=0.187958)
|
| 57 |
+
[18:42:28] Ep 29/150 | Train: 0.152447 | Val: 0.188579 | LR: 2.73e-04
|
| 58 |
+
[18:42:46] Ep 30/150 | Train: 0.150617 | Val: 0.187397 | LR: 2.71e-04
|
| 59 |
+
[18:42:46] -> Saved (val=0.187397)
|
| 60 |
+
[18:43:02] Ep 31/150 | Train: 0.148259 | Val: 0.183241 | LR: 2.70e-04
|
| 61 |
+
[18:43:02] -> Saved (val=0.183241)
|
| 62 |
+
[18:43:19] Ep 32/150 | Train: 0.146781 | Val: 0.184136 | LR: 2.68e-04
|
| 63 |
+
[18:43:37] Ep 33/150 | Train: 0.145218 | Val: 0.184332 | LR: 2.66e-04
|
| 64 |
+
[18:43:55] Ep 34/150 | Train: 0.143477 | Val: 0.183647 | LR: 2.64e-04
|
| 65 |
+
[18:44:13] Ep 35/150 | Train: 0.141384 | Val: 0.184218 | LR: 2.62e-04
|
| 66 |
+
[18:44:31] Ep 36/150 | Train: 0.140192 | Val: 0.183393 | LR: 2.59e-04
|
| 67 |
+
[18:44:49] Ep 37/150 | Train: 0.138951 | Val: 0.183975 | LR: 2.57e-04
|
| 68 |
+
[18:45:06] Ep 38/150 | Train: 0.136936 | Val: 0.180156 | LR: 2.55e-04
|
| 69 |
+
[18:45:06] -> Saved (val=0.180156)
|
| 70 |
+
[18:45:24] Ep 39/150 | Train: 0.134547 | Val: 0.182037 | LR: 2.53e-04
|
| 71 |
+
[18:45:39] Ep 40/150 | Train: 0.133898 | Val: 0.179793 | LR: 2.51e-04
|
| 72 |
+
[18:45:39] -> Saved (val=0.179793)
|
| 73 |
+
[18:45:57] Ep 41/150 | Train: 0.132669 | Val: 0.180468 | LR: 2.48e-04
|
| 74 |
+
[18:46:14] Ep 42/150 | Train: 0.130988 | Val: 0.179199 | LR: 2.46e-04
|
| 75 |
+
[18:46:14] -> Saved (val=0.179199)
|
| 76 |
+
[18:46:32] Ep 43/150 | Train: 0.129602 | Val: 0.178855 | LR: 2.43e-04
|
| 77 |
+
[18:46:32] -> Saved (val=0.178855)
|
| 78 |
+
[18:46:50] Ep 44/150 | Train: 0.128149 | Val: 0.177483 | LR: 2.41e-04
|
| 79 |
+
[18:46:50] -> Saved (val=0.177483)
|
| 80 |
+
[18:47:08] Ep 45/150 | Train: 0.126982 | Val: 0.180292 | LR: 2.38e-04
|
| 81 |
+
[18:47:26] Ep 46/150 | Train: 0.125357 | Val: 0.178541 | LR: 2.36e-04
|
| 82 |
+
[18:47:43] Ep 47/150 | Train: 0.124067 | Val: 0.177214 | LR: 2.33e-04
|
| 83 |
+
[18:47:43] -> Saved (val=0.177214)
|
| 84 |
+
[18:48:01] Ep 48/150 | Train: 0.123057 | Val: 0.179346 | LR: 2.31e-04
|
| 85 |
+
[18:48:19] Ep 49/150 | Train: 0.121519 | Val: 0.179305 | LR: 2.28e-04
|
| 86 |
+
[18:48:37] Ep 50/150 | Train: 0.121056 | Val: 0.179077 | LR: 2.25e-04
|
| 87 |
+
[18:48:55] Ep 51/150 | Train: 0.119882 | Val: 0.180993 | LR: 2.23e-04
|
| 88 |
+
[18:49:12] Ep 52/150 | Train: 0.118635 | Val: 0.180216 | LR: 2.20e-04
|
| 89 |
+
[18:49:30] Ep 53/150 | Train: 0.117344 | Val: 0.179428 | LR: 2.17e-04
|
| 90 |
+
[18:49:44] Ep 54/150 | Train: 0.116161 | Val: 0.176803 | LR: 2.14e-04
|
| 91 |
+
[18:49:44] -> Saved (val=0.176803)
|
| 92 |
+
[18:49:59] Ep 55/150 | Train: 0.115789 | Val: 0.176924 | LR: 2.11e-04
|
| 93 |
+
[18:50:17] Ep 56/150 | Train: 0.114291 | Val: 0.176342 | LR: 2.08e-04
|
| 94 |
+
[18:50:17] -> Saved (val=0.176342)
|
| 95 |
+
[18:50:33] Ep 57/150 | Train: 0.113186 | Val: 0.177557 | LR: 2.06e-04
|
| 96 |
+
[18:50:51] Ep 58/150 | Train: 0.112255 | Val: 0.178177 | LR: 2.03e-04
|
| 97 |
+
[18:51:08] Ep 59/150 | Train: 0.111903 | Val: 0.177198 | LR: 2.00e-04
|
| 98 |
+
[18:51:26] Ep 60/150 | Train: 0.110467 | Val: 0.179495 | LR: 1.97e-04
|
| 99 |
+
[18:51:44] Ep 61/150 | Train: 0.109932 | Val: 0.176206 | LR: 1.94e-04
|
| 100 |
+
[18:51:44] -> Saved (val=0.176206)
|
| 101 |
+
[18:52:00] Ep 62/150 | Train: 0.108810 | Val: 0.176676 | LR: 1.91e-04
|
| 102 |
+
[18:52:18] Ep 63/150 | Train: 0.108106 | Val: 0.176707 | LR: 1.88e-04
|
| 103 |
+
[18:52:35] Ep 64/150 | Train: 0.107083 | Val: 0.176978 | LR: 1.85e-04
|
| 104 |
+
[18:52:53] Ep 65/150 | Train: 0.106822 | Val: 0.176965 | LR: 1.82e-04
|
| 105 |
+
[18:53:11] Ep 66/150 | Train: 0.105645 | Val: 0.178744 | LR: 1.79e-04
|
| 106 |
+
[18:53:29] Ep 67/150 | Train: 0.105095 | Val: 0.176539 | LR: 1.75e-04
|
| 107 |
+
[18:53:44] Ep 68/150 | Train: 0.104511 | Val: 0.177088 | LR: 1.72e-04
|
| 108 |
+
[18:54:02] Ep 69/150 | Train: 0.103907 | Val: 0.178603 | LR: 1.69e-04
|
| 109 |
+
[18:54:19] Ep 70/150 | Train: 0.103250 | Val: 0.176327 | LR: 1.66e-04
|
| 110 |
+
[18:54:35] Ep 71/150 | Train: 0.102027 | Val: 0.177771 | LR: 1.63e-04
|
| 111 |
+
[18:54:51] Ep 72/150 | Train: 0.101291 | Val: 0.178515 | LR: 1.60e-04
|
| 112 |
+
[18:55:09] Ep 73/150 | Train: 0.100467 | Val: 0.178190 | LR: 1.57e-04
|
| 113 |
+
[18:55:27] Ep 74/150 | Train: 0.099838 | Val: 0.177502 | LR: 1.54e-04
|
| 114 |
+
[18:55:44] Ep 75/150 | Train: 0.099405 | Val: 0.177910 | LR: 1.50e-04
|
| 115 |
+
[18:55:56] Ep 76/150 | Train: 0.098765 | Val: 0.179082 | LR: 1.47e-04
|
| 116 |
+
[18:56:12] Ep 77/150 | Train: 0.098560 | Val: 0.180269 | LR: 1.44e-04
|
| 117 |
+
[18:56:30] Ep 78/150 | Train: 0.097326 | Val: 0.177552 | LR: 1.41e-04
|
| 118 |
+
[18:56:48] Ep 79/150 | Train: 0.096931 | Val: 0.178769 | LR: 1.38e-04
|
| 119 |
+
[18:57:05] Ep 80/150 | Train: 0.096304 | Val: 0.178495 | LR: 1.35e-04
|
| 120 |
+
[18:57:20] Ep 81/150 | Train: 0.095898 | Val: 0.179506 | LR: 1.32e-04
|
| 121 |
+
[18:57:35] Ep 82/150 | Train: 0.095115 | Val: 0.179242 | LR: 1.29e-04
|
| 122 |
+
[18:57:50] Ep 83/150 | Train: 0.095051 | Val: 0.180229 | LR: 1.26e-04
|
| 123 |
+
[18:58:08] Ep 84/150 | Train: 0.094142 | Val: 0.179763 | LR: 1.22e-04
|
| 124 |
+
[18:58:25] Ep 85/150 | Train: 0.093578 | Val: 0.179043 | LR: 1.19e-04
|
| 125 |
+
[18:58:43] Ep 86/150 | Train: 0.093195 | Val: 0.178754 | LR: 1.16e-04
|
| 126 |
+
[18:59:01] Ep 87/150 | Train: 0.092534 | Val: 0.180270 | LR: 1.13e-04
|
| 127 |
+
[18:59:17] Ep 88/150 | Train: 0.091913 | Val: 0.179877 | LR: 1.10e-04
|
| 128 |
+
[18:59:34] Ep 89/150 | Train: 0.091610 | Val: 0.180932 | LR: 1.07e-04
|
| 129 |
+
[18:59:52] Ep 90/150 | Train: 0.091188 | Val: 0.179976 | LR: 1.04e-04
|
| 130 |
+
[19:00:10] Ep 91/150 | Train: 0.090578 | Val: 0.180836 | LR: 1.01e-04
|
| 131 |
+
[19:00:27] Ep 92/150 | Train: 0.090313 | Val: 0.178567 | LR: 9.84e-05
|
| 132 |
+
[19:00:45] Ep 93/150 | Train: 0.089523 | Val: 0.180285 | LR: 9.55e-05
|
| 133 |
+
[19:01:03] Ep 94/150 | Train: 0.089179 | Val: 0.180738 | LR: 9.26e-05
|
| 134 |
+
[19:01:21] Ep 95/150 | Train: 0.088883 | Val: 0.180872 | LR: 8.97e-05
|
| 135 |
+
[19:01:39] Ep 96/150 | Train: 0.088539 | Val: 0.181221 | LR: 8.68e-05
|
| 136 |
+
[19:01:57] Ep 97/150 | Train: 0.087988 | Val: 0.181604 | LR: 8.40e-05
|
| 137 |
+
[19:02:15] Ep 98/150 | Train: 0.087637 | Val: 0.181991 | LR: 8.12e-05
|
| 138 |
+
[19:02:31] Ep 99/150 | Train: 0.087088 | Val: 0.182343 | LR: 7.85e-05
|
| 139 |
+
[19:02:49] Ep 100/150 | Train: 0.086710 | Val: 0.181566 | LR: 7.57e-05
|
| 140 |
+
[19:03:06] Ep 101/150 | Train: 0.086175 | Val: 0.182376 | LR: 7.31e-05
|
| 141 |
+
[19:03:24] Ep 102/150 | Train: 0.085925 | Val: 0.181504 | LR: 7.04e-05
|
| 142 |
+
[19:03:42] Ep 103/150 | Train: 0.085543 | Val: 0.182384 | LR: 6.78e-05
|
| 143 |
+
[19:03:59] Ep 104/150 | Train: 0.085254 | Val: 0.183121 | LR: 6.52e-05
|
| 144 |
+
[19:04:17] Ep 105/150 | Train: 0.084892 | Val: 0.182109 | LR: 6.26e-05
|
| 145 |
+
[19:04:34] Ep 106/150 | Train: 0.084532 | Val: 0.182574 | LR: 6.01e-05
|
| 146 |
+
[19:04:51] Ep 107/150 | Train: 0.084227 | Val: 0.182747 | LR: 5.76e-05
|
| 147 |
+
[19:05:09] Ep 108/150 | Train: 0.083896 | Val: 0.182982 | LR: 5.52e-05
|
| 148 |
+
[19:05:27] Ep 109/150 | Train: 0.083581 | Val: 0.183633 | LR: 5.28e-05
|
| 149 |
+
[19:05:45] Ep 110/150 | Train: 0.083239 | Val: 0.183232 | LR: 5.05e-05
|
| 150 |
+
[19:06:02] Ep 111/150 | Train: 0.082965 | Val: 0.184408 | LR: 4.82e-05
|
| 151 |
+
[19:06:20] Ep 112/150 | Train: 0.082642 | Val: 0.184344 | LR: 4.59e-05
|
| 152 |
+
[19:06:38] Ep 113/150 | Train: 0.082383 | Val: 0.185127 | LR: 4.37e-05
|
| 153 |
+
[19:06:56] Ep 114/150 | Train: 0.082182 | Val: 0.183392 | LR: 4.15e-05
|
| 154 |
+
[19:07:14] Ep 115/150 | Train: 0.081781 | Val: 0.184109 | LR: 3.94e-05
|
| 155 |
+
[19:07:30] Ep 116/150 | Train: 0.081485 | Val: 0.185165 | LR: 3.73e-05
|
| 156 |
+
[19:07:46] Ep 117/150 | Train: 0.081341 | Val: 0.185036 | LR: 3.53e-05
|
| 157 |
+
[19:08:04] Ep 118/150 | Train: 0.081065 | Val: 0.185143 | LR: 3.33e-05
|
| 158 |
+
[19:08:22] Ep 119/150 | Train: 0.080705 | Val: 0.185618 | LR: 3.14e-05
|
| 159 |
+
[19:08:40] Ep 120/150 | Train: 0.080531 | Val: 0.185500 | LR: 2.96e-05
|
| 160 |
+
[19:08:58] Ep 121/150 | Train: 0.080383 | Val: 0.185484 | LR: 2.77e-05
|
| 161 |
+
[19:09:15] Ep 122/150 | Train: 0.080158 | Val: 0.185682 | LR: 2.60e-05
|
| 162 |
+
[19:09:34] Ep 123/150 | Train: 0.079961 | Val: 0.185514 | LR: 2.43e-05
|
| 163 |
+
[19:09:52] Ep 124/150 | Train: 0.079878 | Val: 0.185978 | LR: 2.26e-05
|
| 164 |
+
[19:10:08] Ep 125/150 | Train: 0.079607 | Val: 0.186147 | LR: 2.10e-05
|
| 165 |
+
[19:10:26] Ep 126/150 | Train: 0.079404 | Val: 0.186743 | LR: 1.95e-05
|
| 166 |
+
[19:10:44] Ep 127/150 | Train: 0.079413 | Val: 0.186438 | LR: 1.80e-05
|
| 167 |
+
[19:11:01] Ep 128/150 | Train: 0.079241 | Val: 0.186822 | LR: 1.66e-05
|
| 168 |
+
[19:11:19] Ep 129/150 | Train: 0.078908 | Val: 0.186848 | LR: 1.52e-05
|
| 169 |
+
[19:11:37] Ep 130/150 | Train: 0.078896 | Val: 0.186839 | LR: 1.39e-05
|
| 170 |
+
[19:11:55] Ep 131/150 | Train: 0.078783 | Val: 0.186915 | LR: 1.27e-05
|
| 171 |
+
[19:12:13] Ep 132/150 | Train: 0.078701 | Val: 0.187543 | LR: 1.15e-05
|
| 172 |
+
[19:12:30] Ep 133/150 | Train: 0.078543 | Val: 0.187428 | LR: 1.04e-05
|
| 173 |
+
[19:12:46] Ep 134/150 | Train: 0.078507 | Val: 0.187269 | LR: 9.32e-06
|
| 174 |
+
[19:13:02] Ep 135/150 | Train: 0.078374 | Val: 0.187519 | LR: 8.32e-06
|
| 175 |
+
[19:13:20] Ep 136/150 | Train: 0.078200 | Val: 0.187711 | LR: 7.38e-06
|
| 176 |
+
[19:13:38] Ep 137/150 | Train: 0.078222 | Val: 0.187528 | LR: 6.51e-06
|
| 177 |
+
[19:13:56] Ep 138/150 | Train: 0.078089 | Val: 0.187592 | LR: 5.70e-06
|
| 178 |
+
[19:14:14] Ep 139/150 | Train: 0.078101 | Val: 0.187817 | LR: 4.95e-06
|
| 179 |
+
[19:14:30] Ep 140/150 | Train: 0.078047 | Val: 0.187897 | LR: 4.27e-06
|
| 180 |
+
[19:14:48] Ep 141/150 | Train: 0.078085 | Val: 0.187957 | LR: 3.65e-06
|
| 181 |
+
[19:15:05] Ep 142/150 | Train: 0.077895 | Val: 0.187949 | LR: 3.09e-06
|
| 182 |
+
[19:15:23] Ep 143/150 | Train: 0.077839 | Val: 0.188029 | LR: 2.60e-06
|
| 183 |
+
[19:15:41] Ep 144/150 | Train: 0.077922 | Val: 0.188082 | LR: 2.18e-06
|
| 184 |
+
[19:15:59] Ep 145/150 | Train: 0.077893 | Val: 0.188253 | LR: 1.82e-06
|
| 185 |
+
[19:16:17] Ep 146/150 | Train: 0.077711 | Val: 0.188122 | LR: 1.52e-06
|
| 186 |
+
[19:16:35] Ep 147/150 | Train: 0.077708 | Val: 0.188216 | LR: 1.30e-06
|
| 187 |
+
[19:16:53] Ep 148/150 | Train: 0.077580 | Val: 0.188216 | LR: 1.13e-06
|
| 188 |
+
[19:17:10] Ep 149/150 | Train: 0.077824 | Val: 0.188224 | LR: 1.03e-06
|
| 189 |
+
[19:17:28] Ep 150/150 | Train: 0.077790 | Val: 0.188195 | LR: 1.00e-06
|
| 190 |
+
[19:17:28] Training complete.
|