ojaffe committed on
Commit
24b6338
·
verified ·
1 Parent(s): 191dd75

Upload folder using huggingface_hub

Browse files
2026-04-12-114000-flow-warp-optimized-v1/__pycache__/predict.cpython-311.pyc ADDED
Binary file (5.07 kB). View file
 
2026-04-12-114000-flow-warp-optimized-v1/config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"in_channels": 24, "channels": [32, 64, 128, 256], "context_len": 8, "model_class": "FlowWarpAttnUNet"}
2026-04-12-114000-flow-warp-optimized-v1/model.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:11a1b096f58be23df8012b0dc9b082d7c0eeea9d343538648d9a88869e51ad8e
3
+ size 15223306
2026-04-12-114000-flow-warp-optimized-v1/predict.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Inference for optimized flow-warp model + TTA."""
2
+ import json
3
+ import numpy as np
4
+ import torch
5
+ import sys
6
+ sys.path.insert(0, "/home/coder/code")
7
+ from flow_warp_attn_model import FlowWarpAttnUNet
8
+
9
+
10
def load_model(model_dir: str):
    """Load the FlowWarpAttnUNet checkpoint stored under *model_dir*.

    Reads ``config.json`` for the architecture hyper-parameters, restores
    the weights from ``model.pt`` (cast to float32 before loading), puts
    the network in eval mode, and moves it to CUDA when available.

    Returns a dict with keys ``"model"``, ``"device"``, and
    ``"context_len"``.
    """
    with open(f"{model_dir}/config.json") as cfg_file:
        cfg = json.load(cfg_file)

    net = FlowWarpAttnUNet(in_channels=cfg["in_channels"], channels=cfg["channels"])
    state = torch.load(f"{model_dir}/model.pt", map_location="cpu", weights_only=True)
    # The checkpoint may hold reduced-precision tensors; force float32.
    net.load_state_dict({name: tensor.float() for name, tensor in state.items()})
    net.eval()

    dev = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    return {"model": net.to(dev), "device": dev, "context_len": cfg["context_len"]}
21
+
22
+
23
+ def _prepare_input(context_frames, context_len):
24
+ N = len(context_frames)
25
+ if N >= context_len:
26
+ frames = context_frames[-context_len:]
27
+ else:
28
+ pad = np.repeat(context_frames[:1], context_len - N, axis=0)
29
+ frames = np.concatenate([pad, context_frames], axis=0)
30
+ frames_f = frames.astype(np.float32) / 255.0
31
+ frames_f = np.transpose(frames_f, (0, 3, 1, 2))
32
+ context = frames_f.reshape(1, -1, 64, 64)
33
+ last_frame = frames_f[-1:]
34
+ return context, last_frame
35
+
36
+
37
def predict_next_frame(model_dict, context_frames: np.ndarray) -> np.ndarray:
    """Predict the next frame with horizontal-flip test-time augmentation.

    Parameters
    ----------
    model_dict : dict
        Output of ``load_model`` ("model", "device", "context_len").
    context_frames : np.ndarray
        uint8 array of shape (N, H, W, C) with the recent frames.

    Returns
    -------
    np.ndarray
        uint8 (H, W, C) frame: the average of a plain forward pass and a
        width-mirrored pass (prediction mirrored back before averaging).
    """
    net = model_dict["model"]
    dev = model_dict["device"]
    ctx_len = model_dict["context_len"]

    def _forward(frames):
        # One inference pass: prepare tensors, run the model, keep the frame.
        ctx_np, last_np = _prepare_input(frames, ctx_len)
        with torch.no_grad():
            out, _ = net(
                torch.from_numpy(ctx_np).to(dev),
                torch.from_numpy(last_np).to(dev),
            )
        return out

    pred_plain = _forward(context_frames)

    # Second TTA pass: mirror the width axis, predict, then mirror back.
    mirrored = context_frames[:, :, ::-1, :].copy()
    pred_mirror = _forward(mirrored).flip(-1)

    avg = (pred_plain + pred_mirror) / 2.0
    frame = np.transpose(avg[0].cpu().numpy(), (1, 2, 0))
    return (frame * 255.0).clip(0, 255).astype(np.uint8)
2026-04-12-114000-flow-warp-optimized-v1/train.log ADDED
@@ -0,0 +1,334 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [11:43:20] Model: FlowWarpAttnUNet, 7,596,742 params
2
+ [11:43:20] === Phase 1: Single-step ===
3
+ [11:43:20] Loading data...
4
+ [11:43:25] Train: 44392, Val: 5456
5
+ [11:44:07] P1 Ep 1/200 | Train: 0.132223 | Val: 0.106632 | LR: 6.80e-05
6
+ [11:44:07] -> Saved (val=0.106632)
7
+ [11:44:47] P1 Ep 2/200 | Train: 0.096301 | Val: 0.092404 | LR: 1.26e-04
8
+ [11:44:47] -> Saved (val=0.092404)
9
+ [11:45:26] P1 Ep 3/200 | Train: 0.087938 | Val: 0.084875 | LR: 1.84e-04
10
+ [11:45:26] -> Saved (val=0.084875)
11
+ [11:46:06] P1 Ep 4/200 | Train: 0.083635 | Val: 0.082010 | LR: 2.42e-04
12
+ [11:46:06] -> Saved (val=0.082010)
13
+ [11:46:46] P1 Ep 5/200 | Train: 0.081080 | Val: 0.077552 | LR: 3.00e-04
14
+ [11:46:46] -> Saved (val=0.077552)
15
+ [11:47:26] P1 Ep 6/200 | Train: 0.077537 | Val: 0.074227 | LR: 2.99e-04
16
+ [11:47:26] -> Saved (val=0.074227)
17
+ [11:48:06] P1 Ep 7/200 | Train: 0.074417 | Val: 0.072837 | LR: 2.97e-04
18
+ [11:48:06] -> Saved (val=0.072837)
19
+ [11:48:47] P1 Ep 8/200 | Train: 0.072262 | Val: 0.070881 | LR: 2.93e-04
20
+ [11:48:47] -> Saved (val=0.070881)
21
+ [11:49:25] P1 Ep 9/200 | Train: 0.069055 | Val: 0.067521 | LR: 2.87e-04
22
+ [11:49:25] -> Saved (val=0.067521)
23
+ [11:50:05] P1 Ep 10/200 | Train: 0.067683 | Val: 0.067618 | LR: 2.80e-04
24
+ [11:50:45] P1 Ep 11/200 | Train: 0.066206 | Val: 0.065794 | LR: 2.71e-04
25
+ [11:50:45] -> Saved (val=0.065794)
26
+ [11:51:25] P1 Ep 12/200 | Train: 0.063833 | Val: 0.064820 | LR: 2.62e-04
27
+ [11:51:25] -> Saved (val=0.064820)
28
+ [11:52:06] P1 Ep 13/200 | Train: 0.062314 | Val: 0.062959 | LR: 2.51e-04
29
+ [11:52:06] -> Saved (val=0.062959)
30
+ [11:52:47] P1 Ep 14/200 | Train: 0.060547 | Val: 0.060485 | LR: 2.38e-04
31
+ [11:52:47] -> Saved (val=0.060485)
32
+ [11:53:27] P1 Ep 15/200 | Train: 0.058785 | Val: 0.061039 | LR: 2.25e-04
33
+ [11:54:05] P1 Ep 16/200 | Train: 0.057943 | Val: 0.060213 | LR: 2.11e-04
34
+ [11:54:05] -> Saved (val=0.060213)
35
+ [11:54:45] P1 Ep 17/200 | Train: 0.056070 | Val: 0.058919 | LR: 1.97e-04
36
+ [11:54:45] -> Saved (val=0.058919)
37
+ [11:55:26] P1 Ep 18/200 | Train: 0.054786 | Val: 0.058348 | LR: 1.82e-04
38
+ [11:55:26] -> Saved (val=0.058348)
39
+ [11:56:06] P1 Ep 19/200 | Train: 0.052987 | Val: 0.057440 | LR: 1.66e-04
40
+ [11:56:06] -> Saved (val=0.057440)
41
+ [11:56:47] P1 Ep 20/200 | Train: 0.052127 | Val: 0.056453 | LR: 1.50e-04
42
+ [11:56:47] -> Saved (val=0.056453)
43
+ [11:57:24] P1 Ep 21/200 | Train: 0.050730 | Val: 0.056410 | LR: 1.35e-04
44
+ [11:57:24] -> Saved (val=0.056410)
45
+ [11:58:04] P1 Ep 22/200 | Train: 0.049248 | Val: 0.054557 | LR: 1.19e-04
46
+ [11:58:04] -> Saved (val=0.054557)
47
+ [11:58:43] P1 Ep 23/200 | Train: 0.047884 | Val: 0.054218 | LR: 1.04e-04
48
+ [11:58:43] -> Saved (val=0.054218)
49
+ [11:59:26] P1 Ep 24/200 | Train: 0.046856 | Val: 0.054357 | LR: 8.97e-05
50
+ [12:00:10] P1 Ep 25/200 | Train: 0.045506 | Val: 0.052886 | LR: 7.58e-05
51
+ [12:00:10] -> Saved (val=0.052886)
52
+ [12:00:54] P1 Ep 26/200 | Train: 0.044909 | Val: 0.052853 | LR: 6.26e-05
53
+ [12:00:54] -> Saved (val=0.052853)
54
+ [12:01:45] P1 Ep 27/200 | Train: 0.043485 | Val: 0.052357 | LR: 5.05e-05
55
+ [12:01:45] -> Saved (val=0.052357)
56
+ [12:02:32] P1 Ep 28/200 | Train: 0.042811 | Val: 0.052252 | LR: 3.94e-05
57
+ [12:02:32] -> Saved (val=0.052252)
58
+ [12:03:21] P1 Ep 29/200 | Train: 0.041966 | Val: 0.051915 | LR: 2.96e-05
59
+ [12:03:21] -> Saved (val=0.051915)
60
+ [12:04:07] P1 Ep 30/200 | Train: 0.041643 | Val: 0.051829 | LR: 2.10e-05
61
+ [12:04:07] -> Saved (val=0.051829)
62
+ [12:04:55] P1 Ep 31/200 | Train: 0.041034 | Val: 0.051861 | LR: 1.39e-05
63
+ [12:05:44] P1 Ep 32/200 | Train: 0.040919 | Val: 0.051691 | LR: 8.32e-06
64
+ [12:05:44] -> Saved (val=0.051691)
65
+ [12:06:35] P1 Ep 33/200 | Train: 0.040340 | Val: 0.051810 | LR: 4.27e-06
66
+ [12:07:24] P1 Ep 34/200 | Train: 0.040149 | Val: 0.051759 | LR: 1.82e-06
67
+ [12:08:12] P1 Ep 35/200 | Train: 0.055239 | Val: 0.058286 | LR: 3.00e-04
68
+ [12:08:57] P1 Ep 36/200 | Train: 0.053118 | Val: 0.057930 | LR: 3.00e-04
69
+ [12:09:45] P1 Ep 37/200 | Train: 0.052679 | Val: 0.058497 | LR: 2.99e-04
70
+ [12:10:36] P1 Ep 38/200 | Train: 0.051763 | Val: 0.057724 | LR: 2.98e-04
71
+ [12:11:27] P1 Ep 39/200 | Train: 0.050405 | Val: 0.056450 | LR: 2.97e-04
72
+ [12:12:21] P1 Ep 40/200 | Train: 0.050083 | Val: 0.057638 | LR: 2.95e-04
73
+ [12:13:11] P1 Ep 41/200 | Train: 0.049271 | Val: 0.057326 | LR: 2.93e-04
74
+ [12:14:08] P1 Ep 42/200 | Train: 0.049123 | Val: 0.055690 | LR: 2.90e-04
75
+ [12:14:58] P1 Ep 43/200 | Train: 0.048641 | Val: 0.054518 | LR: 2.87e-04
76
+ [12:15:50] P1 Ep 44/200 | Train: 0.047449 | Val: 0.055773 | LR: 2.84e-04
77
+ [12:16:37] P1 Ep 45/200 | Train: 0.047337 | Val: 0.054818 | LR: 2.80e-04
78
+ [12:17:20] P1 Ep 46/200 | Train: 0.046448 | Val: 0.054007 | LR: 2.76e-04
79
+ [12:18:07] P1 Ep 47/200 | Train: 0.045271 | Val: 0.054450 | LR: 2.71e-04
80
+ [12:18:50] P1 Ep 48/200 | Train: 0.045169 | Val: 0.053309 | LR: 2.67e-04
81
+ [12:19:39] P1 Ep 49/200 | Train: 0.044397 | Val: 0.052718 | LR: 2.62e-04
82
+ [12:20:28] P1 Ep 50/200 | Train: 0.044364 | Val: 0.053838 | LR: 2.56e-04
83
+ [12:21:19] P1 Ep 51/200 | Train: 0.043304 | Val: 0.052757 | LR: 2.51e-04
84
+ [12:22:06] P1 Ep 52/200 | Train: 0.042207 | Val: 0.051916 | LR: 2.45e-04
85
+ [12:22:56] P1 Ep 53/200 | Train: 0.042069 | Val: 0.051808 | LR: 2.38e-04
86
+ [12:23:45] P1 Ep 54/200 | Train: 0.041493 | Val: 0.051668 | LR: 2.32e-04
87
+ [12:23:45] -> Saved (val=0.051668)
88
+ [12:24:35] P1 Ep 55/200 | Train: 0.040821 | Val: 0.051523 | LR: 2.25e-04
89
+ [12:24:35] -> Saved (val=0.051523)
90
+ [12:25:25] P1 Ep 56/200 | Train: 0.040320 | Val: 0.051325 | LR: 2.18e-04
91
+ [12:25:25] -> Saved (val=0.051325)
92
+ [12:26:12] P1 Ep 57/200 | Train: 0.039771 | Val: 0.052282 | LR: 2.11e-04
93
+ [12:26:59] P1 Ep 58/200 | Train: 0.038783 | Val: 0.050956 | LR: 2.04e-04
94
+ [12:26:59] -> Saved (val=0.050956)
95
+ [12:27:43] P1 Ep 59/200 | Train: 0.038284 | Val: 0.050635 | LR: 1.97e-04
96
+ [12:27:43] -> Saved (val=0.050635)
97
+ [12:28:33] P1 Ep 60/200 | Train: 0.037668 | Val: 0.050669 | LR: 1.89e-04
98
+ [12:29:18] P1 Ep 61/200 | Train: 0.036581 | Val: 0.051478 | LR: 1.82e-04
99
+ [12:30:08] P1 Ep 62/200 | Train: 0.036473 | Val: 0.050473 | LR: 1.74e-04
100
+ [12:30:08] -> Saved (val=0.050473)
101
+ [12:30:56] P1 Ep 63/200 | Train: 0.035829 | Val: 0.051005 | LR: 1.66e-04
102
+ [12:31:44] P1 Ep 64/200 | Train: 0.035538 | Val: 0.050598 | LR: 1.58e-04
103
+ [12:32:36] P1 Ep 65/200 | Train: 0.035032 | Val: 0.050305 | LR: 1.50e-04
104
+ [12:32:36] -> Saved (val=0.050305)
105
+ [12:33:23] P1 Ep 66/200 | Train: 0.034575 | Val: 0.049811 | LR: 1.43e-04
106
+ [12:33:23] -> Saved (val=0.049811)
107
+ [12:34:11] P1 Ep 67/200 | Train: 0.033940 | Val: 0.049943 | LR: 1.35e-04
108
+ [12:34:56] P1 Ep 68/200 | Train: 0.033441 | Val: 0.050368 | LR: 1.27e-04
109
+ [12:35:45] P1 Ep 69/200 | Train: 0.032699 | Val: 0.049368 | LR: 1.19e-04
110
+ [12:35:45] -> Saved (val=0.049368)
111
+ [12:36:36] P1 Ep 70/200 | Train: 0.032216 | Val: 0.049819 | LR: 1.12e-04
112
+ [12:37:30] P1 Ep 71/200 | Train: 0.032113 | Val: 0.049229 | LR: 1.04e-04
113
+ [12:37:30] -> Saved (val=0.049229)
114
+ [12:38:17] P1 Ep 72/200 | Train: 0.031783 | Val: 0.049613 | LR: 9.69e-05
115
+ [12:39:05] P1 Ep 73/200 | Train: 0.031139 | Val: 0.049639 | LR: 8.97e-05
116
+ [12:39:53] P1 Ep 74/200 | Train: 0.030909 | Val: 0.049352 | LR: 8.26e-05
117
+ [12:40:37] P1 Ep 75/200 | Train: 0.030026 | Val: 0.049245 | LR: 7.58e-05
118
+ [12:41:26] P1 Ep 76/200 | Train: 0.030021 | Val: 0.049434 | LR: 6.91e-05
119
+ [12:42:16] P1 Ep 77/200 | Train: 0.029630 | Val: 0.049695 | LR: 6.26e-05
120
+ [12:43:04] P1 Ep 78/200 | Train: 0.029215 | Val: 0.049583 | LR: 5.64e-05
121
+ [12:43:55] P1 Ep 79/200 | Train: 0.029007 | Val: 0.049329 | LR: 5.05e-05
122
+ [12:44:44] P1 Ep 80/200 | Train: 0.028776 | Val: 0.049381 | LR: 4.48e-05
123
+ [12:45:30] P1 Ep 81/200 | Train: 0.028516 | Val: 0.049369 | LR: 3.94e-05
124
+ [12:46:18] P1 Ep 82/200 | Train: 0.027793 | Val: 0.049461 | LR: 3.43e-05
125
+ [12:47:07] P1 Ep 83/200 | Train: 0.027906 | Val: 0.049403 | LR: 2.96e-05
126
+ [12:47:55] P1 Ep 84/200 | Train: 0.027911 | Val: 0.049490 | LR: 2.51e-05
127
+ [12:48:43] P1 Ep 85/200 | Train: 0.027716 | Val: 0.049412 | LR: 2.10e-05
128
+ [12:49:31] P1 Ep 86/200 | Train: 0.027362 | Val: 0.049466 | LR: 1.73e-05
129
+ [12:50:16] P1 Ep 87/200 | Train: 0.027360 | Val: 0.049359 | LR: 1.39e-05
130
+ [12:51:07] P1 Ep 88/200 | Train: 0.027394 | Val: 0.049412 | LR: 1.09e-05
131
+ [12:51:55] P1 Ep 89/200 | Train: 0.027289 | Val: 0.049422 | LR: 8.32e-06
132
+ [12:52:41] P1 Ep 90/200 | Train: 0.027201 | Val: 0.049411 | LR: 6.09e-06
133
+ [12:53:29] P1 Ep 91/200 | Train: 0.026896 | Val: 0.049422 | LR: 4.27e-06
134
+ [12:54:22] P1 Ep 92/200 | Train: 0.027046 | Val: 0.049424 | LR: 2.84e-06
135
+ [12:55:05] P1 Ep 93/200 | Train: 0.027085 | Val: 0.049430 | LR: 1.82e-06
136
+ [12:55:54] P1 Ep 94/200 | Train: 0.027253 | Val: 0.049430 | LR: 1.20e-06
137
+ [12:56:41] P1 Ep 95/200 | Train: 0.043982 | Val: 0.052984 | LR: 3.00e-04
138
+ [12:57:28] P1 Ep 96/200 | Train: 0.038222 | Val: 0.051824 | LR: 3.00e-04
139
+ [12:58:23] P1 Ep 97/200 | Train: 0.037723 | Val: 0.052015 | LR: 3.00e-04
140
+ [12:59:17] P1 Ep 98/200 | Train: 0.037357 | Val: 0.052355 | LR: 3.00e-04
141
+ [13:00:06] P1 Ep 99/200 | Train: 0.037049 | Val: 0.052177 | LR: 2.99e-04
142
+ [13:01:02] P1 Ep 100/200 | Train: 0.037179 | Val: 0.051571 | LR: 2.99e-04
143
+ [13:01:57] P1 Ep 101/200 | Train: 0.036772 | Val: 0.051822 | LR: 2.98e-04
144
+ [13:02:54] P1 Ep 102/200 | Train: 0.036668 | Val: 0.051385 | LR: 2.97e-04
145
+ [13:03:43] P1 Ep 103/200 | Train: 0.035946 | Val: 0.051932 | LR: 2.97e-04
146
+ [13:04:31] P1 Ep 104/200 | Train: 0.035762 | Val: 0.051701 | LR: 2.96e-04
147
+ [13:05:30] P1 Ep 105/200 | Train: 0.035620 | Val: 0.051833 | LR: 2.95e-04
148
+ [13:06:27] P1 Ep 106/200 | Train: 0.035753 | Val: 0.051283 | LR: 2.94e-04
149
+ [13:07:27] P1 Ep 107/200 | Train: 0.035071 | Val: 0.051059 | LR: 2.93e-04
150
+ [13:08:20] P1 Ep 108/200 | Train: 0.035021 | Val: 0.051350 | LR: 2.91e-04
151
+ [13:09:15] P1 Ep 109/200 | Train: 0.034773 | Val: 0.050634 | LR: 2.90e-04
152
+ [13:10:11] P1 Ep 110/200 | Train: 0.034314 | Val: 0.051428 | LR: 2.89e-04
153
+ [13:11:09] P1 Ep 111/200 | Train: 0.034267 | Val: 0.050659 | LR: 2.87e-04
154
+ [13:12:08] P1 Ep 112/200 | Train: 0.034251 | Val: 0.050590 | LR: 2.85e-04
155
+ [13:13:07] P1 Ep 113/200 | Train: 0.033978 | Val: 0.050513 | LR: 2.84e-04
156
+ [13:14:05] P1 Ep 114/200 | Train: 0.033642 | Val: 0.050449 | LR: 2.82e-04
157
+ [13:15:03] P1 Ep 115/200 | Train: 0.033057 | Val: 0.050976 | LR: 2.80e-04
158
+ [13:16:02] P1 Ep 116/200 | Train: 0.033147 | Val: 0.050206 | LR: 2.78e-04
159
+ [13:16:58] P1 Ep 117/200 | Train: 0.032697 | Val: 0.050497 | LR: 2.76e-04
160
+ [13:17:27] P1 Ep 118/200 | Train: 0.032659 | Val: 0.050170 | LR: 2.74e-04
161
+ [13:18:14] P1 Ep 119/200 | Train: 0.032495 | Val: 0.050028 | LR: 2.71e-04
162
+ [13:19:13] P1 Ep 120/200 | Train: 0.031716 | Val: 0.050162 | LR: 2.69e-04
163
+ [13:20:10] P1 Ep 121/200 | Train: 0.031809 | Val: 0.050182 | LR: 2.67e-04
164
+ [13:21:07] P1 Ep 122/200 | Train: 0.031383 | Val: 0.049631 | LR: 2.64e-04
165
+ [13:22:02] P1 Ep 123/200 | Train: 0.031122 | Val: 0.050360 | LR: 2.62e-04
166
+ [13:22:58] P1 Ep 124/200 | Train: 0.030889 | Val: 0.050236 | LR: 2.59e-04
167
+ [13:23:50] P1 Ep 125/200 | Train: 0.031136 | Val: 0.048960 | LR: 2.56e-04
168
+ [13:23:50] -> Saved (val=0.048960)
169
+ [13:24:46] P1 Ep 126/200 | Train: 0.030786 | Val: 0.050036 | LR: 2.53e-04
170
+ [13:25:45] P1 Ep 127/200 | Train: 0.030375 | Val: 0.049891 | LR: 2.51e-04
171
+ [13:26:43] P1 Ep 128/200 | Train: 0.030349 | Val: 0.049853 | LR: 2.48e-04
172
+ [13:27:31] P1 Ep 129/200 | Train: 0.030246 | Val: 0.049284 | LR: 2.45e-04
173
+ [13:28:29] P1 Ep 130/200 | Train: 0.029636 | Val: 0.049733 | LR: 2.42e-04
174
+ [13:29:27] P1 Ep 131/200 | Train: 0.029388 | Val: 0.049764 | LR: 2.38e-04
175
+ [13:30:21] P1 Ep 132/200 | Train: 0.029453 | Val: 0.049230 | LR: 2.35e-04
176
+ [13:31:20] P1 Ep 133/200 | Train: 0.028949 | Val: 0.050013 | LR: 2.32e-04
177
+ [13:32:18] P1 Ep 134/200 | Train: 0.028947 | Val: 0.049440 | LR: 2.29e-04
178
+ [13:33:15] P1 Ep 135/200 | Train: 0.028454 | Val: 0.049427 | LR: 2.25e-04
179
+ [13:34:13] P1 Ep 136/200 | Train: 0.028648 | Val: 0.049217 | LR: 2.22e-04
180
+ [13:35:11] P1 Ep 137/200 | Train: 0.028013 | Val: 0.049009 | LR: 2.18e-04
181
+ [13:36:10] P1 Ep 138/200 | Train: 0.028056 | Val: 0.048858 | LR: 2.15e-04
182
+ [13:36:10] -> Saved (val=0.048858)
183
+ [13:37:09] P1 Ep 139/200 | Train: 0.027858 | Val: 0.049688 | LR: 2.11e-04
184
+ [13:38:05] P1 Ep 140/200 | Train: 0.027625 | Val: 0.048765 | LR: 2.08e-04
185
+ [13:38:05] -> Saved (val=0.048765)
186
+ [13:39:04] P1 Ep 141/200 | Train: 0.027465 | Val: 0.049118 | LR: 2.04e-04
187
+ [13:40:02] P1 Ep 142/200 | Train: 0.027037 | Val: 0.049199 | LR: 2.00e-04
188
+ [13:40:48] P1 Ep 143/200 | Train: 0.027307 | Val: 0.049164 | LR: 1.97e-04
189
+ [13:41:47] P1 Ep 144/200 | Train: 0.026933 | Val: 0.049194 | LR: 1.93e-04
190
+ [13:42:44] P1 Ep 145/200 | Train: 0.026669 | Val: 0.049068 | LR: 1.89e-04
191
+ [13:43:42] P1 Ep 146/200 | Train: 0.026216 | Val: 0.049076 | LR: 1.85e-04
192
+ [13:44:41] P1 Ep 147/200 | Train: 0.026247 | Val: 0.049737 | LR: 1.82e-04
193
+ [13:45:30] P1 Ep 148/200 | Train: 0.026166 | Val: 0.048985 | LR: 1.78e-04
194
+ [13:46:26] P1 Ep 149/200 | Train: 0.025979 | Val: 0.048745 | LR: 1.74e-04
195
+ [13:46:26] -> Saved (val=0.048745)
196
+ [13:47:23] P1 Ep 150/200 | Train: 0.025572 | Val: 0.048911 | LR: 1.70e-04
197
+ [13:48:22] P1 Ep 151/200 | Train: 0.025407 | Val: 0.048562 | LR: 1.66e-04
198
+ [13:48:22] -> Saved (val=0.048562)
199
+ [13:49:20] P1 Ep 152/200 | Train: 0.025395 | Val: 0.048419 | LR: 1.62e-04
200
+ [13:49:20] -> Saved (val=0.048419)
201
+ [13:50:15] P1 Ep 153/200 | Train: 0.025127 | Val: 0.048605 | LR: 1.58e-04
202
+ [13:51:14] P1 Ep 154/200 | Train: 0.024723 | Val: 0.048968 | LR: 1.54e-04
203
+ [13:52:11] P1 Ep 155/200 | Train: 0.024588 | Val: 0.048606 | LR: 1.50e-04
204
+ [13:53:06] P1 Ep 156/200 | Train: 0.024353 | Val: 0.048610 | LR: 1.47e-04
205
+ [13:54:05] P1 Ep 157/200 | Train: 0.024312 | Val: 0.048450 | LR: 1.43e-04
206
+ [13:55:04] P1 Ep 158/200 | Train: 0.023935 | Val: 0.048543 | LR: 1.39e-04
207
+ [13:56:02] P1 Ep 159/200 | Train: 0.023915 | Val: 0.048543 | LR: 1.35e-04
208
+ [13:57:01] P1 Ep 160/200 | Train: 0.023805 | Val: 0.048652 | LR: 1.31e-04
209
+ [13:58:00] P1 Ep 161/200 | Train: 0.023502 | Val: 0.048555 | LR: 1.27e-04
210
+ [13:58:58] P1 Ep 162/200 | Train: 0.023778 | Val: 0.048518 | LR: 1.23e-04
211
+ [13:59:54] P1 Ep 163/200 | Train: 0.023441 | Val: 0.048782 | LR: 1.19e-04
212
+ [14:00:49] P1 Ep 164/200 | Train: 0.023266 | Val: 0.048662 | LR: 1.16e-04
213
+ [14:01:47] P1 Ep 165/200 | Train: 0.022997 | Val: 0.048559 | LR: 1.12e-04
214
+ [14:02:45] P1 Ep 166/200 | Train: 0.023184 | Val: 0.048875 | LR: 1.08e-04
215
+ [14:03:43] P1 Ep 167/200 | Train: 0.022644 | Val: 0.048803 | LR: 1.04e-04
216
+ [14:04:30] P1 Ep 168/200 | Train: 0.022601 | Val: 0.048760 | LR: 1.01e-04
217
+ [14:05:29] P1 Ep 169/200 | Train: 0.022378 | Val: 0.048454 | LR: 9.69e-05
218
+ [14:06:28] P1 Ep 170/200 | Train: 0.022573 | Val: 0.048729 | LR: 9.33e-05
219
+ [14:07:27] P1 Ep 171/200 | Train: 0.022055 | Val: 0.048680 | LR: 8.97e-05
220
+ [14:08:26] P1 Ep 172/200 | Train: 0.021912 | Val: 0.048579 | LR: 8.61e-05
221
+ [14:09:19] P1 Ep 173/200 | Train: 0.021908 | Val: 0.048504 | LR: 8.26e-05
222
+ [14:10:16] P1 Ep 174/200 | Train: 0.021788 | Val: 0.048779 | LR: 7.92e-05
223
+ [14:11:13] P1 Ep 175/200 | Train: 0.021829 | Val: 0.048873 | LR: 7.58e-05
224
+ [14:12:12] P1 Ep 176/200 | Train: 0.021424 | Val: 0.048851 | LR: 7.24e-05
225
+ [14:13:11] P1 Ep 177/200 | Train: 0.021471 | Val: 0.048978 | LR: 6.91e-05
226
+ [14:14:08] P1 Ep 178/200 | Train: 0.021406 | Val: 0.048607 | LR: 6.58e-05
227
+ [14:15:06] P1 Ep 179/200 | Train: 0.021363 | Val: 0.048925 | LR: 6.26e-05
228
+ [14:16:04] P1 Ep 180/200 | Train: 0.021005 | Val: 0.049062 | LR: 5.95e-05
229
+ [14:17:02] P1 Ep 181/200 | Train: 0.020893 | Val: 0.048982 | LR: 5.64e-05
230
+ [14:18:01] P1 Ep 182/200 | Train: 0.020886 | Val: 0.048928 | LR: 5.34e-05
231
+ [14:19:00] P1 Ep 183/200 | Train: 0.020844 | Val: 0.048955 | LR: 5.05e-05
232
+ [14:19:58] P1 Ep 184/200 | Train: 0.020568 | Val: 0.049070 | LR: 4.76e-05
233
+ [14:20:48] P1 Ep 185/200 | Train: 0.020681 | Val: 0.048876 | LR: 4.48e-05
234
+ [14:21:42] P1 Ep 186/200 | Train: 0.020673 | Val: 0.049073 | LR: 4.21e-05
235
+ [14:22:41] P1 Ep 187/200 | Train: 0.020577 | Val: 0.048918 | LR: 3.94e-05
236
+ [14:23:37] P1 Ep 188/200 | Train: 0.020130 | Val: 0.049030 | LR: 3.68e-05
237
+ [14:24:33] P1 Ep 189/200 | Train: 0.020268 | Val: 0.049043 | LR: 3.43e-05
238
+ [14:25:31] P1 Ep 190/200 | Train: 0.020096 | Val: 0.049269 | LR: 3.19e-05
239
+ [14:26:29] P1 Ep 191/200 | Train: 0.020342 | Val: 0.049151 | LR: 2.96e-05
240
+ [14:27:06] P1 Ep 192/200 | Train: 0.020199 | Val: 0.049238 | LR: 2.73e-05
241
+ [14:28:05] P1 Ep 193/200 | Train: 0.019946 | Val: 0.049268 | LR: 2.51e-05
242
+ [14:29:02] P1 Ep 194/200 | Train: 0.019838 | Val: 0.049197 | LR: 2.30e-05
243
+ [14:29:58] P1 Ep 195/200 | Train: 0.019971 | Val: 0.049099 | LR: 2.10e-05
244
+ [14:30:55] P1 Ep 196/200 | Train: 0.019818 | Val: 0.049309 | LR: 1.91e-05
245
+ [14:31:54] P1 Ep 197/200 | Train: 0.019684 | Val: 0.049463 | LR: 1.73e-05
246
+ [14:32:48] P1 Ep 198/200 | Train: 0.019664 | Val: 0.049344 | LR: 1.56e-05
247
+ [14:33:43] P1 Ep 199/200 | Train: 0.019628 | Val: 0.049247 | LR: 1.39e-05
248
+ [14:34:30] P1 Ep 200/200 | Train: 0.019722 | Val: 0.049477 | LR: 1.24e-05
249
+ [14:34:30] === Phase 2: 4-step AR ===
250
+ [14:34:30] Loading data...
251
+ [14:34:35] Train: 10919, Val: 1342
252
+ [14:35:16] P2 Ep 1/80 | Train: 0.027432 | Val: 0.103173 | TF: 0.80 | LR: 5.00e-05
253
+ [14:35:16] -> Saved (val=0.103173)
254
+ [14:35:57] P2 Ep 2/80 | Train: 0.026443 | Val: 0.102155 | TF: 0.79 | LR: 4.99e-05
255
+ [14:35:57] -> Saved (val=0.102155)
256
+ [14:36:37] P2 Ep 3/80 | Train: 0.026112 | Val: 0.103400 | TF: 0.78 | LR: 4.98e-05
257
+ [14:37:17] P2 Ep 4/80 | Train: 0.026135 | Val: 0.103002 | TF: 0.77 | LR: 4.97e-05
258
+ [14:37:57] P2 Ep 5/80 | Train: 0.026104 | Val: 0.102934 | TF: 0.76 | LR: 4.95e-05
259
+ [14:38:37] P2 Ep 6/80 | Train: 0.026040 | Val: 0.104183 | TF: 0.75 | LR: 4.93e-05
260
+ [14:39:18] P2 Ep 7/80 | Train: 0.026577 | Val: 0.102271 | TF: 0.74 | LR: 4.91e-05
261
+ [14:39:59] P2 Ep 8/80 | Train: 0.026711 | Val: 0.103229 | TF: 0.73 | LR: 4.88e-05
262
+ [14:40:39] P2 Ep 9/80 | Train: 0.025872 | Val: 0.102907 | TF: 0.72 | LR: 4.85e-05
263
+ [14:41:19] P2 Ep 10/80 | Train: 0.027010 | Val: 0.102454 | TF: 0.71 | LR: 4.81e-05
264
+ [14:41:58] P2 Ep 11/80 | Train: 0.027384 | Val: 0.103081 | TF: 0.70 | LR: 4.77e-05
265
+ [14:42:37] P2 Ep 12/80 | Train: 0.027054 | Val: 0.103563 | TF: 0.69 | LR: 4.73e-05
266
+ [14:43:16] P2 Ep 13/80 | Train: 0.026737 | Val: 0.102811 | TF: 0.68 | LR: 4.69e-05
267
+ [14:43:55] P2 Ep 14/80 | Train: 0.026959 | Val: 0.102991 | TF: 0.67 | LR: 4.64e-05
268
+ [14:44:34] P2 Ep 15/80 | Train: 0.026120 | Val: 0.102582 | TF: 0.66 | LR: 4.59e-05
269
+ [14:45:13] P2 Ep 16/80 | Train: 0.027048 | Val: 0.102859 | TF: 0.65 | LR: 4.53e-05
270
+ [14:45:52] P2 Ep 17/80 | Train: 0.027360 | Val: 0.103328 | TF: 0.64 | LR: 4.47e-05
271
+ [14:46:31] P2 Ep 18/80 | Train: 0.027369 | Val: 0.102728 | TF: 0.63 | LR: 4.41e-05
272
+ [14:47:10] P2 Ep 19/80 | Train: 0.027836 | Val: 0.103762 | TF: 0.62 | LR: 4.35e-05
273
+ [14:47:49] P2 Ep 20/80 | Train: 0.027746 | Val: 0.103353 | TF: 0.61 | LR: 4.28e-05
274
+ [14:48:28] P2 Ep 21/80 | Train: 0.027820 | Val: 0.103254 | TF: 0.60 | LR: 4.21e-05
275
+ [14:49:08] P2 Ep 22/80 | Train: 0.027977 | Val: 0.102890 | TF: 0.59 | LR: 4.14e-05
276
+ [14:49:46] P2 Ep 23/80 | Train: 0.027953 | Val: 0.103614 | TF: 0.58 | LR: 4.07e-05
277
+ [14:50:24] P2 Ep 24/80 | Train: 0.028208 | Val: 0.103023 | TF: 0.57 | LR: 3.99e-05
278
+ [14:51:03] P2 Ep 25/80 | Train: 0.028400 | Val: 0.103118 | TF: 0.56 | LR: 3.91e-05
279
+ [14:51:42] P2 Ep 26/80 | Train: 0.028591 | Val: 0.103500 | TF: 0.55 | LR: 3.83e-05
280
+ [14:52:20] P2 Ep 27/80 | Train: 0.028352 | Val: 0.104166 | TF: 0.54 | LR: 3.75e-05
281
+ [14:53:00] P2 Ep 28/80 | Train: 0.028690 | Val: 0.103656 | TF: 0.53 | LR: 3.66e-05
282
+ [14:53:39] P2 Ep 29/80 | Train: 0.028526 | Val: 0.103488 | TF: 0.52 | LR: 3.58e-05
283
+ [14:54:18] P2 Ep 30/80 | Train: 0.028787 | Val: 0.104129 | TF: 0.51 | LR: 3.49e-05
284
+ [14:54:56] P2 Ep 31/80 | Train: 0.028874 | Val: 0.103472 | TF: 0.50 | LR: 3.40e-05
285
+ [14:55:35] P2 Ep 32/80 | Train: 0.029396 | Val: 0.103783 | TF: 0.49 | LR: 3.31e-05
286
+ [14:56:14] P2 Ep 33/80 | Train: 0.028706 | Val: 0.104092 | TF: 0.48 | LR: 3.22e-05
287
+ [14:56:52] P2 Ep 34/80 | Train: 0.029219 | Val: 0.103201 | TF: 0.47 | LR: 3.12e-05
288
+ [14:57:30] P2 Ep 35/80 | Train: 0.029438 | Val: 0.104062 | TF: 0.46 | LR: 3.03e-05
289
+ [14:58:09] P2 Ep 36/80 | Train: 0.029572 | Val: 0.104010 | TF: 0.45 | LR: 2.93e-05
290
+ [14:58:49] P2 Ep 37/80 | Train: 0.030473 | Val: 0.103655 | TF: 0.44 | LR: 2.84e-05
291
+ [14:59:28] P2 Ep 38/80 | Train: 0.030193 | Val: 0.103855 | TF: 0.43 | LR: 2.74e-05
292
+ [15:00:06] P2 Ep 39/80 | Train: 0.030343 | Val: 0.104072 | TF: 0.42 | LR: 2.65e-05
293
+ [15:00:46] P2 Ep 40/80 | Train: 0.030170 | Val: 0.104149 | TF: 0.41 | LR: 2.55e-05
294
+ [15:01:25] P2 Ep 41/80 | Train: 0.030926 | Val: 0.103745 | TF: 0.40 | LR: 2.45e-05
295
+ [15:02:03] P2 Ep 42/80 | Train: 0.030092 | Val: 0.104060 | TF: 0.39 | LR: 2.36e-05
296
+ [15:02:42] P2 Ep 43/80 | Train: 0.030578 | Val: 0.103904 | TF: 0.38 | LR: 2.26e-05
297
+ [15:03:21] P2 Ep 44/80 | Train: 0.030360 | Val: 0.103380 | TF: 0.37 | LR: 2.17e-05
298
+ [15:03:58] P2 Ep 45/80 | Train: 0.030826 | Val: 0.104229 | TF: 0.36 | LR: 2.07e-05
299
+ [15:04:35] P2 Ep 46/80 | Train: 0.031332 | Val: 0.104352 | TF: 0.35 | LR: 1.98e-05
300
+ [15:05:14] P2 Ep 47/80 | Train: 0.030756 | Val: 0.104121 | TF: 0.34 | LR: 1.88e-05
301
+ [15:05:52] P2 Ep 48/80 | Train: 0.030972 | Val: 0.103803 | TF: 0.33 | LR: 1.79e-05
302
+ [15:06:31] P2 Ep 49/80 | Train: 0.031960 | Val: 0.104120 | TF: 0.32 | LR: 1.70e-05
303
+ [15:07:10] P2 Ep 50/80 | Train: 0.031968 | Val: 0.103845 | TF: 0.31 | LR: 1.61e-05
304
+ [15:07:49] P2 Ep 51/80 | Train: 0.031260 | Val: 0.104130 | TF: 0.30 | LR: 1.52e-05
305
+ [15:08:28] P2 Ep 52/80 | Train: 0.031812 | Val: 0.104280 | TF: 0.29 | LR: 1.44e-05
306
+ [15:09:07] P2 Ep 53/80 | Train: 0.031784 | Val: 0.103928 | TF: 0.28 | LR: 1.35e-05
307
+ [15:09:45] P2 Ep 54/80 | Train: 0.032383 | Val: 0.104695 | TF: 0.27 | LR: 1.27e-05
308
+ [15:10:24] P2 Ep 55/80 | Train: 0.032580 | Val: 0.104084 | TF: 0.26 | LR: 1.19e-05
309
+ [15:11:03] P2 Ep 56/80 | Train: 0.032533 | Val: 0.104478 | TF: 0.25 | LR: 1.11e-05
310
+ [15:11:42] P2 Ep 57/80 | Train: 0.032584 | Val: 0.104229 | TF: 0.24 | LR: 1.03e-05
311
+ [15:12:19] P2 Ep 58/80 | Train: 0.032894 | Val: 0.104471 | TF: 0.23 | LR: 9.59e-06
312
+ [15:12:58] P2 Ep 59/80 | Train: 0.032340 | Val: 0.104835 | TF: 0.22 | LR: 8.87e-06
313
+ [15:13:37] P2 Ep 60/80 | Train: 0.032757 | Val: 0.104728 | TF: 0.21 | LR: 8.18e-06
314
+ [15:14:17] P2 Ep 61/80 | Train: 0.033680 | Val: 0.104454 | TF: 0.20 | LR: 7.51e-06
315
+ [15:14:58] P2 Ep 62/80 | Train: 0.033474 | Val: 0.105213 | TF: 0.19 | LR: 6.87e-06
316
+ [15:15:38] P2 Ep 63/80 | Train: 0.034075 | Val: 0.104406 | TF: 0.18 | LR: 6.26e-06
317
+ [15:16:19] P2 Ep 64/80 | Train: 0.033939 | Val: 0.104535 | TF: 0.17 | LR: 5.68e-06
318
+ [15:16:58] P2 Ep 65/80 | Train: 0.034397 | Val: 0.104622 | TF: 0.16 | LR: 5.13e-06
319
+ [15:17:38] P2 Ep 66/80 | Train: 0.033935 | Val: 0.104737 | TF: 0.15 | LR: 4.61e-06
320
+ [15:18:19] P2 Ep 67/80 | Train: 0.033717 | Val: 0.104785 | TF: 0.14 | LR: 4.12e-06
321
+ [15:18:59] P2 Ep 68/80 | Train: 0.035352 | Val: 0.104855 | TF: 0.13 | LR: 3.67e-06
322
+ [15:19:39] P2 Ep 69/80 | Train: 0.034247 | Val: 0.104812 | TF: 0.12 | LR: 3.25e-06
323
+ [15:20:19] P2 Ep 70/80 | Train: 0.034564 | Val: 0.104788 | TF: 0.11 | LR: 2.86e-06
324
+ [15:20:59] P2 Ep 71/80 | Train: 0.034817 | Val: 0.104847 | TF: 0.10 | LR: 2.51e-06
325
+ [15:21:39] P2 Ep 72/80 | Train: 0.035483 | Val: 0.104859 | TF: 0.09 | LR: 2.20e-06
326
+ [15:22:19] P2 Ep 73/80 | Train: 0.035523 | Val: 0.104798 | TF: 0.08 | LR: 1.92e-06
327
+ [15:22:59] P2 Ep 74/80 | Train: 0.035441 | Val: 0.104855 | TF: 0.07 | LR: 1.68e-06
328
+ [15:23:39] P2 Ep 75/80 | Train: 0.035759 | Val: 0.105016 | TF: 0.06 | LR: 1.47e-06
329
+ [15:24:19] P2 Ep 76/80 | Train: 0.036529 | Val: 0.105032 | TF: 0.05 | LR: 1.30e-06
330
+ [15:24:59] P2 Ep 77/80 | Train: 0.036083 | Val: 0.105028 | TF: 0.04 | LR: 1.17e-06
331
+ [15:25:39] P2 Ep 78/80 | Train: 0.036298 | Val: 0.105053 | TF: 0.03 | LR: 1.08e-06
332
+ [15:26:20] P2 Ep 79/80 | Train: 0.036781 | Val: 0.105163 | TF: 0.02 | LR: 1.02e-06
333
+ [15:27:00] P2 Ep 80/80 | Train: 0.037035 | Val: 0.105116 | TF: 0.01 | LR: 1.00e-06
334
+ [15:27:00] Training complete.