Itoifi committed on
Commit b5c4bde
1 Parent(s): 2b94860

Upload 48 files

32k-est/D_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee1cf33e022d89ea425a6b9097a091de3eec9e5f5b04f25fd9eedc23ed442ee1
+ size 561098185
32k-est/D_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb924a482100c858d89d7b94b893d7aa05161a970bbc42ab480a988f89decb83
+ size 561098185
32k-est/D_24000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58b0ccd96e30cae65717c0a49ad405283eb6bca8fa077fe14979ebe62d730584
+ size 561098185
32k-est/D_30000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ab1765fc5b17e87615fca97ffb10ab3a98d3868b12582d95b8b8f7843dab2e2
+ size 561098185
32k-est/D_35000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a845da9600e686b50d5ce35b8a655a57aff0bec43d604d413741ee6496e731a
+ size 561098185
32k-est/D_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccdc98331850dea415ba14781b8f089efb418ff7b6b30c771ba517f505f39210
+ size 561098185
32k-est/G_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddf37037109df84a114d121961fd39142fc05f2dc9f354bcd6127f64ccdd4a3f
+ size 699505437
32k-est/G_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:357debf8b5442449e542f3e66859c3349688182198588a3f8c913734b37ce337
+ size 699505437
32k-est/G_24000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bd47eb3736240651444005d6c45f5c69900d2c6e7af24b016c269a2c3c2ef2b
+ size 699505437
32k-est/G_30000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7938688c2c59634e360033d0e54a37afa8b8a151581560108941eb17e589a05d
+ size 699505437
32k-est/G_35000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:03b50fe8e53f536b0704f6aa0ff707de79919efe69aa24b7a4567a120acd3d7e
+ size 699505437
32k-est/G_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56c15dcc64fe441fd365e1717a3287174c1968acfee39577bbaac204ec330540
+ size 699505437
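
Each of the `.pth` checkpoints above is stored as a Git LFS pointer rather than as the binary itself: a three-line text stub recording the pointer spec version, the SHA-256 object ID, and the payload size in bytes (~561 MB per discriminator `D_*.pth`, ~700 MB per generator `G_*.pth`). As a minimal sketch (not part of this repo, and assuming a checkout where the LFS objects have not been pulled yet, so the files on disk are still pointer stubs), such a stub can be parsed like this:

```python
# Hypothetical helper: parse a Git LFS pointer stub into its three fields.
# In a real checkout you would normally just run `git lfs pull` instead.
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    algo, _, digest = fields["oid"].partition(":")  # "oid" arrives as "sha256:<hex>"
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size_bytes": int(fields["size"])}

info = parse_lfs_pointer("32k-est/D_12000.pth")
print(info["digest"][:12], f'{info["size_bytes"] / 1e6:.0f} MB')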
32k-est/config.json ADDED
@@ -0,0 +1,90 @@
+ {
+   "train": {
+     "log_interval": 200,
+     "eval_interval": 1000,
+     "seed": 1234,
+     "epochs": 10000,
+     "learning_rate": 0.0001,
+     "betas": [
+       0.8,
+       0.99
+     ],
+     "eps": 1e-09,
+     "batch_size": 6,
+     "fp16_run": false,
+     "lr_decay": 0.999875,
+     "segment_size": 17920,
+     "init_lr_ratio": 1,
+     "warmup_epochs": 0,
+     "c_mel": 45,
+     "c_kl": 1.0,
+     "use_sr": true,
+     "max_speclen": 384,
+     "port": "8001"
+   },
+   "data": {
+     "training_files": "filelists/train.txt",
+     "validation_files": "filelists/val.txt",
+     "max_wav_value": 32768.0,
+     "sampling_rate": 32000,
+     "filter_length": 1280,
+     "hop_length": 320,
+     "win_length": 1280,
+     "n_mel_channels": 80,
+     "mel_fmin": 0.0,
+     "mel_fmax": null
+   },
+   "model": {
+     "inter_channels": 192,
+     "hidden_channels": 192,
+     "filter_channels": 768,
+     "n_heads": 2,
+     "n_layers": 6,
+     "kernel_size": 3,
+     "p_dropout": 0.1,
+     "resblock": "1",
+     "resblock_kernel_sizes": [
+       3,
+       7,
+       11
+     ],
+     "resblock_dilation_sizes": [
+       [
+         1,
+         3,
+         5
+       ],
+       [
+         1,
+         3,
+         5
+       ],
+       [
+         1,
+         3,
+         5
+       ]
+     ],
+     "upsample_rates": [
+       10,
+       8,
+       2,
+       2
+     ],
+     "upsample_initial_channel": 512,
+     "upsample_kernel_sizes": [
+       16,
+       16,
+       4,
+       4
+     ],
+     "n_layers_q": 3,
+     "use_spectral_norm": false,
+     "gin_channels": 256,
+     "ssl_dim": 256,
+     "n_speakers": 2
+   },
+   "spk": {
+     "est": 0
+   }
+ }
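
The file above is the standard so-vits-svc hyperparameter config for this 32 kHz model. A minimal sketch (assuming it is saved at `32k-est/config.json`) of loading it and deriving two quantities implied by its numbers:

```python
# Load the training config and derive frame-level quantities from it.
import json

with open("32k-est/config.json") as f:
    cfg = json.load(f)

data = cfg["data"]
hop_ms = 1000 * data["hop_length"] / data["sampling_rate"]   # 320 / 32000 -> 10.0 ms per frame
frames = cfg["train"]["segment_size"] // data["hop_length"]  # 17920 / 320 -> 56 frames per training segment
print(f"frame hop: {hop_ms} ms, segment length: {frames} frames")
```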
32k-est/train.log ADDED
@@ -0,0 +1,786 @@
+ 2023-02-22 00:08:12,618 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'est': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-22 00:08:12,618 32k WARNING K:\AI\so-vits-svc-32k is not a git repository, therefore hash value comparison will be ignored.
+ 2023-02-22 00:08:34,870 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
+ 2023-02-22 00:08:37,960 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
+ 2023-02-22 00:09:12,596 32k INFO Train Epoch: 1 [0%]
+ 2023-02-22 00:09:12,597 32k INFO [2.145352363586426, 3.4001522064208984, 12.26406478881836, 40.874549865722656, 8.646808624267578, 0, 0.0001]
+ 2023-02-22 00:09:19,053 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
+ 2023-02-22 00:09:37,859 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
+ 2023-02-22 00:11:10,285 32k INFO ====> Epoch: 1
+ 2023-02-22 00:12:56,112 32k INFO ====> Epoch: 2
+ 2023-02-22 00:13:21,893 32k INFO Train Epoch: 3 [4%]
+ 2023-02-22 00:13:21,893 32k INFO [2.3486380577087402, 2.4642438888549805, 14.349164009094238, 24.486454010009766, 1.6935454607009888, 200, 9.99750015625e-05]
+ 2023-02-22 00:14:42,277 32k INFO ====> Epoch: 3
+ 2023-02-22 00:16:27,846 32k INFO ====> Epoch: 4
+ 2023-02-22 00:16:56,860 32k INFO Train Epoch: 5 [8%]
+ 2023-02-22 00:16:56,860 32k INFO [2.4858646392822266, 2.255676746368408, 11.70196533203125, 19.060020446777344, 1.3148207664489746, 400, 9.995000937421877e-05]
+ 2023-02-22 00:18:13,793 32k INFO ====> Epoch: 5
+ 2023-02-22 00:19:59,483 32k INFO ====> Epoch: 6
+ 2023-02-22 00:20:31,940 32k INFO Train Epoch: 7 [12%]
+ 2023-02-22 00:20:31,940 32k INFO [2.3629703521728516, 2.449742078781128, 9.971107482910156, 18.171764373779297, 1.2438346147537231, 600, 9.99250234335941e-05]
+ 2023-02-22 00:21:45,560 32k INFO ====> Epoch: 7
+ 2023-02-22 00:23:31,255 32k INFO ====> Epoch: 8
+ 2023-02-22 00:24:07,127 32k INFO Train Epoch: 9 [16%]
+ 2023-02-22 00:24:07,127 32k INFO [2.7334389686584473, 2.1502294540405273, 7.149779319763184, 18.11358642578125, 1.2457014322280884, 800, 9.990004373906418e-05]
+ 2023-02-22 00:25:17,128 32k INFO ====> Epoch: 9
+ 2023-02-22 00:27:02,598 32k INFO ====> Epoch: 10
+ 2023-02-22 00:27:41,816 32k INFO Train Epoch: 11 [20%]
+ 2023-02-22 00:27:41,816 32k INFO [2.525851011276245, 2.173820734024048, 10.240619659423828, 18.13275146484375, 1.0487183332443237, 1000, 9.987507028906759e-05]
+ 2023-02-22 00:27:46,068 32k INFO Saving model and optimizer state at iteration 11 to ./logs\32k\G_1000.pth
+ 2023-02-22 00:28:05,134 32k INFO Saving model and optimizer state at iteration 11 to ./logs\32k\D_1000.pth
+ 2023-02-22 00:29:14,629 32k INFO ====> Epoch: 11
+ 2023-02-22 00:31:00,159 32k INFO ====> Epoch: 12
+ 2023-02-22 00:31:42,863 32k INFO Train Epoch: 13 [24%]
+ 2023-02-22 00:31:42,863 32k INFO [2.4192304611206055, 2.4427719116210938, 12.2838773727417, 23.12200164794922, 1.0217249393463135, 1200, 9.98501030820433e-05]
+ 2023-02-22 00:32:46,074 32k INFO ====> Epoch: 13
+ 2023-02-22 00:34:31,694 32k INFO ====> Epoch: 14
+ 2023-02-22 00:35:17,797 32k INFO Train Epoch: 15 [29%]
+ 2023-02-22 00:35:17,798 32k INFO [2.5181407928466797, 2.393437385559082, 8.553373336791992, 21.508071899414062, 0.9627037048339844, 1400, 9.982514211643064e-05]
+ 2023-02-22 00:36:17,539 32k INFO ====> Epoch: 15
+ 2023-02-22 00:38:03,121 32k INFO ====> Epoch: 16
+ 2023-02-22 00:38:52,596 32k INFO Train Epoch: 17 [33%]
+ 2023-02-22 00:38:52,597 32k INFO [2.3772032260894775, 2.1716203689575195, 12.80620002746582, 22.022525787353516, 1.2130091190338135, 1600, 9.980018739066937e-05]
+ 2023-02-22 00:39:48,945 32k INFO ====> Epoch: 17
+ 2023-02-22 00:41:34,608 32k INFO ====> Epoch: 18
+ 2023-02-22 00:42:27,609 32k INFO Train Epoch: 19 [37%]
+ 2023-02-22 00:42:27,609 32k INFO [2.588810920715332, 2.084688901901245, 10.23686408996582, 17.19050407409668, 1.0559662580490112, 1800, 9.977523890319963e-05]
+ 2023-02-22 00:43:20,562 32k INFO ====> Epoch: 19
+ 2023-02-22 00:45:06,198 32k INFO ====> Epoch: 20
+ 2023-02-22 00:46:02,554 32k INFO Train Epoch: 21 [41%]
+ 2023-02-22 00:46:02,555 32k INFO [2.3476009368896484, 2.4465882778167725, 14.103896141052246, 22.818317413330078, 0.7795078158378601, 2000, 9.975029665246193e-05]
+ 2023-02-22 00:46:06,816 32k INFO Saving model and optimizer state at iteration 21 to ./logs\32k\G_2000.pth
+ 2023-02-22 00:46:24,690 32k INFO Saving model and optimizer state at iteration 21 to ./logs\32k\D_2000.pth
+ 2023-02-22 00:47:17,669 32k INFO ====> Epoch: 21
+ 2023-02-22 00:49:03,347 32k INFO ====> Epoch: 22
+ 2023-02-22 00:50:03,181 32k INFO Train Epoch: 23 [45%]
+ 2023-02-22 00:50:03,181 32k INFO [2.331178903579712, 2.411133289337158, 13.773752212524414, 21.044940948486328, 1.075492262840271, 2200, 9.972536063689719e-05]
+ 2023-02-22 00:50:49,196 32k INFO ====> Epoch: 23
+ 2023-02-22 00:52:35,089 32k INFO ====> Epoch: 24
+ 2023-02-22 00:53:38,492 32k INFO Train Epoch: 25 [49%]
+ 2023-02-22 00:53:38,492 32k INFO [2.7379379272460938, 2.1680490970611572, 10.426996231079102, 19.272850036621094, 1.2769395112991333, 2400, 9.970043085494672e-05]
+ 2023-02-22 00:54:21,188 32k INFO ====> Epoch: 25
+ 2023-02-22 00:56:06,784 32k INFO ====> Epoch: 26
+ 2023-02-22 00:57:13,707 32k INFO Train Epoch: 27 [53%]
+ 2023-02-22 00:57:13,707 32k INFO [2.4311702251434326, 2.5075151920318604, 9.771134376525879, 13.715150833129883, 0.9923850297927856, 2600, 9.967550730505221e-05]
+ 2023-02-22 00:57:53,034 32k INFO ====> Epoch: 27
+ 2023-02-22 00:59:38,682 32k INFO ====> Epoch: 28
+ 2023-02-22 01:00:48,916 32k INFO Train Epoch: 29 [57%]
+ 2023-02-22 01:00:48,916 32k INFO [2.397207736968994, 2.2835323810577393, 11.884821891784668, 21.50246810913086, 0.7772465944290161, 2800, 9.965058998565574e-05]
+ 2023-02-22 01:01:24,697 32k INFO ====> Epoch: 29
+ 2023-02-22 01:03:10,447 32k INFO ====> Epoch: 30
+ 2023-02-22 01:04:23,993 32k INFO Train Epoch: 31 [61%]
+ 2023-02-22 01:04:23,994 32k INFO [2.4665157794952393, 2.223632574081421, 10.100894927978516, 15.292652130126953, 1.0024449825286865, 3000, 9.962567889519979e-05]
+ 2023-02-22 01:04:28,221 32k INFO Saving model and optimizer state at iteration 31 to ./logs\32k\G_3000.pth
+ 2023-02-22 01:04:48,543 32k INFO Saving model and optimizer state at iteration 31 to ./logs\32k\D_3000.pth
+ 2023-02-22 01:05:24,206 32k INFO ====> Epoch: 31
+ 2023-02-22 01:07:09,674 32k INFO ====> Epoch: 32
+ 2023-02-22 01:08:26,794 32k INFO Train Epoch: 33 [65%]
+ 2023-02-22 01:08:26,795 32k INFO [2.4395065307617188, 2.381229877471924, 11.58455753326416, 21.415176391601562, 1.0025029182434082, 3200, 9.960077403212722e-05]
+ 2023-02-22 01:08:55,697 32k INFO ====> Epoch: 33
+ 2023-02-22 01:10:41,472 32k INFO ====> Epoch: 34
+ 2023-02-22 01:12:02,104 32k INFO Train Epoch: 35 [69%]
+ 2023-02-22 01:12:02,105 32k INFO [2.388833522796631, 2.4029440879821777, 9.948843002319336, 17.525562286376953, 0.9836262464523315, 3400, 9.957587539488128e-05]
+ 2023-02-22 01:12:27,570 32k INFO ====> Epoch: 35
+ 2023-02-22 01:14:13,128 32k INFO ====> Epoch: 36
+ 2023-02-22 01:15:37,186 32k INFO Train Epoch: 37 [73%]
+ 2023-02-22 01:15:37,187 32k INFO [2.396942615509033, 2.3084261417388916, 7.296802997589111, 13.69768238067627, 0.9189913868904114, 3600, 9.95509829819056e-05]
+ 2023-02-22 01:15:59,261 32k INFO ====> Epoch: 37
+ 2023-02-22 01:17:45,166 32k INFO ====> Epoch: 38
+ 2023-02-22 01:19:12,693 32k INFO Train Epoch: 39 [78%]
+ 2023-02-22 01:19:12,694 32k INFO [2.34450626373291, 2.3164803981781006, 13.175307273864746, 21.369274139404297, 0.5636721849441528, 3800, 9.952609679164422e-05]
+ 2023-02-22 01:19:31,254 32k INFO ====> Epoch: 39
+ 2023-02-22 01:21:17,022 32k INFO ====> Epoch: 40
+ 2023-02-22 01:22:47,780 32k INFO Train Epoch: 41 [82%]
+ 2023-02-22 01:22:47,781 32k INFO [2.6599762439727783, 2.186920642852783, 8.917391777038574, 14.608092308044434, 0.7955180406570435, 4000, 9.950121682254156e-05]
+ 2023-02-22 01:22:52,052 32k INFO Saving model and optimizer state at iteration 41 to ./logs\32k\G_4000.pth
+ 2023-02-22 01:23:13,030 32k INFO Saving model and optimizer state at iteration 41 to ./logs\32k\D_4000.pth
+ 2023-02-22 01:23:31,585 32k INFO ====> Epoch: 41
+ 2023-02-22 01:25:17,001 32k INFO ====> Epoch: 42
+ 2023-02-22 01:26:51,140 32k INFO Train Epoch: 43 [86%]
+ 2023-02-22 01:26:51,141 32k INFO [2.218780994415283, 2.8232128620147705, 10.92943286895752, 15.00414752960205, 0.885425865650177, 4200, 9.947634307304244e-05]
+ 2023-02-22 01:27:02,817 32k INFO ====> Epoch: 43
+ 2023-02-22 01:28:48,594 32k INFO ====> Epoch: 44
+ 2023-02-22 01:30:26,338 32k INFO Train Epoch: 45 [90%]
+ 2023-02-22 01:30:26,338 32k INFO [2.6101627349853516, 2.4599862098693848, 10.045958518981934, 18.106704711914062, 0.9387855529785156, 4400, 9.945147554159202e-05]
+ 2023-02-22 01:30:34,593 32k INFO ====> Epoch: 45
+ 2023-02-22 01:32:20,323 32k INFO ====> Epoch: 46
+ 2023-02-22 01:34:01,568 32k INFO Train Epoch: 47 [94%]
+ 2023-02-22 01:34:01,569 32k INFO [2.4062795639038086, 2.434760093688965, 9.70323371887207, 17.096527099609375, 0.6637702584266663, 4600, 9.942661422663591e-05]
+ 2023-02-22 01:34:06,513 32k INFO ====> Epoch: 47
+ 2023-02-22 01:35:52,323 32k INFO ====> Epoch: 48
+ 2023-02-22 01:37:36,966 32k INFO Train Epoch: 49 [98%]
+ 2023-02-22 01:37:36,967 32k INFO [2.4078311920166016, 2.423760175704956, 11.222769737243652, 20.222986221313477, 0.9975738525390625, 4800, 9.940175912662009e-05]
+ 2023-02-22 01:37:38,357 32k INFO ====> Epoch: 49
+ 2023-02-22 01:39:23,989 32k INFO ====> Epoch: 50
+ 2023-02-22 01:41:09,562 32k INFO ====> Epoch: 51
+ 2023-02-22 01:41:33,357 32k INFO Train Epoch: 52 [2%]
+ 2023-02-22 01:41:33,358 32k INFO [2.568455934524536, 2.097076177597046, 8.081390380859375, 16.72420310974121, 1.0670697689056396, 5000, 9.936448812621091e-05]
+ 2023-02-22 01:41:37,575 32k INFO Saving model and optimizer state at iteration 52 to ./logs\32k\G_5000.pth
+ 2023-02-22 01:41:53,583 32k INFO Saving model and optimizer state at iteration 52 to ./logs\32k\D_5000.pth
+ 2023-02-22 01:43:19,034 32k INFO ====> Epoch: 52
+ 2023-02-22 01:45:04,531 32k INFO ====> Epoch: 53
+ 2023-02-22 01:45:31,828 32k INFO Train Epoch: 54 [6%]
+ 2023-02-22 01:45:31,829 32k INFO [2.367290735244751, 2.359792947769165, 12.402170181274414, 22.279163360595703, 0.5312451124191284, 5200, 9.933964855674948e-05]
+ 2023-02-22 01:46:50,519 32k INFO ====> Epoch: 54
+ 2023-02-22 01:48:36,114 32k INFO ====> Epoch: 55
+ 2023-02-22 01:49:06,731 32k INFO Train Epoch: 56 [10%]
+ 2023-02-22 01:49:06,731 32k INFO [2.4941694736480713, 2.2833311557769775, 11.451539039611816, 18.980377197265625, 1.122371792793274, 5400, 9.931481519679228e-05]
+ 2023-02-22 01:50:22,005 32k INFO ====> Epoch: 56
+ 2023-02-22 01:52:08,000 32k INFO ====> Epoch: 57
+ 2023-02-22 01:52:43,577 32k INFO Train Epoch: 58 [14%]
+ 2023-02-22 01:52:43,577 32k INFO [2.5777227878570557, 2.013221025466919, 7.400770664215088, 16.267370223999023, 0.9109489917755127, 5600, 9.928998804478705e-05]
+ 2023-02-22 01:53:55,400 32k INFO ====> Epoch: 58
+ 2023-02-22 01:55:41,267 32k INFO ====> Epoch: 59
+ 2023-02-22 01:56:18,836 32k INFO Train Epoch: 60 [18%]
+ 2023-02-22 01:56:18,837 32k INFO [2.3286633491516113, 2.273066282272339, 11.276134490966797, 19.17591667175293, 0.6657251715660095, 5800, 9.926516709918191e-05]
+ 2023-02-22 01:57:27,294 32k INFO ====> Epoch: 60
+ 2023-02-22 01:59:12,967 32k INFO ====> Epoch: 61
+ 2023-02-22 01:59:53,907 32k INFO Train Epoch: 62 [22%]
+ 2023-02-22 01:59:53,907 32k INFO [2.572532892227173, 2.336395025253296, 12.095869064331055, 18.98446273803711, 0.7074256539344788, 6000, 9.924035235842533e-05]
+ 2023-02-22 01:59:58,246 32k INFO Saving model and optimizer state at iteration 62 to ./logs\32k\G_6000.pth
+ 2023-02-22 02:00:15,073 32k INFO Saving model and optimizer state at iteration 62 to ./logs\32k\D_6000.pth
+ 2023-02-22 02:01:23,134 32k INFO ====> Epoch: 62
+ 2023-02-22 02:03:08,840 32k INFO ====> Epoch: 63
+ 2023-02-22 02:03:53,319 32k INFO Train Epoch: 64 [27%]
+ 2023-02-22 02:03:53,320 32k INFO [2.5473315715789795, 2.3198702335357666, 8.785454750061035, 16.218992233276367, 0.8188357949256897, 6200, 9.921554382096622e-05]
+ 2023-02-22 02:04:54,908 32k INFO ====> Epoch: 64
+ 2023-02-22 02:06:40,700 32k INFO ====> Epoch: 65
+ 2023-02-22 02:07:28,567 32k INFO Train Epoch: 66 [31%]
+ 2023-02-22 02:07:28,567 32k INFO [2.507612705230713, 2.3138248920440674, 6.625302791595459, 13.481208801269531, 0.4462297260761261, 6400, 9.919074148525384e-05]
+ 2023-02-22 02:08:26,757 32k INFO ====> Epoch: 66
+ 2023-02-22 02:10:12,508 32k INFO ====> Epoch: 67
+ 2023-02-22 02:11:03,725 32k INFO Train Epoch: 68 [35%]
+ 2023-02-22 02:11:03,725 32k INFO [2.5980536937713623, 2.279015064239502, 7.950907230377197, 13.451935768127441, 1.1401939392089844, 6600, 9.916594534973787e-05]
+ 2023-02-22 02:11:58,445 32k INFO ====> Epoch: 68
+ 2023-02-22 02:13:44,135 32k INFO ====> Epoch: 69
+ 2023-02-22 02:14:38,891 32k INFO Train Epoch: 70 [39%]
+ 2023-02-22 02:14:38,892 32k INFO [2.5319972038269043, 2.51550030708313, 10.939152717590332, 19.677751541137695, 0.63681960105896, 6800, 9.914115541286833e-05]
+ 2023-02-22 02:15:30,175 32k INFO ====> Epoch: 70
+ 2023-02-22 02:17:15,908 32k INFO ====> Epoch: 71
+ 2023-02-22 02:18:14,245 32k INFO Train Epoch: 72 [43%]
+ 2023-02-22 02:18:14,245 32k INFO [2.5482029914855957, 2.192294120788574, 9.581657409667969, 15.53216552734375, 1.2906405925750732, 7000, 9.911637167309565e-05]
+ 2023-02-22 02:18:18,620 32k INFO Saving model and optimizer state at iteration 72 to ./logs\32k\G_7000.pth
+ 2023-02-22 02:18:35,298 32k INFO Saving model and optimizer state at iteration 72 to ./logs\32k\D_7000.pth
+ 2023-02-22 02:19:26,420 32k INFO ====> Epoch: 72
+ 2023-02-22 02:21:12,370 32k INFO ====> Epoch: 73
+ 2023-02-22 02:22:13,800 32k INFO Train Epoch: 74 [47%]
+ 2023-02-22 02:22:13,801 32k INFO [2.5285887718200684, 2.162809371948242, 13.120260238647461, 21.690521240234375, 0.7244228720664978, 7200, 9.909159412887068e-05]
+ 2023-02-22 02:22:58,230 32k INFO ====> Epoch: 74
+ 2023-02-22 02:24:44,154 32k INFO ====> Epoch: 75
+ 2023-02-22 02:25:49,172 32k INFO Train Epoch: 76 [51%]
+ 2023-02-22 02:25:49,172 32k INFO [2.3859612941741943, 2.5308663845062256, 9.781866073608398, 18.245580673217773, 0.9907944798469543, 7400, 9.906682277864462e-05]
+ 2023-02-22 02:26:30,190 32k INFO ====> Epoch: 76
+ 2023-02-22 02:28:15,895 32k INFO ====> Epoch: 77
+ 2023-02-22 02:29:24,473 32k INFO Train Epoch: 78 [55%]
+ 2023-02-22 02:29:24,473 32k INFO [2.386472702026367, 2.3571693897247314, 11.112114906311035, 19.415468215942383, 0.8537010550498962, 7600, 9.904205762086905e-05]
+ 2023-02-22 02:30:01,938 32k INFO ====> Epoch: 78
+ 2023-02-22 02:31:47,642 32k INFO ====> Epoch: 79
+ 2023-02-22 02:32:59,591 32k INFO Train Epoch: 80 [59%]
+ 2023-02-22 02:32:59,592 32k INFO [2.4535465240478516, 2.43351411819458, 10.742626190185547, 20.20539093017578, 0.905364990234375, 7800, 9.901729865399597e-05]
+ 2023-02-22 02:33:33,554 32k INFO ====> Epoch: 80
+ 2023-02-22 02:35:19,249 32k INFO ====> Epoch: 81
+ 2023-02-22 02:36:34,659 32k INFO Train Epoch: 82 [63%]
+ 2023-02-22 02:36:34,659 32k INFO [2.5064964294433594, 2.268216848373413, 9.974581718444824, 17.910846710205078, 0.9886245727539062, 8000, 9.899254587647776e-05]
+ 2023-02-22 02:36:38,899 32k INFO Saving model and optimizer state at iteration 82 to ./logs\32k\G_8000.pth
+ 2023-02-22 02:36:55,742 32k INFO Saving model and optimizer state at iteration 82 to ./logs\32k\D_8000.pth
+ 2023-02-22 02:37:29,423 32k INFO ====> Epoch: 82
+ 2023-02-22 02:39:15,168 32k INFO ====> Epoch: 83
+ 2023-02-22 02:40:34,013 32k INFO Train Epoch: 84 [67%]
+ 2023-02-22 02:40:34,013 32k INFO [2.248044490814209, 2.346158266067505, 11.082698822021484, 14.95010757446289, 0.6171810626983643, 8200, 9.896779928676716e-05]
+ 2023-02-22 02:41:01,209 32k INFO ====> Epoch: 84
+ 2023-02-22 02:42:47,112 32k INFO ====> Epoch: 85
+ 2023-02-22 02:44:09,353 32k INFO Train Epoch: 86 [71%]
+ 2023-02-22 02:44:09,353 32k INFO [2.6061596870422363, 2.314436912536621, 10.66403865814209, 19.200468063354492, 1.07802414894104, 8400, 9.894305888331732e-05]
+ 2023-02-22 02:44:33,096 32k INFO ====> Epoch: 86
+ 2023-02-22 02:46:18,750 32k INFO ====> Epoch: 87
+ 2023-02-22 02:47:44,525 32k INFO Train Epoch: 88 [76%]
+ 2023-02-22 02:47:44,526 32k INFO [2.4097466468811035, 2.220452308654785, 11.689645767211914, 18.19406509399414, 0.7397383451461792, 8600, 9.891832466458178e-05]
+ 2023-02-22 02:48:04,901 32k INFO ====> Epoch: 88
+ 2023-02-22 02:49:50,659 32k INFO ====> Epoch: 89
+ 2023-02-22 02:51:19,746 32k INFO Train Epoch: 90 [80%]
+ 2023-02-22 02:51:19,746 32k INFO [2.4018168449401855, 2.383319139480591, 12.725435256958008, 18.570293426513672, 1.2559199333190918, 8800, 9.889359662901445e-05]
+ 2023-02-22 02:51:36,642 32k INFO ====> Epoch: 90
+ 2023-02-22 02:53:22,585 32k INFO ====> Epoch: 91
+ 2023-02-22 02:54:55,326 32k INFO Train Epoch: 92 [84%]
+ 2023-02-22 02:54:55,327 32k INFO [2.290731430053711, 2.6917805671691895, 11.489081382751465, 20.181421279907227, 0.704178512096405, 9000, 9.886887477506964e-05]
+ 2023-02-22 02:54:59,569 32k INFO Saving model and optimizer state at iteration 92 to ./logs\32k\G_9000.pth
+ 2023-02-22 02:55:15,826 32k INFO Saving model and optimizer state at iteration 92 to ./logs\32k\D_9000.pth
+ 2023-02-22 02:55:32,580 32k INFO ====> Epoch: 92
+ 2023-02-22 02:57:18,717 32k INFO ====> Epoch: 93
+ 2023-02-22 02:58:55,426 32k INFO Train Epoch: 94 [88%]
+ 2023-02-22 02:58:55,426 32k INFO [2.7609176635742188, 2.471208333969116, 5.6250128746032715, 12.03889274597168, 0.7381411790847778, 9200, 9.884415910120204e-05]
+ 2023-02-22 02:59:05,407 32k INFO ====> Epoch: 94
+ 2023-02-22 03:00:51,801 32k INFO ====> Epoch: 95
+ 2023-02-22 03:02:31,827 32k INFO Train Epoch: 96 [92%]
+ 2023-02-22 03:02:31,827 32k INFO [2.406148672103882, 2.274200677871704, 11.835630416870117, 21.682048797607422, 1.1413294076919556, 9400, 9.881944960586671e-05]
+ 2023-02-22 03:02:38,366 32k INFO ====> Epoch: 96
+ 2023-02-22 03:04:24,238 32k INFO ====> Epoch: 97
+ 2023-02-22 03:06:07,737 32k INFO Train Epoch: 98 [96%]
+ 2023-02-22 03:06:07,737 32k INFO [2.4189469814300537, 2.4262475967407227, 7.4442033767700195, 12.63837718963623, 0.5815107822418213, 9600, 9.879474628751914e-05]
+ 2023-02-22 03:06:10,846 32k INFO ====> Epoch: 98
+ 2023-02-22 03:07:57,269 32k INFO ====> Epoch: 99
+ 2023-02-22 03:09:42,957 32k INFO ====> Epoch: 100
+ 2023-02-22 03:10:05,045 32k INFO Train Epoch: 101 [0%]
+ 2023-02-22 03:10:05,046 32k INFO [2.5269906520843506, 2.3146581649780273, 7.8620123863220215, 17.898523330688477, 0.5046048760414124, 9800, 9.875770288847208e-05]
+ 2023-02-22 03:11:28,848 32k INFO ====> Epoch: 101
+ 2023-02-22 03:13:14,566 32k INFO ====> Epoch: 102
+ 2023-02-22 03:13:40,056 32k INFO Train Epoch: 103 [4%]
+ 2023-02-22 03:13:40,057 32k INFO [2.1958470344543457, 2.465301752090454, 14.825515747070312, 22.445404052734375, 0.4519413709640503, 10000, 9.873301500583906e-05]
+ 2023-02-22 03:13:44,302 32k INFO Saving model and optimizer state at iteration 103 to ./logs\32k\G_10000.pth
+ 2023-02-22 03:14:01,957 32k INFO Saving model and optimizer state at iteration 103 to ./logs\32k\D_10000.pth
+ 2023-02-22 03:15:25,803 32k INFO ====> Epoch: 103
+ 2023-02-22 03:17:12,136 32k INFO ====> Epoch: 104
+ 2023-02-22 03:17:41,752 32k INFO Train Epoch: 105 [8%]
+ 2023-02-22 03:17:41,752 32k INFO [2.530261993408203, 2.3227694034576416, 11.736129760742188, 16.371919631958008, 0.7204845547676086, 10200, 9.870833329479095e-05]
+ 2023-02-22 03:18:58,780 32k INFO ====> Epoch: 105
+ 2023-02-22 03:20:45,072 32k INFO ====> Epoch: 106
+ 2023-02-22 03:21:18,158 32k INFO Train Epoch: 107 [12%]
+ 2023-02-22 03:21:18,159 32k INFO [2.4681787490844727, 2.8919472694396973, 8.824729919433594, 16.09588623046875, 0.8613712787628174, 10400, 9.868365775378495e-05]
+ 2023-02-22 03:22:31,624 32k INFO ====> Epoch: 107
+ 2023-02-22 03:24:17,336 32k INFO ====> Epoch: 108
+ 2023-02-22 03:24:53,255 32k INFO Train Epoch: 109 [16%]
+ 2023-02-22 03:24:53,256 32k INFO [2.483661413192749, 2.4146034717559814, 9.786486625671387, 18.913461685180664, 1.075326919555664, 10600, 9.865898838127865e-05]
+ 2023-02-22 03:26:03,360 32k INFO ====> Epoch: 109
+ 2023-02-22 03:27:49,697 32k INFO ====> Epoch: 110
+ 2023-02-22 03:28:29,576 32k INFO Train Epoch: 111 [20%]
+ 2023-02-22 03:28:29,577 32k INFO [2.538566827774048, 2.12762713432312, 7.408393859863281, 13.515085220336914, 0.8555375933647156, 10800, 9.863432517573002e-05]
+ 2023-02-22 03:29:36,385 32k INFO ====> Epoch: 111
+ 2023-02-22 03:31:22,092 32k INFO ====> Epoch: 112
+ 2023-02-22 03:32:04,865 32k INFO Train Epoch: 113 [24%]
+ 2023-02-22 03:32:04,866 32k INFO [2.1984810829162598, 2.4401934146881104, 14.276609420776367, 22.693824768066406, 0.9264913201332092, 11000, 9.86096681355974e-05]
+ 2023-02-22 03:32:09,113 32k INFO Saving model and optimizer state at iteration 113 to ./logs\32k\G_11000.pth
+ 2023-02-22 03:32:25,992 32k INFO Saving model and optimizer state at iteration 113 to ./logs\32k\D_11000.pth
+ 2023-02-22 03:33:32,900 32k INFO ====> Epoch: 113
+ 2023-02-22 03:35:19,193 32k INFO ====> Epoch: 114
+ 2023-02-22 03:36:06,007 32k INFO Train Epoch: 115 [29%]
+ 2023-02-22 03:36:06,008 32k INFO [2.3687469959259033, 2.8906643390655518, 9.432950019836426, 19.32032585144043, 0.8109574913978577, 11200, 9.858501725933955e-05]
+ 2023-02-22 03:37:05,975 32k INFO ====> Epoch: 115
+ 2023-02-22 03:38:52,586 32k INFO ====> Epoch: 116
+ 2023-02-22 03:39:42,332 32k INFO Train Epoch: 117 [33%]
+ 2023-02-22 03:39:42,333 32k INFO [2.178363561630249, 2.667116641998291, 13.681983947753906, 20.458284378051758, 0.8319520950317383, 11400, 9.85603725454156e-05]
+ 2023-02-22 03:40:38,811 32k INFO ====> Epoch: 117
+ 2023-02-22 03:42:24,730 32k INFO ====> Epoch: 118
+ 2023-02-22 03:43:19,888 32k INFO Train Epoch: 119 [37%]
+ 2023-02-22 03:43:19,889 32k INFO [2.546008586883545, 2.2007601261138916, 10.31205940246582, 18.787328720092773, 1.2723021507263184, 11600, 9.853573399228505e-05]
+ 2023-02-22 03:44:12,958 32k INFO ====> Epoch: 119
+ 2023-02-22 03:45:58,715 32k INFO ====> Epoch: 120
+ 2023-02-22 03:46:55,149 32k INFO Train Epoch: 121 [41%]
+ 2023-02-22 03:46:55,149 32k INFO [2.446214437484741, 2.449553966522217, 13.851907730102539, 21.115680694580078, 0.7339575886726379, 11800, 9.851110159840781e-05]
+ 2023-02-22 03:47:44,775 32k INFO ====> Epoch: 121
+ 2023-02-22 03:49:30,581 32k INFO ====> Epoch: 122
+ 2023-02-22 03:50:30,560 32k INFO Train Epoch: 123 [45%]
+ 2023-02-22 03:50:30,560 32k INFO [2.35149884223938, 2.75274658203125, 12.845666885375977, 18.79739761352539, 1.1703294515609741, 12000, 9.848647536224416e-05]
+ 2023-02-22 03:50:34,776 32k INFO Saving model and optimizer state at iteration 123 to ./logs\32k\G_12000.pth
+ 2023-02-22 03:50:53,144 32k INFO Saving model and optimizer state at iteration 123 to ./logs\32k\D_12000.pth
+ 2023-02-22 03:51:42,693 32k INFO ====> Epoch: 123
+ 2023-02-22 03:53:28,929 32k INFO ====> Epoch: 124
+ 2023-02-22 03:54:32,842 32k INFO Train Epoch: 125 [49%]
+ 2023-02-22 03:54:32,842 32k INFO [2.589078664779663, 2.1445438861846924, 11.166776657104492, 17.973941802978516, 0.7241864204406738, 12200, 9.846185528225477e-05]
+ 2023-02-22 03:55:15,637 32k INFO ====> Epoch: 125
+ 2023-02-22 03:57:01,184 32k INFO ====> Epoch: 126
+ 2023-02-22 03:58:08,640 32k INFO Train Epoch: 127 [53%]
+ 2023-02-22 03:58:08,640 32k INFO [2.5327906608581543, 2.081197500228882, 7.769866943359375, 14.17745590209961, 0.6943719983100891, 12400, 9.84372413569007e-05]
+ 2023-02-22 03:58:47,757 32k INFO ====> Epoch: 127
+ 2023-02-22 04:00:34,155 32k INFO ====> Epoch: 128
+ 2023-02-22 04:01:44,324 32k INFO Train Epoch: 129 [57%]
+ 2023-02-22 04:01:44,325 32k INFO [2.5065693855285645, 2.289240837097168, 8.885828971862793, 15.302033424377441, 1.1030964851379395, 12600, 9.841263358464336e-05]
+ 2023-02-22 04:02:20,017 32k INFO ====> Epoch: 129
+ 2023-02-22 04:04:05,745 32k INFO ====> Epoch: 130
+ 2023-02-22 04:05:19,448 32k INFO Train Epoch: 131 [61%]
+ 2023-02-22 04:05:19,448 32k INFO [2.339315414428711, 2.6337029933929443, 12.177719116210938, 20.183080673217773, 1.1095930337905884, 12800, 9.838803196394459e-05]
+ 2023-02-22 04:05:51,845 32k INFO ====> Epoch: 131
+ 2023-02-22 04:07:37,384 32k INFO ====> Epoch: 132
+ 2023-02-22 04:08:54,557 32k INFO Train Epoch: 133 [65%]
+ 2023-02-22 04:08:54,557 32k INFO [2.3692660331726074, 2.2918972969055176, 13.822396278381348, 20.760438919067383, 0.7944438457489014, 13000, 9.836343649326659e-05]
+ 2023-02-22 04:08:58,801 32k INFO Saving model and optimizer state at iteration 133 to ./logs\32k\G_13000.pth
+ 2023-02-22 04:09:17,244 32k INFO Saving model and optimizer state at iteration 133 to ./logs\32k\D_13000.pth
+ 2023-02-22 04:09:49,268 32k INFO ====> Epoch: 133
+ 2023-02-22 04:11:35,690 32k INFO ====> Epoch: 134
+ 2023-02-22 04:12:56,725 32k INFO Train Epoch: 135 [69%]
+ 2023-02-22 04:12:56,725 32k INFO [2.538072109222412, 2.147736072540283, 8.285018920898438, 15.206883430480957, 0.571492075920105, 13200, 9.833884717107196e-05]
+ 2023-02-22 04:13:22,345 32k INFO ====> Epoch: 135
+ 2023-02-22 04:15:08,661 32k INFO ====> Epoch: 136
+ 2023-02-22 04:16:32,598 32k INFO Train Epoch: 137 [73%]
+ 2023-02-22 04:16:32,599 32k INFO [2.5354621410369873, 2.3016741275787354, 11.92038345336914, 18.641353607177734, 0.539569616317749, 13400, 9.831426399582366e-05]
+ 2023-02-22 04:16:54,692 32k INFO ====> Epoch: 137
+ 2023-02-22 04:18:41,307 32k INFO ====> Epoch: 138
+ 2023-02-22 04:20:09,435 32k INFO Train Epoch: 139 [78%]
+ 2023-02-22 04:20:09,436 32k INFO [2.333071231842041, 2.484257221221924, 12.461816787719727, 19.274198532104492, 0.8236677050590515, 13600, 9.828968696598508e-05]
+ 2023-02-22 04:20:28,071 32k INFO ====> Epoch: 139
+ 2023-02-22 04:22:14,167 32k INFO ====> Epoch: 140
+ 2023-02-22 04:23:45,593 32k INFO Train Epoch: 141 [82%]
+ 2023-02-22 04:23:45,594 32k INFO [2.4791781902313232, 2.1751325130462646, 10.909151077270508, 14.634515762329102, 0.7781238555908203, 13800, 9.826511608001993e-05]
+ 2023-02-22 04:24:00,820 32k INFO ====> Epoch: 141
+ 2023-02-22 04:25:46,692 32k INFO ====> Epoch: 142
+ 2023-02-22 04:27:20,993 32k INFO Train Epoch: 143 [86%]
+ 2023-02-22 04:27:20,993 32k INFO [2.346167802810669, 2.548734188079834, 13.352293014526367, 18.923927307128906, 0.5950165390968323, 14000, 9.824055133639235e-05]
+ 2023-02-22 04:27:25,245 32k INFO Saving model and optimizer state at iteration 143 to ./logs\32k\G_14000.pth
+ 2023-02-22 04:27:45,494 32k INFO Saving model and optimizer state at iteration 143 to ./logs\32k\D_14000.pth
+ 2023-02-22 04:28:00,501 32k INFO ====> Epoch: 143
+ 2023-02-22 04:29:46,683 32k INFO ====> Epoch: 144
+ 2023-02-22 04:31:25,056 32k INFO Train Epoch: 145 [90%]
+ 2023-02-22 04:31:25,056 32k INFO [2.4927594661712646, 2.3070590496063232, 10.073404312133789, 16.529218673706055, 0.670913815498352, 14200, 9.821599273356685e-05]
+ 2023-02-22 04:31:33,427 32k INFO ====> Epoch: 145
+ 2023-02-22 04:33:19,691 32k INFO ====> Epoch: 146
+ 2023-02-22 04:35:00,838 32k INFO Train Epoch: 147 [94%]
+ 2023-02-22 04:35:00,839 32k INFO [2.4546353816986084, 2.278806686401367, 8.069828987121582, 13.182048797607422, 0.5054904222488403, 14400, 9.819144027000834e-05]
+ 2023-02-22 04:35:05,693 32k INFO ====> Epoch: 147
+ 2023-02-22 04:36:51,295 32k INFO ====> Epoch: 148
+ 2023-02-22 04:38:35,846 32k INFO Train Epoch: 149 [98%]
+ 2023-02-22 04:38:35,847 32k INFO [2.5683984756469727, 2.2057595252990723, 10.475971221923828, 16.20850944519043, 0.9734923839569092, 14600, 9.816689394418209e-05]
+ 2023-02-22 04:38:37,247 32k INFO ====> Epoch: 149
+ 2023-02-22 04:40:25,138 32k INFO ====> Epoch: 150
+ 2023-02-22 04:42:11,207 32k INFO ====> Epoch: 151
+ 2023-02-22 04:42:35,029 32k INFO Train Epoch: 152 [2%]
+ 2023-02-22 04:42:35,030 32k INFO [2.4951038360595703, 2.260463237762451, 11.500944137573242, 17.902042388916016, 0.9977881908416748, 14800, 9.813008596033443e-05]
+ 2023-02-22 04:43:57,118 32k INFO ====> Epoch: 152
+ 2023-02-22 04:45:42,690 32k INFO ====> Epoch: 153
+ 2023-02-22 04:46:10,019 32k INFO Train Epoch: 154 [6%]
+ 2023-02-22 04:46:10,019 32k INFO [2.4445221424102783, 2.4485650062561035, 12.310842514038086, 20.25748634338379, 0.7647718191146851, 15000, 9.810555497212693e-05]
+ 2023-02-22 04:46:14,290 32k INFO Saving model and optimizer state at iteration 154 to ./logs\32k\G_15000.pth
+ 2023-02-22 04:46:31,659 32k INFO Saving model and optimizer state at iteration 154 to ./logs\32k\D_15000.pth
+ 2023-02-22 04:47:53,918 32k INFO ====> Epoch: 154
+ 2023-02-22 04:49:40,479 32k INFO ====> Epoch: 155
+ 2023-02-22 04:50:11,813 32k INFO Train Epoch: 156 [10%]
+ 2023-02-22 04:50:11,813 32k INFO [2.5242230892181396, 2.2266323566436768, 12.2184419631958, 17.383615493774414, 1.0418349504470825, 15200, 9.808103011628319e-05]
+ 2023-02-22 04:51:27,234 32k INFO ====> Epoch: 156
+ 2023-02-22 04:53:13,568 32k INFO ====> Epoch: 157
+ 2023-02-22 04:53:47,515 32k INFO Train Epoch: 158 [14%]
+ 2023-02-22 04:53:47,516 32k INFO [2.5447096824645996, 2.3157331943511963, 8.80300521850586, 14.484112739562988, 0.7142147421836853, 15400, 9.80565113912702e-05]
+ 2023-02-22 04:54:59,247 32k INFO ====> Epoch: 158
+ 2023-02-22 04:56:44,971 32k INFO ====> Epoch: 159
+ 2023-02-22 04:57:22,561 32k INFO Train Epoch: 160 [18%]
+ 2023-02-22 04:57:22,561 32k INFO [2.3957338333129883, 2.372086524963379, 8.605619430541992, 15.040207862854004, 0.9432356357574463, 15600, 9.803199879555537e-05]
+ 2023-02-22 04:58:30,960 32k INFO ====> Epoch: 160
+ 2023-02-22 05:00:16,754 32k INFO ====> Epoch: 161
+ 2023-02-22 05:00:57,776 32k INFO Train Epoch: 162 [22%]
+ 2023-02-22 05:00:57,776 32k INFO [2.501303195953369, 2.3233895301818848, 9.497570991516113, 15.891914367675781, 0.9494279026985168, 15800, 9.800749232760646e-05]
+ 2023-02-22 05:02:02,727 32k INFO ====> Epoch: 162
+ 2023-02-22 05:03:48,404 32k INFO ====> Epoch: 163
+ 2023-02-22 05:04:32,896 32k INFO Train Epoch: 164 [27%]
+ 2023-02-22 05:04:32,896 32k INFO [2.362159013748169, 2.503201484680176, 10.656201362609863, 16.783613204956055, 0.9532788395881653, 16000, 9.798299198589162e-05]
+ 2023-02-22 05:04:37,788 32k INFO Saving model and optimizer state at iteration 164 to ./logs\32k\G_16000.pth
+ 2023-02-22 05:04:56,829 32k INFO Saving model and optimizer state at iteration 164 to ./logs\32k\D_16000.pth
+ 2023-02-22 05:06:01,710 32k INFO ====> Epoch: 164
+ 2023-02-22 05:07:48,176 32k INFO ====> Epoch: 165
+ 2023-02-22 05:08:36,653 32k INFO Train Epoch: 166 [31%]
+ 2023-02-22 05:08:36,653 32k INFO [2.57973575592041, 1.983064889907837, 8.993478775024414, 16.000688552856445, 0.700424075126648, 16200, 9.795849776887939e-05]
+ 2023-02-22 05:09:34,858 32k INFO ====> Epoch: 166
+ 2023-02-22 05:11:21,054 32k INFO ====> Epoch: 167
+ 2023-02-22 05:12:13,078 32k INFO Train Epoch: 168 [35%]
+ 2023-02-22 05:12:13,079 32k INFO [2.464907646179199, 2.1761608123779297, 8.09338665008545, 13.37960433959961, 0.97138512134552, 16400, 9.79340096750387e-05]
+ 2023-02-22 05:13:07,694 32k INFO ====> Epoch: 168
+ 2023-02-22 05:14:53,372 32k INFO ====> Epoch: 169
+ 2023-02-22 05:15:48,194 32k INFO Train Epoch: 170 [39%]
+ 2023-02-22 05:15:48,195 32k INFO [2.4897146224975586, 2.3846330642700195, 9.467925071716309, 17.97328758239746, 0.2975856363773346, 16600, 9.790952770283884e-05]
+ 2023-02-22 05:16:39,376 32k INFO ====> Epoch: 170
+ 2023-02-22 05:18:24,901 32k INFO ====> Epoch: 171
+ 2023-02-22 05:19:22,981 32k INFO Train Epoch: 172 [43%]
+ 2023-02-22 05:19:22,981 32k INFO [2.409517288208008, 2.347320795059204, 13.86928939819336, 19.764402389526367, 0.5637670159339905, 16800, 9.78850518507495e-05]
+ 2023-02-22 05:20:10,773 32k INFO ====> Epoch: 172
+ 2023-02-22 05:21:56,270 32k INFO ====> Epoch: 173
+ 2023-02-22 05:22:57,877 32k INFO Train Epoch: 174 [47%]
+ 2023-02-22 05:22:57,877 32k INFO [2.4215376377105713, 2.1729211807250977, 11.39505386352539, 18.73835563659668, 0.5306751728057861, 17000, 9.786058211724074e-05]
+ 2023-02-22 05:23:02,291 32k INFO Saving model and optimizer state at iteration 174 to ./logs\32k\G_17000.pth
+ 2023-02-22 05:23:19,621 32k INFO Saving model and optimizer state at iteration 174 to ./logs\32k\D_17000.pth
+ 2023-02-22 05:24:07,507 32k INFO ====> Epoch: 174
+ 2023-02-22 05:25:53,899 32k INFO ====> Epoch: 175
+ 2023-02-22 05:26:59,648 32k INFO Train Epoch: 176 [51%]
+ 2023-02-22 05:26:59,648 32k INFO [2.41857647895813, 2.1925084590911865, 10.57753849029541, 18.43206214904785, 0.6052053570747375, 17200, 9.783611850078301e-05]
+ 2023-02-22 05:27:40,697 32k INFO ====> Epoch: 176
+ 2023-02-22 05:29:27,079 32k INFO ====> Epoch: 177
+ 2023-02-22 05:30:35,514 32k INFO Train Epoch: 178 [55%]
+ 2023-02-22 05:30:35,514 32k INFO [2.3244147300720215, 2.360485315322876, 14.300114631652832, 19.833555221557617, 0.5028417110443115, 17400, 9.781166099984716e-05]
+ 2023-02-22 05:31:12,982 32k INFO ====> Epoch: 178
+ 2023-02-22 05:33:00,756 32k INFO ====> Epoch: 179
+ 2023-02-22 05:34:14,763 32k INFO Train Epoch: 180 [59%]
+ 2023-02-22 05:34:14,763 32k INFO [2.428579807281494, 2.262065887451172, 10.987186431884766, 19.099437713623047, 0.6582584381103516, 17600, 9.778720961290439e-05]
+ 2023-02-22 05:34:48,821 32k INFO ====> Epoch: 180
+ 2023-02-22 05:36:34,639 32k INFO ====> Epoch: 181
+ 2023-02-22 05:37:49,982 32k INFO Train Epoch: 182 [63%]
+ 2023-02-22 05:37:49,983 32k INFO [2.61875581741333, 1.9758424758911133, 9.652741432189941, 14.875738143920898, 0.9741345643997192, 17800, 9.776276433842631e-05]
+ 2023-02-22 05:38:20,692 32k INFO ====> Epoch: 182
+ 2023-02-22 05:40:06,124 32k INFO ====> Epoch: 183
+ 2023-02-22 05:41:24,889 32k INFO Train Epoch: 184 [67%]
+ 2023-02-22 05:41:24,890 32k INFO [2.540966033935547, 2.3001551628112793, 9.484389305114746, 17.637712478637695, 0.7832726240158081, 18000, 9.773832517488488e-05]
+ 2023-02-22 05:41:29,138 32k INFO Saving model and optimizer state at iteration 184 to ./logs\32k\G_18000.pth
+ 2023-02-22 05:41:45,744 32k INFO Saving model and optimizer state at iteration 184 to ./logs\32k\D_18000.pth
+ 2023-02-22 05:42:16,267 32k INFO ====> Epoch: 184
+ 2023-02-22 05:44:02,354 32k INFO ====> Epoch: 185
+ 2023-02-22 05:45:25,035 32k INFO Train Epoch: 186 [71%]
+ 2023-02-22 05:45:25,035 32k INFO [2.5191123485565186, 2.5359508991241455, 11.501497268676758, 19.635244369506836, 0.6616455912590027, 18200, 9.771389212075249e-05]
+ 2023-02-22 05:45:48,797 32k INFO ====> Epoch: 186
+ 2023-02-22 05:47:34,913 32k INFO ====> Epoch: 187
+ 2023-02-22 05:49:00,561 32k INFO Train Epoch: 188 [76%]
+ 2023-02-22 05:49:00,562 32k INFO [2.280853748321533, 2.274311065673828, 13.668123245239258, 18.913005828857422, 1.0380589962005615, 18400, 9.768946517450186e-05]
+ 2023-02-22 05:49:20,932 32k INFO ====> Epoch: 188
+ 2023-02-22 05:51:06,481 32k INFO ====> Epoch: 189
+ 2023-02-22 05:52:35,577 32k INFO Train Epoch: 190 [80%]
+ 2023-02-22 05:52:35,577 32k INFO [2.2568392753601074, 2.6230015754699707, 11.525728225708008, 18.298168182373047, 0.6746451258659363, 18600, 9.766504433460612e-05]
+ 2023-02-22 05:52:52,435 32k INFO ====> Epoch: 190
+ 2023-02-22 05:54:38,241 32k INFO ====> Epoch: 191
+ 2023-02-22 05:56:10,787 32k INFO Train Epoch: 192 [84%]
+ 2023-02-22 05:56:10,788 32k INFO [2.5578360557556152, 2.209872007369995, 8.583542823791504, 16.00850486755371, 1.0789265632629395, 18800, 9.764062959953878e-05]
+ 2023-02-22 05:56:24,223 32k INFO ====> Epoch: 192
+ 2023-02-22 05:58:09,905 32k INFO ====> Epoch: 193
+ 2023-02-22 05:59:45,738 32k INFO Train Epoch: 194 [88%]
+ 2023-02-22 05:59:45,738 32k INFO [2.387702703475952, 2.4445672035217285, 13.355128288269043, 16.8066463470459, 1.147564172744751, 19000, 9.761622096777372e-05]
+ 2023-02-22 05:59:50,090 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\G_19000.pth
+ 2023-02-22 06:00:07,465 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\D_19000.pth
+ 2023-02-22 06:00:21,346 32k INFO ====> Epoch: 194
+ 2023-02-22 06:02:07,550 32k INFO ====> Epoch: 195
+ 2023-02-22 06:03:47,560 32k INFO Train Epoch: 196 [92%]
+ 2023-02-22 06:03:47,561 32k INFO [2.5213208198547363, 2.257534980773926, 12.258462905883789, 19.621543884277344, 0.6611289978027344, 19200, 9.759181843778522e-05]
+ 2023-02-22 06:03:54,101 32k INFO ====> Epoch: 196
+ 2023-02-22 06:05:40,426 32k INFO ====> Epoch: 197
+ 2023-02-22 06:07:23,844 32k INFO Train Epoch: 198 [96%]
+ 2023-02-22 06:07:23,845 32k INFO [2.470740795135498, 2.21482253074646, 9.043266296386719, 14.386473655700684, 0.9298809766769409, 19400, 9.756742200804793e-05]
+ 2023-02-22 06:07:26,950 32k INFO ====> Epoch: 198
+ 2023-02-22 06:09:12,577 32k INFO ====> Epoch: 199
+ 2023-02-22 06:10:58,804 32k INFO ====> Epoch: 200
+ 2023-02-22 06:11:21,552 32k INFO Train Epoch: 201 [0%]
+ 2023-02-22 06:11:21,553 32k INFO [2.2393460273742676, 2.660759210586548, 12.342625617980957, 18.233539581298828, 0.6276586651802063, 19600, 9.753083879807726e-05]
+ 2023-02-22 06:12:45,383 32k INFO ====> Epoch: 201
+ 2023-02-22 06:14:31,637 32k INFO ====> Epoch: 202
+ 2023-02-22 06:14:57,801 32k INFO Train Epoch: 203 [4%]
+ 2023-02-22 06:14:57,801 32k INFO [2.072805166244507, 2.761627435684204, 14.632217407226562, 19.264434814453125, 0.6756956577301025, 19800, 9.750645761229709e-05]
+ 2023-02-22 06:16:18,193 32k INFO ====> Epoch: 203
+ 2023-02-22 06:18:03,843 32k INFO ====> Epoch: 204
+ 2023-02-22 06:18:32,695 32k INFO Train Epoch: 205 [8%]
+ 2023-02-22 06:18:32,695 32k INFO [2.238548994064331, 2.5923948287963867, 13.83581829071045, 18.70966148376465, 0.808910071849823, 20000, 9.748208252143241e-05]
+ 2023-02-22 06:18:36,991 32k INFO Saving model and optimizer state at iteration 205 to ./logs\32k\G_20000.pth
+ 2023-02-22 06:18:52,780 32k INFO Saving model and optimizer state at iteration 205 to ./logs\32k\D_20000.pth
+ 2023-02-22 06:20:13,632 32k INFO ====> Epoch: 205
+ 2023-02-22 06:21:59,799 32k INFO ====> Epoch: 206
+ 2023-02-22 06:22:32,683 32k INFO Train Epoch: 207 [12%]
+ 2023-02-22 06:22:32,684 32k INFO [2.494340181350708, 2.35404372215271, 10.402792930603027, 18.646995544433594, 1.194170594215393, 20200, 9.745771352395957e-05]
+ 2023-02-22 06:23:46,237 32k INFO ====> Epoch: 207
+ 2023-02-22 06:25:31,755 32k INFO ====> Epoch: 208
+ 2023-02-22 06:26:07,498 32k INFO Train Epoch: 209 [16%]
+ 2023-02-22 06:26:07,499 32k INFO [2.5353317260742188, 2.2308082580566406, 10.610563278198242, 16.8080997467041, 0.9148491621017456, 20400, 9.743335061835535e-05]
+ 2023-02-22 06:27:17,681 32k INFO ====> Epoch: 209
+ 2023-02-22 06:29:03,252 32k INFO ====> Epoch: 210
+ 2023-02-22 06:29:42,394 32k INFO Train Epoch: 211 [20%]
+ 2023-02-22 06:29:42,395 32k INFO [2.6447272300720215, 1.964892864227295, 8.727736473083496, 12.838239669799805, 0.6997866630554199, 20600, 9.740899380309685e-05]
+ 2023-02-22 06:30:49,054 32k INFO ====> Epoch: 211
+ 2023-02-22 06:32:35,297 32k INFO ====> Epoch: 212
+ 2023-02-22 06:33:18,576 32k INFO Train Epoch: 213 [24%]
+ 2023-02-22 06:33:18,577 32k INFO [2.2379984855651855, 2.403615713119507, 15.184333801269531, 22.139081954956055, 0.9633049368858337, 20800, 9.73846430766616e-05]
+ 2023-02-22 06:34:21,815 32k INFO ====> Epoch: 213
+ 2023-02-22 06:36:08,032 32k INFO ====> Epoch: 214
+ 2023-02-22 06:36:54,126 32k INFO Train Epoch: 215 [29%]
+ 2023-02-22 06:36:54,127 32k INFO [2.6470189094543457, 2.246066093444824, 8.519152641296387, 15.820573806762695, 1.0386731624603271, 21000, 9.736029843752747e-05]
+ 2023-02-22 06:36:58,320 32k INFO Saving model and optimizer state at iteration 215 to ./logs\32k\G_21000.pth
+ 2023-02-22 06:37:15,347 32k INFO Saving model and optimizer state at iteration 215 to ./logs\32k\D_21000.pth
+ 2023-02-22 06:38:18,371 32k INFO ====> Epoch: 215
+ 2023-02-22 06:40:04,492 32k INFO ====> Epoch: 216
+ 2023-02-22 06:40:54,670 32k INFO Train Epoch: 217 [33%]
+ 2023-02-22 06:40:54,670 32k INFO [2.090405225753784, 2.5237374305725098, 15.738249778747559, 21.418203353881836, 1.2358826398849487, 21200, 9.733595988417275e-05]
+ 2023-02-22 06:41:51,113 32k INFO ====> Epoch: 217
+ 2023-02-22 06:43:37,251 32k INFO ====> Epoch: 218
+ 2023-02-22 06:44:30,781 32k INFO Train Epoch: 219 [37%]
+ 2023-02-22 06:44:30,781 32k INFO [2.5889697074890137, 2.2686333656311035, 8.668608665466309, 14.998424530029297, 1.0810593366622925, 21400, 9.731162741507607e-05]
+ 2023-02-22 06:45:23,689 32k INFO ====> Epoch: 219
+ 2023-02-22 06:47:09,292 32k INFO ====> Epoch: 220
+ 2023-02-22 06:48:06,268 32k INFO Train Epoch: 221 [41%]
+ 2023-02-22 06:48:06,269 32k INFO [2.2671685218811035, 2.67838978767395, 14.996197700500488, 21.1551570892334, 0.7494913339614868, 21600, 9.728730102871649e-05]
+ 2023-02-22 06:48:55,957 32k INFO ====> Epoch: 221
+ 2023-02-22 06:50:42,311 32k INFO ====> Epoch: 222
+ 2023-02-22 06:51:42,046 32k INFO Train Epoch: 223 [45%]
+ 2023-02-22 06:51:42,046 32k INFO [2.547105312347412, 2.134889841079712, 9.186394691467285, 14.266695022583008, 0.6058226823806763, 21800, 9.726298072357337e-05]
+ 2023-02-22 06:52:28,072 32k INFO ====> Epoch: 223
+ 2023-02-22 06:54:14,252 32k INFO ====> Epoch: 224
+ 2023-02-22 06:55:18,050 32k INFO Train Epoch: 225 [49%]
+ 2023-02-22 06:55:18,050 32k INFO [2.3165359497070312, 2.48864483833313, 13.522871971130371, 20.89539337158203, 0.46696582436561584, 22000, 9.723866649812655e-05]
+ 2023-02-22 06:55:22,919 32k INFO Saving model and optimizer state at iteration 225 to ./logs\32k\G_22000.pth
+ 2023-02-22 06:55:42,032 32k INFO Saving model and optimizer state at iteration 225 to ./logs\32k\D_22000.pth
+ 2023-02-22 06:56:28,028 32k INFO ====> Epoch: 225
+ 2023-02-22 06:58:14,309 32k INFO ====> Epoch: 226
+ 2023-02-22 06:59:21,533 32k INFO Train Epoch: 227 [53%]
+ 2023-02-22 06:59:21,534 32k INFO [2.635965347290039, 2.3602662086486816, 8.628167152404785, 12.540104866027832, 0.9171032309532166, 22200, 9.721435835085619e-05]
+ 2023-02-22 07:00:00,662 32k INFO ====> Epoch: 227
+ 2023-02-22 07:01:46,728 32k INFO ====> Epoch: 228
+ 2023-02-22 07:02:57,519 32k INFO Train Epoch: 229 [57%]
+ 2023-02-22 07:02:57,519 32k INFO [2.310483932495117, 2.573824405670166, 13.390860557556152, 19.26698112487793, 0.859022855758667, 22400, 9.719005628024282e-05]
+ 2023-02-22 07:03:33,283 32k INFO ====> Epoch: 229
+ 2023-02-22 07:05:18,825 32k INFO ====> Epoch: 230
+ 2023-02-22 07:06:32,406 32k INFO Train Epoch: 231 [61%]
+ 2023-02-22 07:06:32,406 32k INFO [2.4883522987365723, 2.472439765930176, 11.817012786865234, 18.11069107055664, 0.8754430413246155, 22600, 9.716576028476738e-05]
+ 2023-02-22 07:07:04,836 32k INFO ====> Epoch: 231
+ 2023-02-22 07:08:50,484 32k INFO ====> Epoch: 232
+ 2023-02-22 07:10:07,417 32k INFO Train Epoch: 233 [65%]
+ 2023-02-22 07:10:07,418 32k INFO [2.2918412685394287, 2.6064612865448, 12.61550235748291, 19.839508056640625, 0.7568845748901367, 22800, 9.714147036291117e-05]
+ 2023-02-22 07:10:36,292 32k INFO ====> Epoch: 233
+ 2023-02-22 07:12:21,934 32k INFO ====> Epoch: 234
+ 2023-02-22 07:13:42,479 32k INFO Train Epoch: 235 [69%]
+ 2023-02-22 07:13:42,479 32k INFO [2.367769479751587, 2.290179967880249, 12.814183235168457, 18.604658126831055, 0.7774146199226379, 23000, 9.711718651315591e-05]
+ 2023-02-22 07:13:46,725 32k INFO Saving model and optimizer state at iteration 235 to ./logs\32k\G_23000.pth
+ 2023-02-22 07:14:02,024 32k INFO Saving model and optimizer state at iteration 235 to ./logs\32k\D_23000.pth
+ 2023-02-22 07:14:31,102 32k INFO ====> Epoch: 235
+ 2023-02-22 07:16:17,362 32k INFO ====> Epoch: 236
+ 2023-02-22 07:17:41,855 32k INFO Train Epoch: 237 [73%]
+ 2023-02-22 07:17:41,855 32k INFO [2.2795772552490234, 2.5844802856445312, 11.45567512512207, 18.593366622924805, 0.8924825191497803, 23200, 9.709290873398365e-05]
+ 2023-02-22 07:18:03,931 32k INFO ====> Epoch: 237
+ 2023-02-22 07:19:50,029 32k INFO ====> Epoch: 238
+ 2023-02-22 07:21:17,892 32k INFO Train Epoch: 239 [78%]
+ 2023-02-22 07:21:17,893 32k INFO [2.455188274383545, 2.681325912475586, 12.097657203674316, 20.17988395690918, 1.3684245347976685, 23400, 9.706863702387684e-05]
+ 2023-02-22 07:21:36,551 32k INFO ====> Epoch: 239
+ 2023-02-22 07:23:24,284 32k INFO ====> Epoch: 240
+ 2023-02-22 07:24:57,015 32k INFO Train Epoch: 241 [82%]
+ 2023-02-22 07:24:57,016 32k INFO [2.629354953765869, 2.118459701538086, 10.275418281555176, 14.83414363861084, 0.5261375904083252, 23600, 9.704437138131832e-05]
+ 2023-02-22 07:25:12,296 32k INFO ====> Epoch: 241
+ 2023-02-22 07:26:57,752 32k INFO ====> Epoch: 242
+ 2023-02-22 07:28:32,000 32k INFO Train Epoch: 243 [86%]
+ 2023-02-22 07:28:32,001 32k INFO [2.329456329345703, 2.3450889587402344, 12.364802360534668, 16.953144073486328, 0.7070972919464111, 23800, 9.702011180479129e-05]
+ 2023-02-22 07:28:43,709 32k INFO ====> Epoch: 243
+ 2023-02-22 07:30:29,209 32k INFO ====> Epoch: 244
+ 2023-02-22 07:32:06,746 32k INFO Train Epoch: 245 [90%]
+ 2023-02-22 07:32:06,747 32k INFO [2.4288482666015625, 2.3079452514648438, 14.041245460510254, 19.745018005371094, 0.862689733505249, 24000, 9.699585829277933e-05]
+ 2023-02-22 07:32:10,956 32k INFO Saving model and optimizer state at iteration 245 to ./logs\32k\G_24000.pth
+ 2023-02-22 07:32:29,071 32k INFO Saving model and optimizer state at iteration 245 to ./logs\32k\D_24000.pth
+ 2023-02-22 07:32:40,767 32k INFO ====> Epoch: 245
+ 2023-02-22 07:34:26,808 32k INFO ====> Epoch: 246
+ 2023-02-22 07:36:08,402 32k INFO Train Epoch: 247 [94%]
+ 2023-02-22 07:36:08,403 32k INFO [2.373241901397705, 2.098410129547119, 10.478768348693848, 16.175146102905273, 0.6502178311347961, 24200, 9.69716108437664e-05]
+ 2023-02-22 07:36:13,231 32k INFO ====> Epoch: 247
+ 2023-02-22 07:37:59,442 32k INFO ====> Epoch: 248
+ 2023-02-22 07:39:43,876 32k INFO Train Epoch: 249 [98%]
+ 2023-02-22 07:39:43,877 32k INFO [2.40415096282959, 2.40627121925354, 10.043416976928711, 15.431482315063477, 0.813330888748169, 24400, 9.694736945623688e-05]
+ 2023-02-22 07:39:45,264 32k INFO ====> Epoch: 249
+ 2023-02-22 07:41:31,487 32k INFO ====> Epoch: 250
+ 2023-02-22 07:43:17,279 32k INFO ====> Epoch: 251
+ 2023-02-22 07:43:40,973 32k INFO Train Epoch: 252 [2%]
+ 2023-02-22 07:43:40,973 32k INFO [2.2679550647735596, 2.6712074279785156, 11.691126823425293, 17.59365463256836, 0.4011988639831543, 24600, 9.691101873690936e-05]
+ 2023-02-22 07:45:03,101 32k INFO ====> Epoch: 252
+ 2023-02-22 07:46:48,461 32k INFO ====> Epoch: 253
+ 2023-02-22 07:47:15,562 32k INFO Train Epoch: 254 [6%]
+ 2023-02-22 07:47:15,562 32k INFO [2.176159381866455, 2.6822993755340576, 14.428181648254395, 21.809728622436523, 0.8452849984169006, 24800, 9.68867924964598e-05]
+ 2023-02-22 07:48:34,159 32k INFO ====> Epoch: 254
+ 2023-02-22 07:50:19,905 32k INFO ====> Epoch: 255
+ 2023-02-22 07:50:51,111 32k INFO Train Epoch: 256 [10%]
+ 2023-02-22 07:50:51,111 32k INFO [2.5852203369140625, 2.461092710494995, 10.04835319519043, 14.898118019104004, 0.956852376461029, 25000, 9.68625723121918e-05]
+ 2023-02-22 07:50:55,927 32k INFO Saving model and optimizer state at iteration 256 to ./logs\32k\G_25000.pth
+ 2023-02-22 07:51:13,413 32k INFO Saving model and optimizer state at iteration 256 to ./logs\32k\D_25000.pth
+ 2023-02-22 07:52:32,056 32k INFO ====> Epoch: 256
+ 2023-02-22 07:54:18,097 32k INFO ====> Epoch: 257
+ 2023-02-22 07:54:52,734 32k INFO Train Epoch: 258 [14%]
+ 2023-02-22 07:54:52,734 32k INFO [2.432253837585449, 2.456174850463867, 9.509150505065918, 16.501110076904297, 0.4222906529903412, 25200, 9.683835818259144e-05]
+ 2023-02-22 07:56:04,434 32k INFO ====> Epoch: 258
+ 2023-02-22 07:57:50,485 32k INFO ====> Epoch: 259
+ 2023-02-22 07:58:27,976 32k INFO Train Epoch: 260 [18%]
+ 2023-02-22 07:58:27,977 32k INFO [2.4247806072235107, 2.345371723175049, 8.857375144958496, 15.63666820526123, 0.7407233119010925, 25400, 9.681415010614512e-05]
+ 2023-02-22 07:59:36,335 32k INFO ====> Epoch: 260
+ 2023-02-22 08:01:21,939 32k INFO ====> Epoch: 261
+ 2023-02-22 08:02:02,960 32k INFO Train Epoch: 262 [22%]
+ 2023-02-22 08:02:02,960 32k INFO [2.6320114135742188, 2.4106760025024414, 8.685118675231934, 12.927850723266602, 0.9680966138839722, 25600, 9.678994808133967e-05]
+ 2023-02-22 08:03:07,846 32k INFO ====> Epoch: 262
+ 2023-02-22 08:04:53,437 32k INFO ====> Epoch: 263
+ 2023-02-22 08:05:37,784 32k INFO Train Epoch: 264 [27%]
+ 2023-02-22 08:05:37,784 32k INFO [2.500497341156006, 2.2675929069519043, 8.80232048034668, 15.311875343322754, 0.8597989678382874, 25800, 9.676575210666227e-05]
+ 2023-02-22 08:06:39,268 32k INFO ====> Epoch: 264
+ 2023-02-22 08:08:24,850 32k INFO ====> Epoch: 265
+ 2023-02-22 08:09:12,688 32k INFO Train Epoch: 266 [31%]
+ 2023-02-22 08:09:12,688 32k INFO [2.3594794273376465, 2.2052268981933594, 10.455811500549316, 16.885915756225586, 0.6565972566604614, 26000, 9.674156218060047e-05]
+ 2023-02-22 08:09:17,007 32k INFO Saving model and optimizer state at iteration 266 to ./logs\32k\G_26000.pth
+ 2023-02-22 08:09:29,919 32k INFO Saving model and optimizer state at iteration 266 to ./logs\32k\D_26000.pth
+ 2023-02-22 08:10:31,024 32k INFO ====> Epoch: 266
+ 2023-02-22 08:12:17,253 32k INFO ====> Epoch: 267
+ 2023-02-22 08:13:09,136 32k INFO Train Epoch: 268 [35%]
+ 2023-02-22 08:13:09,137 32k INFO [2.6458592414855957, 2.1084747314453125, 10.409239768981934, 15.455072402954102, 0.5523946285247803, 26200, 9.671737830164223e-05]
+ 2023-02-22 08:14:03,759 32k INFO ====> Epoch: 268
+ 2023-02-22 08:15:49,415 32k INFO ====> Epoch: 269
+ 2023-02-22 08:16:44,117 32k INFO Train Epoch: 270 [39%]
+ 2023-02-22 08:16:44,118 32k INFO [2.1286776065826416, 2.643979549407959, 11.154251098632812, 16.767776489257812, 0.3980034291744232, 26400, 9.669320046827584e-05]
+ 2023-02-22 08:17:35,389 32k INFO ====> Epoch: 270
+ 2023-02-22 08:19:21,150 32k INFO ====> Epoch: 271
+ 2023-02-22 08:20:19,919 32k INFO Train Epoch: 272 [43%]
+ 2023-02-22 08:20:19,919 32k INFO [2.36246919631958, 2.305928945541382, 13.173051834106445, 20.20313262939453, 0.4515596628189087, 26600, 9.666902867899003e-05]
+ 2023-02-22 08:21:07,775 32k INFO ====> Epoch: 272
+ 2023-02-22 08:22:54,059 32k INFO ====> Epoch: 273
+ 2023-02-22 08:23:55,545 32k INFO Train Epoch: 274 [47%]
+ 2023-02-22 08:23:55,545 32k INFO [2.1859402656555176, 2.502265214920044, 10.459367752075195, 15.742925643920898, 0.7773081660270691, 26800, 9.664486293227385e-05]
+ 2023-02-22 08:24:39,934 32k INFO ====> Epoch: 274
+ 2023-02-22 08:26:25,586 32k INFO ====> Epoch: 275
+ 2023-02-22 08:27:30,547 32k INFO Train Epoch: 276 [51%]
+ 2023-02-22 08:27:30,548 32k INFO [2.3840584754943848, 2.286221742630005, 10.370285034179688, 17.517276763916016, 0.36077797412872314, 27000, 9.662070322661676e-05]
+ 2023-02-22 08:27:34,765 32k INFO Saving model and optimizer state at iteration 276 to ./logs\32k\G_27000.pth
+ 2023-02-22 08:27:49,261 32k INFO Saving model and optimizer state at iteration 276 to ./logs\32k\D_27000.pth
+ 2023-02-22 08:28:33,782 32k INFO ====> Epoch: 276
+ 2023-02-22 08:30:20,077 32k INFO ====> Epoch: 277
610
+ 2023-02-22 08:31:29,161 32k INFO Train Epoch: 278 [55%]
611
+ 2023-02-22 08:31:29,161 32k INFO [2.207501173019409, 2.513786792755127, 13.21751880645752, 19.042682647705078, 0.7013548612594604, 27200, 9.659654956050859e-05]
612
+ 2023-02-22 08:32:06,611 32k INFO ====> Epoch: 278
613
+ 2023-02-22 08:33:52,136 32k INFO ====> Epoch: 279
614
+ 2023-02-22 08:35:04,124 32k INFO Train Epoch: 280 [59%]
615
+ 2023-02-22 08:35:04,124 32k INFO [2.2693753242492676, 2.6720073223114014, 12.635223388671875, 20.047088623046875, 1.1133509874343872, 27400, 9.657240193243954e-05]
616
+ 2023-02-22 08:35:38,250 32k INFO ====> Epoch: 280
617
+ 2023-02-22 08:37:23,913 32k INFO ====> Epoch: 281
618
+ 2023-02-22 08:38:39,215 32k INFO Train Epoch: 282 [63%]
619
+ 2023-02-22 08:38:39,216 32k INFO [2.389529228210449, 2.2568767070770264, 10.048675537109375, 14.323448181152344, 0.7161829471588135, 27600, 9.65482603409002e-05]
620
+ 2023-02-22 08:39:09,969 32k INFO ====> Epoch: 282
621
+ 2023-02-22 08:40:56,377 32k INFO ====> Epoch: 283
622
+ 2023-02-22 08:42:15,609 32k INFO Train Epoch: 284 [67%]
623
+ 2023-02-22 08:42:15,609 32k INFO [2.511157751083374, 2.453148603439331, 11.416619300842285, 17.989450454711914, 0.6057873964309692, 27800, 9.652412478438153e-05]
624
+ 2023-02-22 08:42:42,783 32k INFO ====> Epoch: 284
625
+ 2023-02-22 08:44:28,460 32k INFO ====> Epoch: 285
626
+ 2023-02-22 08:45:50,789 32k INFO Train Epoch: 286 [71%]
627
+ 2023-02-22 08:45:50,789 32k INFO [2.419379472732544, 2.3715896606445312, 8.623769760131836, 15.313409805297852, 0.5617188811302185, 28000, 9.649999526137489e-05]
628
+ 2023-02-22 08:45:55,739 32k INFO Saving model and optimizer state at iteration 286 to ./logs\32k\G_28000.pth
629
+ 2023-02-22 08:46:09,553 32k INFO Saving model and optimizer state at iteration 286 to ./logs\32k\D_28000.pth
630
+ 2023-02-22 08:46:37,027 32k INFO ====> Epoch: 286
631
+ 2023-02-22 08:48:23,304 32k INFO ====> Epoch: 287
632
+ 2023-02-22 08:49:49,517 32k INFO Train Epoch: 288 [76%]
633
+ 2023-02-22 08:49:49,518 32k INFO [2.231926202774048, 2.57316255569458, 16.306894302368164, 19.932767868041992, 0.9207838773727417, 28200, 9.647587177037196e-05]
634
+ 2023-02-22 08:50:09,780 32k INFO ====> Epoch: 288
635
+ 2023-02-22 08:51:56,021 32k INFO ====> Epoch: 289
636
+ 2023-02-22 08:53:25,110 32k INFO Train Epoch: 290 [80%]
637
+ 2023-02-22 08:53:25,110 32k INFO [2.5337061882019043, 2.4603958129882812, 10.723613739013672, 16.972007751464844, 0.9773488640785217, 28400, 9.645175430986486e-05]
638
+ 2023-02-22 08:53:41,993 32k INFO ====> Epoch: 290
639
+ 2023-02-22 08:55:27,579 32k INFO ====> Epoch: 291
640
+ 2023-02-22 08:57:00,042 32k INFO Train Epoch: 292 [84%]
641
+ 2023-02-22 08:57:00,043 32k INFO [2.457164764404297, 2.5140910148620605, 12.109970092773438, 19.576763153076172, 0.6455909609794617, 28600, 9.642764287834605e-05]
642
+ 2023-02-22 08:57:13,617 32k INFO ====> Epoch: 292
643
+ 2023-02-22 08:58:59,241 32k INFO ====> Epoch: 293
644
+ 2023-02-22 09:00:35,167 32k INFO Train Epoch: 294 [88%]
645
+ 2023-02-22 09:00:35,167 32k INFO [3.1267271041870117, 1.7442620992660522, 5.101763725280762, 9.992081642150879, 0.8443043231964111, 28800, 9.640353747430838e-05]
646
+ 2023-02-22 09:00:45,190 32k INFO ====> Epoch: 294
647
+ 2023-02-22 09:02:30,950 32k INFO ====> Epoch: 295
648
+ 2023-02-22 09:04:10,463 32k INFO Train Epoch: 296 [92%]
649
+ 2023-02-22 09:04:10,463 32k INFO [2.4258294105529785, 2.153702735900879, 11.16634464263916, 18.30190658569336, 0.6709051728248596, 29000, 9.637943809624507e-05]
650
+ 2023-02-22 09:04:14,772 32k INFO Saving model and optimizer state at iteration 296 to ./logs\32k\G_29000.pth
651
+ 2023-02-22 09:04:30,920 32k INFO Saving model and optimizer state at iteration 296 to ./logs\32k\D_29000.pth
652
+ 2023-02-22 09:04:41,056 32k INFO ====> Epoch: 296
653
+ 2023-02-22 09:06:27,402 32k INFO ====> Epoch: 297
654
+ 2023-02-22 09:08:10,882 32k INFO Train Epoch: 298 [96%]
655
+ 2023-02-22 09:08:10,882 32k INFO [2.3428242206573486, 2.506030559539795, 9.039055824279785, 12.474124908447266, 0.5565758347511292, 29200, 9.635534474264972e-05]
656
+ 2023-02-22 09:08:13,993 32k INFO ====> Epoch: 298
657
+ 2023-02-22 09:10:00,361 32k INFO ====> Epoch: 299
658
+ 2023-02-22 09:11:46,836 32k INFO ====> Epoch: 300
659
+ 2023-02-22 09:12:10,849 32k INFO Train Epoch: 301 [0%]
660
+ 2023-02-22 09:12:10,849 32k INFO [2.0102734565734863, 3.098827838897705, 12.834500312805176, 17.658754348754883, 0.8110590577125549, 29400, 9.631921600483981e-05]
661
+ 2023-02-22 09:13:34,929 32k INFO ====> Epoch: 301
662
+ 2023-02-22 09:15:22,579 32k INFO ====> Epoch: 302
663
+ 2023-02-22 09:15:48,125 32k INFO Train Epoch: 303 [4%]
664
+ 2023-02-22 09:15:48,125 32k INFO [2.292261838912964, 2.6530299186706543, 15.810569763183594, 19.79923439025879, 0.8924477100372314, 29600, 9.629513770582634e-05]
665
+ 2023-02-22 09:17:08,489 32k INFO ====> Epoch: 303
666
+ 2023-02-22 09:18:54,037 32k INFO ====> Epoch: 304
667
+ 2023-02-22 09:19:22,932 32k INFO Train Epoch: 305 [8%]
668
+ 2023-02-22 09:19:22,933 32k INFO [2.2535953521728516, 2.7421841621398926, 14.76373291015625, 17.443092346191406, 0.9546372294425964, 29800, 9.627106542601141e-05]
669
+ 2023-02-22 09:20:39,815 32k INFO ====> Epoch: 305
670
+ 2023-02-22 09:22:25,311 32k INFO ====> Epoch: 306
671
+ 2023-02-22 09:22:57,677 32k INFO Train Epoch: 307 [12%]
672
+ 2023-02-22 09:22:57,677 32k INFO [2.326064109802246, 2.3965041637420654, 7.343259811401367, 12.741463661193848, 0.6292210817337036, 30000, 9.62469991638903e-05]
673
+ 2023-02-22 09:23:01,921 32k INFO Saving model and optimizer state at iteration 307 to ./logs\32k\G_30000.pth
674
+ 2023-02-22 09:23:20,758 32k INFO Saving model and optimizer state at iteration 307 to ./logs\32k\D_30000.pth
675
+ 2023-02-22 09:24:37,562 32k INFO ====> Epoch: 307
676
+ 2023-02-22 09:26:23,740 32k INFO ====> Epoch: 308
677
+ 2023-02-22 09:26:59,601 32k INFO Train Epoch: 309 [16%]
678
+ 2023-02-22 09:26:59,601 32k INFO [2.4903805255889893, 2.4432358741760254, 9.264269828796387, 16.382070541381836, 0.8371724486351013, 30200, 9.622293891795867e-05]
679
+ 2023-02-22 09:28:09,664 32k INFO ====> Epoch: 309
680
+ 2023-02-22 09:29:55,317 32k INFO ====> Epoch: 310
681
+ 2023-02-22 09:30:34,487 32k INFO Train Epoch: 311 [20%]
682
+ 2023-02-22 09:30:34,487 32k INFO [2.6089067459106445, 2.180260181427002, 6.335816860198975, 10.976472854614258, 0.98526930809021, 30400, 9.619888468671259e-05]
683
+ 2023-02-22 09:31:41,120 32k INFO ====> Epoch: 311
684
+ 2023-02-22 09:33:27,469 32k INFO ====> Epoch: 312
685
+ 2023-02-22 09:34:10,970 32k INFO Train Epoch: 313 [24%]
686
+ 2023-02-22 09:34:10,970 32k INFO [2.2308130264282227, 2.525303363800049, 13.083917617797852, 19.732345581054688, 0.6941409707069397, 30600, 9.617483646864849e-05]
687
+ 2023-02-22 09:35:14,173 32k INFO ====> Epoch: 313
688
+ 2023-02-22 09:36:59,697 32k INFO ====> Epoch: 314
689
+ 2023-02-22 09:37:45,817 32k INFO Train Epoch: 315 [29%]
690
+ 2023-02-22 09:37:45,818 32k INFO [2.5213077068328857, 2.46886944770813, 10.760324478149414, 19.334060668945312, 0.5704622268676758, 30800, 9.615079426226314e-05]
691
+ 2023-02-22 09:38:45,598 32k INFO ====> Epoch: 315
692
+ 2023-02-22 09:40:31,200 32k INFO ====> Epoch: 316
693
+ 2023-02-22 09:41:21,493 32k INFO Train Epoch: 317 [33%]
694
+ 2023-02-22 09:41:21,493 32k INFO [2.1243271827697754, 2.5788512229919434, 12.918538093566895, 20.69605827331543, 0.9645518064498901, 31000, 9.612675806605373e-05]
695
+ 2023-02-22 09:41:26,457 32k INFO Saving model and optimizer state at iteration 317 to ./logs\32k\G_31000.pth
696
+ 2023-02-22 09:41:44,385 32k INFO Saving model and optimizer state at iteration 317 to ./logs\32k\D_31000.pth
697
+ 2023-02-22 09:42:44,191 32k INFO ====> Epoch: 317
698
+ 2023-02-22 09:44:30,505 32k INFO ====> Epoch: 318
699
+ 2023-02-22 09:45:24,342 32k INFO Train Epoch: 319 [37%]
700
+ 2023-02-22 09:45:24,343 32k INFO [2.4621365070343018, 2.3512632846832275, 11.532454490661621, 16.673147201538086, 1.0516586303710938, 31200, 9.61027278785178e-05]
701
+ 2023-02-22 09:46:17,300 32k INFO ====> Epoch: 319
702
+ 2023-02-22 09:48:02,873 32k INFO ====> Epoch: 320
703
+ 2023-02-22 09:48:59,267 32k INFO Train Epoch: 321 [41%]
704
+ 2023-02-22 09:48:59,267 32k INFO [2.293076992034912, 2.460491180419922, 15.437559127807617, 19.99898910522461, 0.8674603700637817, 31400, 9.60787036981533e-05]
705
+ 2023-02-22 09:49:48,764 32k INFO ====> Epoch: 321
706
+ 2023-02-22 09:51:34,708 32k INFO ====> Epoch: 322
707
+ 2023-02-22 09:52:34,630 32k INFO Train Epoch: 323 [45%]
708
+ 2023-02-22 09:52:34,630 32k INFO [2.3757669925689697, 2.392949104309082, 14.556235313415527, 18.955732345581055, 0.8770878911018372, 31600, 9.60546855234585e-05]
709
+ 2023-02-22 09:53:20,914 32k INFO ====> Epoch: 323
710
+ 2023-02-22 09:55:07,269 32k INFO ====> Epoch: 324
711
+ 2023-02-22 09:56:10,506 32k INFO Train Epoch: 325 [49%]
712
+ 2023-02-22 09:56:10,507 32k INFO [2.3360633850097656, 2.545222520828247, 11.59161376953125, 17.103670120239258, 0.7123858332633972, 31800, 9.603067335293209e-05]
713
+ 2023-02-22 09:56:53,220 32k INFO ====> Epoch: 325
714
+ 2023-02-22 09:58:38,902 32k INFO ====> Epoch: 326
715
+ 2023-02-22 09:59:45,549 32k INFO Train Epoch: 327 [53%]
716
+ 2023-02-22 09:59:45,549 32k INFO [2.3887226581573486, 2.382225275039673, 9.169524192810059, 15.562906265258789, 0.9378204345703125, 32000, 9.600666718507311e-05]
717
+ 2023-02-22 09:59:50,519 32k INFO Saving model and optimizer state at iteration 327 to ./logs\32k\G_32000.pth
718
+ 2023-02-22 10:00:08,092 32k INFO Saving model and optimizer state at iteration 327 to ./logs\32k\D_32000.pth
719
+ 2023-02-22 10:00:50,689 32k INFO ====> Epoch: 327
720
+ 2023-02-22 10:02:36,996 32k INFO ====> Epoch: 328
721
+ 2023-02-22 10:03:47,898 32k INFO Train Epoch: 329 [57%]
722
+ 2023-02-22 10:03:47,899 32k INFO [2.1435623168945312, 2.491894483566284, 13.545025825500488, 17.95233726501465, 0.8276446461677551, 32200, 9.5982667018381e-05]
723
+ 2023-02-22 10:04:23,585 32k INFO ====> Epoch: 329
724
+ 2023-02-22 10:06:09,160 32k INFO ====> Epoch: 330
725
+ 2023-02-22 10:07:22,888 32k INFO Train Epoch: 331 [61%]
726
+ 2023-02-22 10:07:22,889 32k INFO [2.53352952003479, 2.79293155670166, 10.536802291870117, 17.276477813720703, 0.7348306179046631, 32400, 9.595867285135558e-05]
727
+ 2023-02-22 10:07:55,166 32k INFO ====> Epoch: 331
728
+ 2023-02-22 10:09:54,170 32k INFO ====> Epoch: 332
729
+ 2023-02-22 10:11:11,362 32k INFO Train Epoch: 333 [65%]
730
+ 2023-02-22 10:11:11,362 32k INFO [2.3337302207946777, 2.2436773777008057, 11.43968391418457, 16.059717178344727, 0.6464142203330994, 32600, 9.5934684682497e-05]
731
+ 2023-02-22 10:11:40,327 32k INFO ====> Epoch: 333
732
+ 2023-02-22 10:13:26,239 32k INFO ====> Epoch: 334
733
+ 2023-02-22 10:14:46,735 32k INFO Train Epoch: 335 [69%]
734
+ 2023-02-22 10:14:46,736 32k INFO [2.341135025024414, 2.178755044937134, 12.009360313415527, 17.662372589111328, 0.6575217247009277, 32800, 9.591070251030582e-05]
735
+ 2023-02-22 10:15:12,226 32k INFO ====> Epoch: 335
736
+ 2023-02-22 10:16:58,071 32k INFO ====> Epoch: 336
737
+ 2023-02-22 10:18:22,210 32k INFO Train Epoch: 337 [73%]
738
+ 2023-02-22 10:18:22,210 32k INFO [2.5522103309631348, 2.188314914703369, 10.32480525970459, 15.917875289916992, 0.5481040477752686, 33000, 9.588672633328296e-05]
739
+ 2023-02-22 10:18:26,497 32k INFO Saving model and optimizer state at iteration 337 to ./logs\32k\G_33000.pth
740
+ 2023-02-22 10:18:44,754 32k INFO Saving model and optimizer state at iteration 337 to ./logs\32k\D_33000.pth
741
+ 2023-02-22 10:19:10,032 32k INFO ====> Epoch: 337
742
+ 2023-02-22 10:20:56,519 32k INFO ====> Epoch: 338
743
+ 2023-02-22 10:22:23,859 32k INFO Train Epoch: 339 [78%]
744
+ 2023-02-22 10:22:23,859 32k INFO [2.4190073013305664, 2.2712206840515137, 11.071868896484375, 18.608346939086914, 1.2736413478851318, 33200, 9.586275614992974e-05]
745
+ 2023-02-22 10:22:42,496 32k INFO ====> Epoch: 339
746
+ 2023-02-22 10:24:28,908 32k INFO ====> Epoch: 340
747
+ 2023-02-22 10:26:00,569 32k INFO Train Epoch: 341 [82%]
748
+ 2023-02-22 10:26:00,569 32k INFO [2.5759196281433105, 2.485172748565674, 11.334334373474121, 16.11148452758789, 0.9113945364952087, 33400, 9.583879195874782e-05]
749
+ 2023-02-22 10:26:15,792 32k INFO ====> Epoch: 341
750
+ 2023-02-22 10:28:02,420 32k INFO ====> Epoch: 342
751
+ 2023-02-22 10:29:37,733 32k INFO Train Epoch: 343 [86%]
752
+ 2023-02-22 10:29:37,733 32k INFO [2.478440046310425, 2.6277501583099365, 11.949339866638184, 17.164081573486328, 0.7420394420623779, 33600, 9.581483375823925e-05]
753
+ 2023-02-22 10:29:49,553 32k INFO ====> Epoch: 343
754
+ 2023-02-22 10:31:35,370 32k INFO ====> Epoch: 344
755
+ 2023-02-22 10:33:13,278 32k INFO Train Epoch: 345 [90%]
756
+ 2023-02-22 10:33:13,278 32k INFO [2.514538288116455, 2.42301344871521, 11.596924781799316, 17.29451560974121, 0.6897502541542053, 33800, 9.579088154690645e-05]
757
+ 2023-02-22 10:33:21,550 32k INFO ====> Epoch: 345
758
+ 2023-02-22 10:35:07,455 32k INFO ====> Epoch: 346
759
+ 2023-02-22 10:36:48,752 32k INFO Train Epoch: 347 [94%]
760
+ 2023-02-22 10:36:48,752 32k INFO [2.37970232963562, 2.336655378341675, 12.431614875793457, 18.605751037597656, 0.8811375498771667, 34000, 9.576693532325224e-05]
761
+ 2023-02-22 10:36:53,130 32k INFO Saving model and optimizer state at iteration 347 to ./logs\32k\G_34000.pth
762
+ 2023-02-22 10:37:10,041 32k INFO Saving model and optimizer state at iteration 347 to ./logs\32k\D_34000.pth
763
+ 2023-02-22 10:37:18,304 32k INFO ====> Epoch: 347
764
+ 2023-02-22 10:39:05,169 32k INFO ====> Epoch: 348
765
+ 2023-02-22 10:40:49,972 32k INFO Train Epoch: 349 [98%]
766
+ 2023-02-22 10:40:49,972 32k INFO [2.4378786087036133, 2.316551446914673, 10.715349197387695, 14.96591854095459, 0.5559961199760437, 34200, 9.574299508577979e-05]
767
+ 2023-02-22 10:40:51,354 32k INFO ====> Epoch: 349
768
+ 2023-02-22 10:42:37,890 32k INFO ====> Epoch: 350
769
+ 2023-02-22 10:44:24,654 32k INFO ====> Epoch: 351
770
+ 2023-02-22 10:44:49,299 32k INFO Train Epoch: 352 [2%]
771
+ 2023-02-22 10:44:49,300 32k INFO [2.4335520267486572, 2.271144151687622, 9.197824478149414, 14.361316680908203, 0.9935851693153381, 34400, 9.570709595038851e-05]
772
+ 2023-02-22 10:46:11,605 32k INFO ====> Epoch: 352
773
+ 2023-02-22 10:47:58,244 32k INFO ====> Epoch: 353
774
+ 2023-02-22 10:48:25,603 32k INFO Train Epoch: 354 [6%]
775
+ 2023-02-22 10:48:25,604 32k INFO [2.503721237182617, 2.299598217010498, 11.784101486206055, 17.45063018798828, 0.7507224678993225, 34600, 9.568317067182427e-05]
776
+ 2023-02-22 10:49:44,510 32k INFO ====> Epoch: 354
777
+ 2023-02-22 10:51:30,297 32k INFO ====> Epoch: 355
778
+ 2023-02-22 10:52:01,064 32k INFO Train Epoch: 356 [10%]
779
+ 2023-02-22 10:52:01,064 32k INFO [2.358137369155884, 2.391530990600586, 12.17978572845459, 17.948204040527344, 0.6102503538131714, 34800, 9.565925137420586e-05]
780
+ 2023-02-22 10:53:16,260 32k INFO ====> Epoch: 356
781
+ 2023-02-22 10:55:05,634 32k INFO ====> Epoch: 357
782
+ 2023-02-22 10:55:40,768 32k INFO Train Epoch: 358 [14%]
783
+ 2023-02-22 10:55:40,768 32k INFO [2.742542028427124, 2.216801166534424, 8.76644515991211, 14.839630126953125, 0.49059024453163147, 35000, 9.56353380560381e-05]
784
+ 2023-02-22 10:55:45,046 32k INFO Saving model and optimizer state at iteration 358 to ./logs\32k\G_35000.pth
785
+ 2023-02-22 10:56:01,246 32k INFO Saving model and optimizer state at iteration 358 to ./logs\32k\D_35000.pth
786
+ 2023-02-22 10:57:17,536 32k INFO ====> Epoch: 358
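A note on the log format above: each bracketed list appears to follow the training loop's logging order, [loss_disc, loss_gen, loss_fm, loss_mel, loss_kl, global_step, learning_rate]; that ordering is an assumption read off the VITS code family, not something the log states. The final value does match the exponential schedule from config.json (learning_rate 0.0001, lr_decay 0.999875, stepped once per epoch), which a few lines of Python can check:

```python
# Minimal sketch: reproduce the learning rate printed at the end of each
# "Train Epoch" log line, assuming ExponentialLR stepped once per epoch
# (an assumption based on VITS-style training loops, not stated in the log).
base_lr = 1e-4       # "learning_rate" in config.json
lr_decay = 0.999875  # "lr_decay" in config.json

def lr_at_epoch(epoch: int) -> float:
    """Learning rate in effect during the given 1-indexed epoch."""
    return base_lr * lr_decay ** (epoch - 1)

print(lr_at_epoch(245))  # ~9.69959e-05; the epoch-245 line logs 9.699585829277933e-05
```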
32k-meril/D_12000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:78ab9e543a96314a4ed84946fff32b23937d77c701d9563ce440a5581a461461
3
+ size 561098185
32k-meril/D_18000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6da49cdd53e5eceacab3d696300c68080968cda024de1b8d770dd98b984a4c45
3
+ size 561098185
32k-meril/D_24000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27b9a72d058ed0f7bc5c3e2566fec9afbd31418fe7614eac918f56a08f04bb24
3
+ size 561098185
32k-meril/D_30000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:30a143442484dda7da40b2fea7659abc2eda8df136c5754f96e20016cb88bc5f
3
+ size 561098185
32k-meril/D_35000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d2a4cd05e39eeb8140f19dcb9f7c10e8b5f1403decf1ea850849bc4271e3d205
3
+ size 561098185
32k-meril/D_6000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:783a69cb419cfb8c41c581932018efc5303174a5b90bdbaa5d6edda25a7e0bb4
3
+ size 561098185
32k-meril/G_12000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5bcce0d7e326e548b123ef4e4b672cefce378d1e79a117dc995930b930c53426
3
+ size 699505437
32k-meril/G_18000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d9a79dc8a175145c79c4a4f500fe8d592d7db7303239f389b31e2ca55baf2d7
3
+ size 699505437
32k-meril/G_24000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:088adcbfd65eccf263a9287bccc51fd460872fea6ecbe59b730bb19d145e90e4
3
+ size 699505437
32k-meril/G_30000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:035ec65e0c048e5bac8657d88f4d6527ea00e2e9270f65bd8ba32f6feb1be635
3
+ size 699505437
32k-meril/G_35000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4dcb52fa0295806814313da02458223be26f96e34cd1290dd46a5c9b486cacb2
3
+ size 699505437
32k-meril/G_6000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e243c2bcb2e8bfe4c5243a075e1283cda38f2f5156059482bd296127ec1b28fe
3
+ size 699505437
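All of the .pth entries in this commit are Git LFS pointer files rather than the checkpoints themselves: three lines giving the pointer-spec version, the SHA-256 of the stored blob, and its size in bytes (561,098,185 for each discriminator D_*.pth, 699,505,437 for each generator G_*.pth). A minimal sketch for verifying a downloaded blob against its pointer; the local paths are hypothetical:

```python
# Sketch: check a downloaded checkpoint against its Git LFS pointer file.
# Paths are placeholders; point them at the actual local files.
import hashlib
from pathlib import Path

def matches_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """True if the blob's sha256 digest and byte size match the pointer."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if line.strip()
    )
    expected_oid = fields["oid"].strip().split(":", 1)[1]  # drop "sha256:" prefix
    expected_size = int(fields["size"])
    blob = Path(blob_path).read_bytes()  # fine for a sketch; stream in chunks for huge files
    return hashlib.sha256(blob).hexdigest() == expected_oid and len(blob) == expected_size

print(matches_lfs_pointer("32k-meril/D_12000.pth", "downloads/D_12000.pth"))
```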
32k-meril/config.json ADDED
@@ -0,0 +1,90 @@
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 10000,
7
+ "learning_rate": 0.0001,
8
+ "betas": [
9
+ 0.8,
10
+ 0.99
11
+ ],
12
+ "eps": 1e-09,
13
+ "batch_size": 6,
14
+ "fp16_run": false,
15
+ "lr_decay": 0.999875,
16
+ "segment_size": 17920,
17
+ "init_lr_ratio": 1,
18
+ "warmup_epochs": 0,
19
+ "c_mel": 45,
20
+ "c_kl": 1.0,
21
+ "use_sr": true,
22
+ "max_speclen": 384,
23
+ "port": "8001"
24
+ },
25
+ "data": {
26
+ "training_files": "filelists/train.txt",
27
+ "validation_files": "filelists/val.txt",
28
+ "max_wav_value": 32768.0,
29
+ "sampling_rate": 32000,
30
+ "filter_length": 1280,
31
+ "hop_length": 320,
32
+ "win_length": 1280,
33
+ "n_mel_channels": 80,
34
+ "mel_fmin": 0.0,
35
+ "mel_fmax": null
36
+ },
37
+ "model": {
38
+ "inter_channels": 192,
39
+ "hidden_channels": 192,
40
+ "filter_channels": 768,
41
+ "n_heads": 2,
42
+ "n_layers": 6,
43
+ "kernel_size": 3,
44
+ "p_dropout": 0.1,
45
+ "resblock": "1",
46
+ "resblock_kernel_sizes": [
47
+ 3,
48
+ 7,
49
+ 11
50
+ ],
51
+ "resblock_dilation_sizes": [
52
+ [
53
+ 1,
54
+ 3,
55
+ 5
56
+ ],
57
+ [
58
+ 1,
59
+ 3,
60
+ 5
61
+ ],
62
+ [
63
+ 1,
64
+ 3,
65
+ 5
66
+ ]
67
+ ],
68
+ "upsample_rates": [
69
+ 10,
70
+ 8,
71
+ 2,
72
+ 2
73
+ ],
74
+ "upsample_initial_channel": 512,
75
+ "upsample_kernel_sizes": [
76
+ 16,
77
+ 16,
78
+ 4,
79
+ 4
80
+ ],
81
+ "n_layers_q": 3,
82
+ "use_spectral_norm": false,
83
+ "gin_channels": 256,
84
+ "ssl_dim": 256,
85
+ "n_speakers": 2
86
+ },
87
+ "spk": {
88
+ "meril": 0
89
+ }
90
+ }
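A few quantities implied by this config are worth spelling out: at sampling_rate 32000 and hop_length 320, each spectrogram frame covers 10 ms, so the segment_size of 17920 samples is 0.56 s of audio, or 56 frames per training segment. A quick sketch:

```python
# Sketch: derive segment/frame sizes from 32k-meril/config.json.
import json

with open("32k-meril/config.json") as f:
    cfg = json.load(f)

sr = cfg["data"]["sampling_rate"]   # 32000 Hz
hop = cfg["data"]["hop_length"]     # 320 samples
win = cfg["data"]["win_length"]     # 1280 samples
seg = cfg["train"]["segment_size"]  # 17920 samples

print(hop / sr * 1000)  # 10.0 -> ms per spectrogram frame
print(win / sr * 1000)  # 40.0 -> ms analysis window
print(seg / sr)         # 0.56 -> seconds of audio per training segment
print(seg // hop)       # 56   -> frames per training segment
```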
32k-meril/train.log ADDED
@@ -0,0 +1,645 @@
1
+ 2023-02-26 08:23:18,928 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'meril': 0}, 'model_dir': './logs\\32k'}
2
+ 2023-02-26 08:23:18,928 32k WARNING K:\AI\so-vits-svc-32k is not a git repository, therefore hash value comparison will be ignored.
3
+ 2023-02-26 08:23:34,708 32k INFO emb_g.weight is not in the checkpoint
4
+ 2023-02-26 08:23:34,818 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
5
+ 2023-02-26 08:23:35,956 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
6
+ 2023-02-26 08:24:07,398 32k INFO Train Epoch: 1 [0%]
7
+ 2023-02-26 08:24:07,399 32k INFO [6.396151542663574, 2.3192086219787598, 11.437691688537598, 50.04783248901367, 12.884584426879883, 0, 0.0001]
8
+ 2023-02-26 08:24:14,320 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
9
+ 2023-02-26 08:24:29,149 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
10
+ 2023-02-26 08:26:57,094 32k INFO ====> Epoch: 1
11
+ 2023-02-26 08:27:48,937 32k INFO Train Epoch: 2 [21%]
12
+ 2023-02-26 08:27:48,937 32k INFO [2.6703596115112305, 2.1580634117126465, 11.49161434173584, 22.269886016845703, 1.027299404144287, 200, 9.99875e-05]
13
+ 2023-02-26 08:29:40,443 32k INFO ====> Epoch: 2
14
+ 2023-02-26 08:31:02,090 32k INFO Train Epoch: 3 [42%]
15
+ 2023-02-26 08:31:02,091 32k INFO [2.5759048461914062, 2.196209192276001, 13.65181827545166, 22.816627502441406, 0.9087493419647217, 400, 9.99750015625e-05]
16
+ 2023-02-26 08:32:23,299 32k INFO ====> Epoch: 3
17
+ 2023-02-26 08:34:14,621 32k INFO Train Epoch: 4 [64%]
18
+ 2023-02-26 08:34:14,621 32k INFO [2.3012750148773193, 2.5668628215789795, 8.850838661193848, 16.783226013183594, 1.1684110164642334, 600, 9.996250468730469e-05]
19
+ 2023-02-26 08:35:05,788 32k INFO ====> Epoch: 4
20
+ 2023-02-26 08:37:27,454 32k INFO Train Epoch: 5 [85%]
21
+ 2023-02-26 08:37:27,455 32k INFO [2.5453319549560547, 2.3678677082061768, 11.264457702636719, 20.05609130859375, 1.1281580924987793, 800, 9.995000937421877e-05]
22
+ 2023-02-26 08:37:48,604 32k INFO ====> Epoch: 5
23
+ 2023-02-26 08:40:30,764 32k INFO ====> Epoch: 6
24
+ 2023-02-26 08:41:00,695 32k INFO Train Epoch: 7 [6%]
25
+ 2023-02-26 08:41:00,696 32k INFO [2.663148880004883, 2.078253746032715, 7.356672286987305, 15.606974601745605, 0.9389564990997314, 1000, 9.99250234335941e-05]
26
+ 2023-02-26 08:41:05,158 32k INFO Saving model and optimizer state at iteration 7 to ./logs\32k\G_1000.pth
27
+ 2023-02-26 08:41:18,568 32k INFO Saving model and optimizer state at iteration 7 to ./logs\32k\D_1000.pth
28
+ 2023-02-26 08:43:34,949 32k INFO ====> Epoch: 7
29
+ 2023-02-26 08:44:35,017 32k INFO Train Epoch: 8 [27%]
30
+ 2023-02-26 08:44:35,017 32k INFO [2.3168065547943115, 2.6603329181671143, 8.887020111083984, 17.153154373168945, 1.0433679819107056, 1200, 9.991253280566489e-05]
31
+ 2023-02-26 08:46:17,519 32k INFO ====> Epoch: 8
32
+ 2023-02-26 08:47:47,469 32k INFO Train Epoch: 9 [48%]
33
+ 2023-02-26 08:47:47,469 32k INFO [2.3874289989471436, 2.434887170791626, 11.490910530090332, 21.34469985961914, 0.8404753804206848, 1400, 9.990004373906418e-05]
34
+ 2023-02-26 08:49:00,098 32k INFO ====> Epoch: 9
35
+ 2023-02-26 08:51:00,214 32k INFO Train Epoch: 10 [70%]
36
+ 2023-02-26 08:51:00,214 32k INFO [2.1876630783081055, 2.5636796951293945, 12.61434555053711, 23.038005828857422, 0.9913359880447388, 1600, 9.98875562335968e-05]
37
+ 2023-02-26 08:51:42,829 32k INFO ====> Epoch: 10
38
+ 2023-02-26 08:54:12,770 32k INFO Train Epoch: 11 [91%]
39
+ 2023-02-26 08:54:12,771 32k INFO [2.3796133995056152, 2.9137446880340576, 11.033517837524414, 21.80364227294922, 0.69889235496521, 1800, 9.987507028906759e-05]
40
+ 2023-02-26 08:54:25,282 32k INFO ====> Epoch: 11
41
+ 2023-02-26 08:57:07,397 32k INFO ====> Epoch: 12
42
+ 2023-02-26 08:57:45,982 32k INFO Train Epoch: 13 [12%]
43
+ 2023-02-26 08:57:45,982 32k INFO [2.590365409851074, 2.162517547607422, 12.567984580993652, 20.611303329467773, 0.8790818452835083, 2000, 9.98501030820433e-05]
44
+ 2023-02-26 08:57:50,455 32k INFO Saving model and optimizer state at iteration 13 to ./logs\32k\G_2000.pth
45
+ 2023-02-26 08:58:06,096 32k INFO Saving model and optimizer state at iteration 13 to ./logs\32k\D_2000.pth
46
+ 2023-02-26 09:00:13,474 32k INFO ====> Epoch: 13
47
+ 2023-02-26 09:01:22,067 32k INFO Train Epoch: 14 [33%]
48
+ 2023-02-26 09:01:22,067 32k INFO [2.7683796882629395, 2.2413697242736816, 7.833517551422119, 17.857086181640625, 1.1680740118026733, 2200, 9.983762181915804e-05]
49
+ 2023-02-26 09:02:56,328 32k INFO ====> Epoch: 14
50
+ 2023-02-26 09:04:34,974 32k INFO Train Epoch: 15 [55%]
51
+ 2023-02-26 09:04:34,974 32k INFO [2.3525094985961914, 2.3344459533691406, 12.257208824157715, 19.89115333557129, 0.9875838160514832, 2400, 9.982514211643064e-05]
52
+ 2023-02-26 09:05:39,025 32k INFO ====> Epoch: 15
53
+ 2023-02-26 09:07:47,693 32k INFO Train Epoch: 16 [76%]
54
+ 2023-02-26 09:07:47,693 32k INFO [2.6893608570098877, 2.344364881515503, 10.605229377746582, 19.090167999267578, 0.780343234539032, 2600, 9.981266397366609e-05]
55
+ 2023-02-26 09:08:21,682 32k INFO ====> Epoch: 16
56
+ 2023-02-26 09:11:00,144 32k INFO Train Epoch: 17 [97%]
57
+ 2023-02-26 09:11:00,144 32k INFO [2.524367570877075, 2.1253821849823, 10.096750259399414, 19.844287872314453, 1.2072694301605225, 2800, 9.980018739066937e-05]
58
+ 2023-02-26 09:11:04,092 32k INFO ====> Epoch: 17
59
+ 2023-02-26 09:13:46,167 32k INFO ====> Epoch: 18
60
+ 2023-02-26 09:14:33,368 32k INFO Train Epoch: 19 [18%]
61
+ 2023-02-26 09:14:33,368 32k INFO [2.5664799213409424, 2.009101629257202, 15.13039493560791, 22.3335018157959, 0.9496199488639832, 3000, 9.977523890319963e-05]
62
+ 2023-02-26 09:14:37,794 32k INFO Saving model and optimizer state at iteration 19 to ./logs\32k\G_3000.pth
63
+ 2023-02-26 09:14:55,196 32k INFO Saving model and optimizer state at iteration 19 to ./logs\32k\D_3000.pth
64
+ 2023-02-26 09:16:54,310 32k INFO ====> Epoch: 19
65
+ 2023-02-26 09:18:11,582 32k INFO Train Epoch: 20 [39%]
66
+ 2023-02-26 09:18:11,582 32k INFO [2.4653780460357666, 2.1782279014587402, 10.213360786437988, 17.996280670166016, 1.3770432472229004, 3200, 9.976276699833672e-05]
67
+ 2023-02-26 09:19:37,287 32k INFO ====> Epoch: 20
68
+ 2023-02-26 09:21:24,337 32k INFO Train Epoch: 21 [61%]
69
+ 2023-02-26 09:21:24,337 32k INFO [2.3505122661590576, 2.285341262817383, 12.031506538391113, 21.24757957458496, 0.6532730460166931, 3400, 9.975029665246193e-05]
70
+ 2023-02-26 09:22:19,751 32k INFO ====> Epoch: 21
71
+ 2023-02-26 09:24:36,978 32k INFO Train Epoch: 22 [82%]
72
+ 2023-02-26 09:24:36,979 32k INFO [2.295872688293457, 2.435206413269043, 13.174171447753906, 20.36532974243164, 1.0691040754318237, 3600, 9.973782786538036e-05]
73
+ 2023-02-26 09:25:02,486 32k INFO ====> Epoch: 22
74
+ 2023-02-26 09:27:44,742 32k INFO ====> Epoch: 23
75
+ 2023-02-26 09:28:10,473 32k INFO Train Epoch: 24 [3%]
76
+ 2023-02-26 09:28:10,474 32k INFO [2.4777655601501465, 2.438159942626953, 10.104680061340332, 20.890073776245117, 0.7508599758148193, 3800, 9.971289496681757e-05]
77
+ 2023-02-26 09:30:27,585 32k INFO ====> Epoch: 24
78
+ 2023-02-26 09:31:23,330 32k INFO Train Epoch: 25 [24%]
79
+ 2023-02-26 09:31:23,331 32k INFO [2.547969341278076, 2.103365421295166, 8.601760864257812, 17.903499603271484, 1.1798760890960693, 4000, 9.970043085494672e-05]
80
+ 2023-02-26 09:31:27,787 32k INFO Saving model and optimizer state at iteration 25 to ./logs\32k\G_4000.pth
81
+ 2023-02-26 09:31:42,830 32k INFO Saving model and optimizer state at iteration 25 to ./logs\32k\D_4000.pth
82
+ 2023-02-26 09:33:33,295 32k INFO ====> Epoch: 25
83
+ 2023-02-26 09:34:58,902 32k INFO Train Epoch: 26 [45%]
84
+ 2023-02-26 09:34:58,903 32k INFO [2.509209632873535, 2.2358999252319336, 11.01781940460205, 20.53192710876465, 1.0602046251296997, 4200, 9.968796830108985e-05]
85
+ 2023-02-26 09:36:15,852 32k INFO ====> Epoch: 26
86
+ 2023-02-26 09:38:11,412 32k INFO Train Epoch: 27 [67%]
87
+ 2023-02-26 09:38:11,412 32k INFO [2.666337490081787, 2.08825421333313, 9.963523864746094, 18.926380157470703, 1.1029638051986694, 4400, 9.967550730505221e-05]
88
+ 2023-02-26 09:38:58,150 32k INFO ====> Epoch: 27
89
+ 2023-02-26 09:41:23,952 32k INFO Train Epoch: 28 [88%]
90
+ 2023-02-26 09:41:23,953 32k INFO [2.410893678665161, 2.41351318359375, 11.717813491821289, 20.35811996459961, 0.8942963480949402, 4600, 9.966304786663908e-05]
91
+ 2023-02-26 09:41:40,917 32k INFO ====> Epoch: 28
92
+ 2023-02-26 09:44:23,546 32k INFO ====> Epoch: 29
93
+ 2023-02-26 09:44:57,880 32k INFO Train Epoch: 30 [9%]
94
+ 2023-02-26 09:44:57,880 32k INFO [2.4157967567443848, 2.2903125286102295, 10.10302448272705, 17.93283462524414, 0.8192426562309265, 4800, 9.963813366190753e-05]
95
+ 2023-02-26 09:47:06,129 32k INFO ====> Epoch: 30
96
+ 2023-02-26 09:48:10,482 32k INFO Train Epoch: 31 [30%]
97
+ 2023-02-26 09:48:10,483 32k INFO [2.456360101699829, 2.292372465133667, 11.75523567199707, 22.287111282348633, 1.0126503705978394, 5000, 9.962567889519979e-05]
98
+ 2023-02-26 09:48:14,931 32k INFO Saving model and optimizer state at iteration 31 to ./logs\32k\G_5000.pth
99
+ 2023-02-26 09:48:33,569 32k INFO Saving model and optimizer state at iteration 31 to ./logs\32k\D_5000.pth
100
+ 2023-02-26 09:50:15,366 32k INFO ====> Epoch: 31
101
+ 2023-02-26 09:51:49,619 32k INFO Train Epoch: 32 [52%]
102
+ 2023-02-26 09:51:49,619 32k INFO [2.4675090312957764, 2.139700412750244, 10.635783195495605, 18.904586791992188, 1.133800745010376, 5200, 9.961322568533789e-05]
103
+ 2023-02-26 09:52:57,795 32k INFO ====> Epoch: 32
104
+ 2023-02-26 09:55:02,298 32k INFO Train Epoch: 33 [73%]
105
+ 2023-02-26 09:55:02,299 32k INFO [2.346632480621338, 2.284864902496338, 10.70773696899414, 16.828781127929688, 0.8554806113243103, 5400, 9.960077403212722e-05]
106
+ 2023-02-26 09:55:40,602 32k INFO ====> Epoch: 33
107
+ 2023-02-26 09:58:15,316 32k INFO Train Epoch: 34 [94%]
108
+ 2023-02-26 09:58:15,316 32k INFO [2.448796510696411, 2.3974175453186035, 9.994577407836914, 20.724594116210938, 0.7800424098968506, 5600, 9.95883239353732e-05]
109
+ 2023-02-26 09:58:23,670 32k INFO ====> Epoch: 34
110
+ 2023-02-26 10:01:06,450 32k INFO ====> Epoch: 35
111
+ 2023-02-26 10:01:49,384 32k INFO Train Epoch: 36 [15%]
112
+ 2023-02-26 10:01:49,384 32k INFO [2.4834535121917725, 2.215736150741577, 10.72994613647461, 20.1037654876709, 0.9641714096069336, 5800, 9.956342841045691e-05]
113
+ 2023-02-26 10:03:49,561 32k INFO ====> Epoch: 36
114
+ 2023-02-26 10:05:02,545 32k INFO Train Epoch: 37 [36%]
115
+ 2023-02-26 10:05:02,546 32k INFO [2.2858619689941406, 2.297046422958374, 13.459763526916504, 20.353960037231445, 0.8896880745887756, 6000, 9.95509829819056e-05]
116
+ 2023-02-26 10:05:07,000 32k INFO Saving model and optimizer state at iteration 37 to ./logs\32k\G_6000.pth
117
+ 2023-02-26 10:05:21,749 32k INFO Saving model and optimizer state at iteration 37 to ./logs\32k\D_6000.pth
118
+ 2023-02-26 10:06:55,120 32k INFO ====> Epoch: 37
119
+ 2023-02-26 10:08:38,237 32k INFO Train Epoch: 38 [58%]
120
+ 2023-02-26 10:08:38,237 32k INFO [2.468832015991211, 2.1250123977661133, 9.363271713256836, 16.582752227783203, 1.190895915031433, 6200, 9.953853910903285e-05]
121
+ 2023-02-26 10:09:38,270 32k INFO ====> Epoch: 38
122
+ 2023-02-26 10:11:51,557 32k INFO Train Epoch: 39 [79%]
123
+ 2023-02-26 10:11:51,557 32k INFO [2.4695611000061035, 2.5815343856811523, 10.841808319091797, 20.596200942993164, 1.0436564683914185, 6400, 9.952609679164422e-05]
124
+ 2023-02-26 10:12:21,372 32k INFO ====> Epoch: 39
125
+ 2023-02-26 10:15:03,991 32k INFO ====> Epoch: 40
126
+ 2023-02-26 10:15:25,470 32k INFO Train Epoch: 41 [0%]
127
+ 2023-02-26 10:15:25,470 32k INFO [2.4647908210754395, 2.4343149662017822, 9.655537605285645, 17.267290115356445, 1.031686782836914, 6600, 9.950121682254156e-05]
128
+ 2023-02-26 10:17:46,940 32k INFO ====> Epoch: 41
129
+ 2023-02-26 10:18:38,447 32k INFO Train Epoch: 42 [21%]
130
+ 2023-02-26 10:18:38,447 32k INFO [2.5329630374908447, 2.130812644958496, 11.61275863647461, 16.99370574951172, 0.6277658939361572, 6800, 9.948877917043875e-05]
131
+ 2023-02-26 10:20:29,739 32k INFO ====> Epoch: 42
132
+ 2023-02-26 10:21:51,358 32k INFO Train Epoch: 43 [42%]
133
+ 2023-02-26 10:21:51,358 32k INFO [2.4816935062408447, 2.166879415512085, 8.776912689208984, 14.728988647460938, 0.7365485429763794, 7000, 9.947634307304244e-05]
134
+ 2023-02-26 10:21:55,799 32k INFO Saving model and optimizer state at iteration 43 to ./logs\32k\G_7000.pth
135
+ 2023-02-26 10:22:13,584 32k INFO Saving model and optimizer state at iteration 43 to ./logs\32k\D_7000.pth
136
+ 2023-02-26 10:23:38,459 32k INFO ====> Epoch: 43
137
+ 2023-02-26 10:25:30,320 32k INFO Train Epoch: 44 [64%]
138
+ 2023-02-26 10:25:30,321 32k INFO [2.4604673385620117, 2.3776729106903076, 9.516617774963379, 18.740528106689453, 0.7489030361175537, 7200, 9.94639085301583e-05]
139
+ 2023-02-26 10:26:21,626 32k INFO ====> Epoch: 44
140
+ 2023-02-26 10:28:43,231 32k INFO Train Epoch: 45 [85%]
141
+ 2023-02-26 10:28:43,231 32k INFO [2.274491786956787, 2.4330806732177734, 11.296477317810059, 20.537822723388672, 0.806971549987793, 7400, 9.945147554159202e-05]
142
+ 2023-02-26 10:29:04,414 32k INFO ====> Epoch: 45
143
+ 2023-02-26 10:31:47,148 32k INFO ====> Epoch: 46
144
+ 2023-02-26 10:32:17,260 32k INFO Train Epoch: 47 [6%]
145
+ 2023-02-26 10:32:17,260 32k INFO [2.7507975101470947, 2.126884698867798, 6.915186405181885, 13.514629364013672, 1.2603626251220703, 7600, 9.942661422663591e-05]
146
+ 2023-02-26 10:34:30,480 32k INFO ====> Epoch: 47
147
+ 2023-02-26 10:35:30,556 32k INFO Train Epoch: 48 [27%]
148
+ 2023-02-26 10:35:30,556 32k INFO [2.0968923568725586, 2.708631992340088, 13.675420761108398, 20.606611251831055, 0.8771938681602478, 7800, 9.941418589985758e-05]
149
+ 2023-02-26 10:37:13,522 32k INFO ====> Epoch: 48
150
+ 2023-02-26 10:38:43,695 32k INFO Train Epoch: 49 [48%]
151
+ 2023-02-26 10:38:43,695 32k INFO [2.3763391971588135, 2.3627359867095947, 9.687402725219727, 16.78105354309082, 1.0562866926193237, 8000, 9.940175912662009e-05]
152
+ 2023-02-26 10:38:48,175 32k INFO Saving model and optimizer state at iteration 49 to ./logs\32k\G_8000.pth
153
+ 2023-02-26 10:39:01,774 32k INFO Saving model and optimizer state at iteration 49 to ./logs\32k\D_8000.pth
154
+ 2023-02-26 10:40:17,615 32k INFO ====> Epoch: 49
155
+ 2023-02-26 10:42:17,724 32k INFO Train Epoch: 50 [70%]
156
+ 2023-02-26 10:42:17,725 32k INFO [2.344200611114502, 2.1781373023986816, 11.780096054077148, 18.40574073791504, 1.086164116859436, 8200, 9.938933390672926e-05]
157
+ 2023-02-26 10:43:00,363 32k INFO ====> Epoch: 50
158
+ 2023-02-26 10:45:30,408 32k INFO Train Epoch: 51 [91%]
159
+ 2023-02-26 10:45:30,408 32k INFO [2.4205174446105957, 2.7213199138641357, 7.73087215423584, 15.820646286010742, 0.7065295577049255, 8400, 9.937691023999092e-05]
160
+ 2023-02-26 10:45:43,007 32k INFO ====> Epoch: 51
161
+ 2023-02-26 10:48:26,056 32k INFO ====> Epoch: 52
162
+ 2023-02-26 10:49:04,736 32k INFO Train Epoch: 53 [12%]
163
+ 2023-02-26 10:49:04,737 32k INFO [2.5237555503845215, 2.2003116607666016, 8.608362197875977, 19.112735748291016, 0.7628692984580994, 8600, 9.935206756519513e-05]
164
+ 2023-02-26 10:51:09,259 32k INFO ====> Epoch: 53
165
+ 2023-02-26 10:52:17,858 32k INFO Train Epoch: 54 [33%]
166
+ 2023-02-26 10:52:17,858 32k INFO [2.260878562927246, 2.6713953018188477, 8.389069557189941, 18.49284553527832, 0.44008728861808777, 8800, 9.933964855674948e-05]
167
+ 2023-02-26 10:53:51,921 32k INFO ====> Epoch: 54
168
+ 2023-02-26 10:55:30,509 32k INFO Train Epoch: 55 [55%]
169
+ 2023-02-26 10:55:30,509 32k INFO [2.3566603660583496, 2.293156385421753, 10.89488410949707, 21.621196746826172, 0.8128052949905396, 9000, 9.932723110067987e-05]
170
+ 2023-02-26 10:55:34,991 32k INFO Saving model and optimizer state at iteration 55 to ./logs\32k\G_9000.pth
171
+ 2023-02-26 10:55:51,219 32k INFO Saving model and optimizer state at iteration 55 to ./logs\32k\D_9000.pth
172
+ 2023-02-26 10:56:58,333 32k INFO ====> Epoch: 55
173
+ 2023-02-26 10:59:07,126 32k INFO Train Epoch: 56 [76%]
174
+ 2023-02-26 10:59:07,126 32k INFO [2.519491195678711, 2.2075395584106445, 8.736771583557129, 16.377058029174805, 1.0564581155776978, 9200, 9.931481519679228e-05]
175
+ 2023-02-26 10:59:41,285 32k INFO ====> Epoch: 56
176
+ 2023-02-26 11:02:20,544 32k INFO Train Epoch: 57 [97%]
177
+ 2023-02-26 11:02:20,544 32k INFO [2.3525781631469727, 2.154360771179199, 6.794205665588379, 13.52789306640625, 0.7326827645301819, 9400, 9.930240084489267e-05]
178
+ 2023-02-26 11:02:24,511 32k INFO ====> Epoch: 57
179
+ 2023-02-26 11:05:07,452 32k INFO ====> Epoch: 58
180
+ 2023-02-26 11:05:54,673 32k INFO Train Epoch: 59 [18%]
181
+ 2023-02-26 11:05:54,673 32k INFO [2.4005348682403564, 2.302424907684326, 13.667900085449219, 19.535924911499023, 1.1588077545166016, 9600, 9.927757679628145e-05]
182
+ 2023-02-26 11:07:50,410 32k INFO ====> Epoch: 59
183
+ 2023-02-26 11:09:07,843 32k INFO Train Epoch: 60 [39%]
184
+ 2023-02-26 11:09:07,843 32k INFO [2.6450674533843994, 2.2238292694091797, 9.370787620544434, 17.595104217529297, 1.1768231391906738, 9800, 9.926516709918191e-05]
185
+ 2023-02-26 11:10:33,485 32k INFO ====> Epoch: 60
186
+ 2023-02-26 11:12:21,000 32k INFO Train Epoch: 61 [61%]
187
+ 2023-02-26 11:12:21,000 32k INFO [2.4990792274475098, 2.169142961502075, 10.941349983215332, 20.05807876586914, 0.4903837740421295, 10000, 9.92527589532945e-05]
188
+ 2023-02-26 11:12:25,586 32k INFO Saving model and optimizer state at iteration 61 to ./logs\32k\G_10000.pth
189
+ 2023-02-26 11:12:44,877 32k INFO Saving model and optimizer state at iteration 61 to ./logs\32k\D_10000.pth
190
+ 2023-02-26 11:13:43,760 32k INFO ====> Epoch: 61
191
+ 2023-02-26 11:16:01,215 32k INFO Train Epoch: 62 [82%]
192
+ 2023-02-26 11:16:01,215 32k INFO [2.5079026222229004, 2.3164236545562744, 11.353753089904785, 17.586023330688477, 0.8176776170730591, 10200, 9.924035235842533e-05]
193
+ 2023-02-26 11:16:26,741 32k INFO ====> Epoch: 62
194
+ 2023-02-26 11:19:09,446 32k INFO ====> Epoch: 63
195
+ 2023-02-26 11:19:35,364 32k INFO Train Epoch: 64 [3%]
196
+ 2023-02-26 11:19:35,364 32k INFO [2.4500768184661865, 2.264669418334961, 10.30024528503418, 20.142595291137695, 0.8843652606010437, 10400, 9.921554382096622e-05]
197
+ 2023-02-26 11:21:51,912 32k INFO ====> Epoch: 64
198
+ 2023-02-26 11:22:47,692 32k INFO Train Epoch: 65 [24%]
199
+ 2023-02-26 11:22:47,693 32k INFO [2.3112714290618896, 2.271970510482788, 11.571227073669434, 20.092790603637695, 1.156628966331482, 10600, 9.92031418779886e-05]
200
+ 2023-02-26 11:24:34,238 32k INFO ====> Epoch: 65
201
+ 2023-02-26 11:26:00,281 32k INFO Train Epoch: 66 [45%]
202
+ 2023-02-26 11:26:00,281 32k INFO [2.2577104568481445, 2.4161815643310547, 13.695369720458984, 20.6165714263916, 0.7836306095123291, 10800, 9.919074148525384e-05]
203
+ 2023-02-26 11:27:17,333 32k INFO ====> Epoch: 66
204
+ 2023-02-26 11:29:13,378 32k INFO Train Epoch: 67 [67%]
205
+ 2023-02-26 11:29:13,379 32k INFO [2.5594491958618164, 2.362276554107666, 8.985106468200684, 17.9657039642334, 0.5958060026168823, 11000, 9.917834264256819e-05]
206
+ 2023-02-26 11:29:17,966 32k INFO Saving model and optimizer state at iteration 67 to ./logs\32k\G_11000.pth
207
+ 2023-02-26 11:29:37,399 32k INFO Saving model and optimizer state at iteration 67 to ./logs\32k\D_11000.pth
208
+ 2023-02-26 11:30:27,649 32k INFO ====> Epoch: 67
209
+ 2023-02-26 11:32:53,499 32k INFO Train Epoch: 68 [88%]
210
+ 2023-02-26 11:32:53,500 32k INFO [2.488551378250122, 2.086569309234619, 7.120002746582031, 15.225802421569824, 1.0212639570236206, 11200, 9.916594534973787e-05]
211
+ 2023-02-26 11:33:10,300 32k INFO ====> Epoch: 68
212
+ 2023-02-26 11:35:52,703 32k INFO ====> Epoch: 69
213
+ 2023-02-26 11:36:27,149 32k INFO Train Epoch: 70 [9%]
214
+ 2023-02-26 11:36:27,150 32k INFO [2.421487808227539, 2.171816825866699, 10.875814437866211, 18.283443450927734, 0.7764310836791992, 11400, 9.914115541286833e-05]
215
+ 2023-02-26 11:38:35,072 32k INFO ====> Epoch: 70
216
+ 2023-02-26 11:39:39,324 32k INFO Train Epoch: 71 [30%]
217
+ 2023-02-26 11:39:39,325 32k INFO [2.53536319732666, 2.252779722213745, 8.362150192260742, 18.167621612548828, 0.8004449009895325, 11600, 9.912876276844171e-05]
218
+ 2023-02-26 11:41:17,298 32k INFO ====> Epoch: 71
219
+ 2023-02-26 11:42:51,836 32k INFO Train Epoch: 72 [52%]
220
+ 2023-02-26 11:42:51,836 32k INFO [2.354383945465088, 2.426551342010498, 10.844928741455078, 19.34088897705078, 0.8941074013710022, 11800, 9.911637167309565e-05]
221
+ 2023-02-26 11:43:59,941 32k INFO ====> Epoch: 72
222
+ 2023-02-26 11:46:04,250 32k INFO Train Epoch: 73 [73%]
223
+ 2023-02-26 11:46:04,250 32k INFO [2.426401376724243, 2.322526454925537, 12.05467700958252, 20.506017684936523, 0.779582142829895, 12000, 9.910398212663652e-05]
224
+ 2023-02-26 11:46:08,787 32k INFO Saving model and optimizer state at iteration 73 to ./logs\32k\G_12000.pth
225
+ 2023-02-26 11:46:29,376 32k INFO Saving model and optimizer state at iteration 73 to ./logs\32k\D_12000.pth
226
+ 2023-02-26 11:47:11,053 32k INFO ====> Epoch: 73
227
+ 2023-02-26 11:49:45,489 32k INFO Train Epoch: 74 [94%]
228
+ 2023-02-26 11:49:45,490 32k INFO [2.5343637466430664, 2.27656888961792, 7.482431411743164, 15.527031898498535, 1.1111173629760742, 12200, 9.909159412887068e-05]
229
+ 2023-02-26 11:49:53,749 32k INFO ====> Epoch: 74
230
+ 2023-02-26 11:52:35,964 32k INFO ====> Epoch: 75
231
+ 2023-02-26 11:53:19,010 32k INFO Train Epoch: 76 [15%]
232
+ 2023-02-26 11:53:19,011 32k INFO [2.406834363937378, 2.1646063327789307, 11.008843421936035, 19.259048461914062, 0.9326943159103394, 12400, 9.906682277864462e-05]
233
+ 2023-02-26 11:55:19,408 32k INFO ====> Epoch: 76
234
+ 2023-02-26 11:56:32,760 32k INFO Train Epoch: 77 [36%]
235
+ 2023-02-26 11:56:32,761 32k INFO [2.4522836208343506, 2.1643474102020264, 8.926955223083496, 16.831026077270508, 0.900156557559967, 12600, 9.905443942579728e-05]
236
+ 2023-02-26 11:58:02,659 32k INFO ====> Epoch: 77
237
+ 2023-02-26 11:59:45,816 32k INFO Train Epoch: 78 [58%]
238
+ 2023-02-26 11:59:45,816 32k INFO [2.5271518230438232, 1.9529402256011963, 10.654559135437012, 18.19529914855957, 0.8628219366073608, 12800, 9.904205762086905e-05]
239
+ 2023-02-26 12:00:45,628 32k INFO ====> Epoch: 78
240
+ 2023-02-26 12:02:58,997 32k INFO Train Epoch: 79 [79%]
241
+ 2023-02-26 12:02:58,997 32k INFO [2.6350643634796143, 2.541313648223877, 8.186237335205078, 15.443192481994629, 0.9821919202804565, 13000, 9.902967736366644e-05]
242
+ 2023-02-26 12:03:03,614 32k INFO Saving model and optimizer state at iteration 79 to ./logs\32k\G_13000.pth
243
+ 2023-02-26 12:03:21,550 32k INFO Saving model and optimizer state at iteration 79 to ./logs\32k\D_13000.pth
244
+ 2023-02-26 12:03:54,598 32k INFO ====> Epoch: 79
245
+ 2023-02-26 12:06:37,363 32k INFO ====> Epoch: 80
246
+ 2023-02-26 12:06:58,998 32k INFO Train Epoch: 81 [0%]
247
+ 2023-02-26 12:06:58,998 32k INFO [2.4078915119171143, 2.3199801445007324, 11.23653793334961, 20.276243209838867, 1.1815028190612793, 13200, 9.900492149166423e-05]
248
+ 2023-02-26 12:09:20,572 32k INFO ====> Epoch: 81
249
+ 2023-02-26 12:10:12,077 32k INFO Train Epoch: 82 [21%]
250
+ 2023-02-26 12:10:12,077 32k INFO [2.6164400577545166, 2.0303988456726074, 9.276334762573242, 16.739444732666016, 0.6553909778594971, 13400, 9.899254587647776e-05]
251
+ 2023-02-26 12:12:03,130 32k INFO ====> Epoch: 82
252
+ 2023-02-26 12:13:24,471 32k INFO Train Epoch: 83 [42%]
253
+ 2023-02-26 12:13:24,471 32k INFO [2.5159566402435303, 2.14263916015625, 9.134819984436035, 15.940903663635254, 0.8988858461380005, 13600, 9.89801718082432e-05]
254
+ 2023-02-26 12:14:45,738 32k INFO ====> Epoch: 83
255
+ 2023-02-26 12:16:37,387 32k INFO Train Epoch: 84 [64%]
256
+ 2023-02-26 12:16:37,387 32k INFO [2.477391242980957, 2.244145393371582, 10.206537246704102, 18.64247703552246, 1.085805058479309, 13800, 9.896779928676716e-05]
257
+ 2023-02-26 12:17:28,668 32k INFO ====> Epoch: 84
258
+ 2023-02-26 12:19:51,028 32k INFO Train Epoch: 85 [85%]
259
+ 2023-02-26 12:19:51,028 32k INFO [2.391843557357788, 2.3440544605255127, 9.451476097106934, 18.754911422729492, 0.7714139819145203, 14000, 9.895542831185631e-05]
260
+ 2023-02-26 12:19:55,663 32k INFO Saving model and optimizer state at iteration 85 to ./logs\32k\G_14000.pth
261
+ 2023-02-26 12:20:14,439 32k INFO Saving model and optimizer state at iteration 85 to ./logs\32k\D_14000.pth
262
+ 2023-02-26 12:20:38,984 32k INFO ====> Epoch: 85
263
+ 2023-02-26 12:23:21,881 32k INFO ====> Epoch: 86
264
+ 2023-02-26 12:23:52,036 32k INFO Train Epoch: 87 [6%]
265
+ 2023-02-26 12:23:52,036 32k INFO [2.5880343914031982, 2.229883909225464, 9.676251411437988, 17.177593231201172, 1.0161173343658447, 14200, 9.89306910009569e-05]
266
+ 2023-02-26 12:26:04,685 32k INFO ====> Epoch: 87
267
+ 2023-02-26 12:27:04,860 32k INFO Train Epoch: 88 [27%]
268
+ 2023-02-26 12:27:04,861 32k INFO [2.4286739826202393, 2.340498685836792, 8.028197288513184, 17.648345947265625, 0.772789716720581, 14400, 9.891832466458178e-05]
269
+ 2023-02-26 12:28:47,879 32k INFO ====> Epoch: 88
270
+ 2023-02-26 12:30:17,930 32k INFO Train Epoch: 89 [48%]
271
+ 2023-02-26 12:30:17,930 32k INFO [2.43446683883667, 2.1952078342437744, 9.100362777709961, 15.110132217407227, 0.9286537170410156, 14600, 9.89059598739987e-05]
272
+ 2023-02-26 12:31:30,914 32k INFO ====> Epoch: 89
273
+ 2023-02-26 12:33:31,510 32k INFO Train Epoch: 90 [70%]
274
+ 2023-02-26 12:33:31,510 32k INFO [2.373197078704834, 2.1595218181610107, 11.149375915527344, 18.03667640686035, 1.4922605752944946, 14800, 9.889359662901445e-05]
275
+ 2023-02-26 12:34:14,202 32k INFO ====> Epoch: 90
276
+ 2023-02-26 12:36:44,894 32k INFO Train Epoch: 91 [91%]
277
+ 2023-02-26 12:36:44,894 32k INFO [2.510439872741699, 2.3874175548553467, 10.754826545715332, 17.15918731689453, 0.9694947004318237, 15000, 9.888123492943583e-05]
278
+ 2023-02-26 12:36:49,496 32k INFO Saving model and optimizer state at iteration 91 to ./logs\32k\G_15000.pth
279
+ 2023-02-26 12:37:05,821 32k INFO Saving model and optimizer state at iteration 91 to ./logs\32k\D_15000.pth
280
+ 2023-02-26 12:37:21,861 32k INFO ====> Epoch: 91
281
+ 2023-02-26 12:40:04,580 32k INFO ====> Epoch: 92
282
+ 2023-02-26 12:40:43,339 32k INFO Train Epoch: 93 [12%]
283
+ 2023-02-26 12:40:43,339 32k INFO [2.642388105392456, 2.06752872467041, 8.832306861877441, 16.4188232421875, 0.5744019150733948, 15200, 9.885651616572276e-05]
284
+ 2023-02-26 12:42:47,745 32k INFO ====> Epoch: 93
285
+ 2023-02-26 12:43:56,624 32k INFO Train Epoch: 94 [33%]
286
+ 2023-02-26 12:43:56,624 32k INFO [2.478945255279541, 2.3418726921081543, 8.750991821289062, 18.5238094329834, 0.5728946924209595, 15400, 9.884415910120204e-05]
287
+ 2023-02-26 12:45:31,004 32k INFO ====> Epoch: 94
288
+ 2023-02-26 12:47:10,134 32k INFO Train Epoch: 95 [55%]
289
+ 2023-02-26 12:47:10,134 32k INFO [2.3579206466674805, 2.31203031539917, 12.870414733886719, 18.046958923339844, 0.9995560050010681, 15600, 9.883180358131438e-05]
290
+ 2023-02-26 12:48:14,547 32k INFO ====> Epoch: 95
291
+ 2023-02-26 12:50:23,600 32k INFO Train Epoch: 96 [76%]
292
+ 2023-02-26 12:50:23,601 32k INFO [2.4442484378814697, 2.2578535079956055, 13.333099365234375, 20.16985511779785, 0.7437376379966736, 15800, 9.881944960586671e-05]
293
+ 2023-02-26 12:50:57,662 32k INFO ====> Epoch: 96
294
+ 2023-02-26 12:53:36,723 32k INFO Train Epoch: 97 [97%]
295
+ 2023-02-26 12:53:36,723 32k INFO [2.3909411430358887, 2.307910919189453, 10.894923210144043, 17.3150691986084, 0.68175208568573, 16000, 9.880709717466598e-05]
296
+ 2023-02-26 12:53:41,364 32k INFO Saving model and optimizer state at iteration 97 to ./logs\32k\G_16000.pth
297
+ 2023-02-26 12:53:59,201 32k INFO Saving model and optimizer state at iteration 97 to ./logs\32k\D_16000.pth
298
+ 2023-02-26 12:54:06,830 32k INFO ====> Epoch: 97
299
+ 2023-02-26 12:56:49,437 32k INFO ====> Epoch: 98
300
+ 2023-02-26 12:57:36,944 32k INFO Train Epoch: 99 [18%]
301
+ 2023-02-26 12:57:36,944 32k INFO [2.634997606277466, 2.167447090148926, 9.929041862487793, 16.28448486328125, 0.8011267781257629, 16200, 9.87823969442332e-05]
302
+ 2023-02-26 12:59:33,007 32k INFO ====> Epoch: 99
303
+ 2023-02-26 13:00:50,543 32k INFO Train Epoch: 100 [39%]
304
+ 2023-02-26 13:00:50,543 32k INFO [2.3184642791748047, 2.705042839050293, 16.255216598510742, 21.745960235595703, 0.44556474685668945, 16400, 9.877004914461517e-05]
305
+ 2023-02-26 13:02:16,391 32k INFO ====> Epoch: 100
306
+ 2023-02-26 13:04:03,758 32k INFO Train Epoch: 101 [61%]
307
+ 2023-02-26 13:04:03,759 32k INFO [2.477332830429077, 2.2263150215148926, 9.293753623962402, 14.126523971557617, 1.1971876621246338, 16600, 9.875770288847208e-05]
308
+ 2023-02-26 13:04:59,212 32k INFO ====> Epoch: 101
309
+ 2023-02-26 13:07:16,685 32k INFO Train Epoch: 102 [82%]
310
+ 2023-02-26 13:07:16,685 32k INFO [2.2978484630584717, 2.2822859287261963, 16.321632385253906, 20.7629337310791, 1.3268964290618896, 16800, 9.874535817561101e-05]
311
+ 2023-02-26 13:07:42,165 32k INFO ====> Epoch: 102
312
+ 2023-02-26 13:10:24,749 32k INFO ====> Epoch: 103
313
+ 2023-02-26 13:10:50,748 32k INFO Train Epoch: 104 [3%]
314
+ 2023-02-26 13:10:50,748 32k INFO [2.371419906616211, 2.4400792121887207, 12.4196138381958, 21.362497329711914, 0.7677367925643921, 17000, 9.872067337896332e-05]
315
+ 2023-02-26 13:10:55,199 32k INFO Saving model and optimizer state at iteration 104 to ./logs\32k\G_17000.pth
316
+ 2023-02-26 13:11:13,185 32k INFO Saving model and optimizer state at iteration 104 to ./logs\32k\D_17000.pth
317
+ 2023-02-26 13:13:33,715 32k INFO ====> Epoch: 104
318
+ 2023-02-26 13:14:29,696 32k INFO Train Epoch: 105 [24%]
319
+ 2023-02-26 13:14:29,696 32k INFO [2.47296142578125, 2.4107069969177246, 12.652009963989258, 18.860116958618164, 0.5747465491294861, 17200, 9.870833329479095e-05]
320
+ 2023-02-26 13:16:16,869 32k INFO ====> Epoch: 105
321
+ 2023-02-26 13:17:42,916 32k INFO Train Epoch: 106 [45%]
322
+ 2023-02-26 13:17:42,916 32k INFO [2.304741859436035, 2.252957344055176, 9.988290786743164, 17.014446258544922, 1.0552763938903809, 17400, 9.86959947531291e-05]
323
+ 2023-02-26 13:18:59,953 32k INFO ====> Epoch: 106
324
+ 2023-02-26 13:20:55,999 32k INFO Train Epoch: 107 [67%]
325
+ 2023-02-26 13:20:56,000 32k INFO [2.284235715866089, 2.448091506958008, 12.869209289550781, 20.055776596069336, 0.9019550681114197, 17600, 9.868365775378495e-05]
326
+ 2023-02-26 13:21:42,926 32k INFO ====> Epoch: 107
327
+ 2023-02-26 13:24:09,043 32k INFO Train Epoch: 108 [88%]
328
+ 2023-02-26 13:24:09,043 32k INFO [2.475769281387329, 2.1433467864990234, 11.07569694519043, 18.09524917602539, 0.5900028347969055, 17800, 9.867132229656573e-05]
329
+ 2023-02-26 13:24:25,935 32k INFO ====> Epoch: 108
330
+ 2023-02-26 13:27:08,654 32k INFO ====> Epoch: 109
331
+ 2023-02-26 13:27:43,160 32k INFO Train Epoch: 110 [9%]
332
+ 2023-02-26 13:27:43,160 32k INFO [2.4269490242004395, 2.379713535308838, 11.117244720458984, 17.388996124267578, 1.439730167388916, 18000, 9.864665600773098e-05]
333
+ 2023-02-26 13:27:47,636 32k INFO Saving model and optimizer state at iteration 110 to ./logs\32k\G_18000.pth
334
+ 2023-02-26 13:28:04,718 32k INFO Saving model and optimizer state at iteration 110 to ./logs\32k\D_18000.pth
335
+ 2023-02-26 13:30:16,432 32k INFO ====> Epoch: 110
336
+ 2023-02-26 13:31:20,894 32k INFO Train Epoch: 111 [30%]
337
+ 2023-02-26 13:31:20,895 32k INFO [2.3756492137908936, 2.4075100421905518, 10.187261581420898, 18.855302810668945, 1.052577257156372, 18200, 9.863432517573002e-05]
338
+ 2023-02-26 13:32:59,526 32k INFO ====> Epoch: 111
339
+ 2023-02-26 13:34:34,102 32k INFO Train Epoch: 112 [52%]
340
+ 2023-02-26 13:34:34,102 32k INFO [2.535065174102783, 2.273414134979248, 10.668708801269531, 19.734350204467773, 0.91983962059021, 18400, 9.862199588508305e-05]
341
+ 2023-02-26 13:35:42,444 32k INFO ====> Epoch: 112
342
+ 2023-02-26 13:37:59,802 32k INFO Train Epoch: 113 [73%]
343
+ 2023-02-26 13:37:59,803 32k INFO [2.4501099586486816, 2.217410087585449, 11.971563339233398, 20.161376953125, 0.8216803073883057, 18600, 9.86096681355974e-05]
344
+ 2023-02-26 13:38:38,263 32k INFO ====> Epoch: 113
345
+ 2023-02-26 13:41:13,187 32k INFO Train Epoch: 114 [94%]
346
+ 2023-02-26 13:41:13,188 32k INFO [2.3596482276916504, 2.3115038871765137, 11.398310661315918, 20.521438598632812, 0.8498327136039734, 18800, 9.859734192708044e-05]
347
+ 2023-02-26 13:41:21,449 32k INFO ====> Epoch: 114
348
+ 2023-02-26 13:44:03,918 32k INFO ====> Epoch: 115
349
+ 2023-02-26 13:44:47,046 32k INFO Train Epoch: 116 [15%]
350
+ 2023-02-26 13:44:47,047 32k INFO [2.0652332305908203, 3.1315736770629883, 8.01175594329834, 14.535475730895996, 0.7651816606521606, 19000, 9.857269413218213e-05]
351
+ 2023-02-26 13:44:51,565 32k INFO Saving model and optimizer state at iteration 116 to ./logs\32k\G_19000.pth
352
+ 2023-02-26 13:45:10,729 32k INFO Saving model and optimizer state at iteration 116 to ./logs\32k\D_19000.pth
353
+ 2023-02-26 13:47:13,579 32k INFO ====> Epoch: 116
354
+ 2023-02-26 13:48:26,856 32k INFO Train Epoch: 117 [36%]
355
+ 2023-02-26 13:48:26,856 32k INFO [2.108147621154785, 2.9401679039001465, 10.276931762695312, 16.358720779418945, 1.1584646701812744, 19200, 9.85603725454156e-05]
356
+ 2023-02-26 13:49:56,624 32k INFO ====> Epoch: 117
357
+ 2023-02-26 13:51:40,008 32k INFO Train Epoch: 118 [58%]
358
+ 2023-02-26 13:51:40,009 32k INFO [2.5673112869262695, 2.034385919570923, 9.616512298583984, 16.208316802978516, 0.5776308178901672, 19400, 9.854805249884741e-05]
359
+ 2023-02-26 13:52:39,631 32k INFO ====> Epoch: 118
360
+ 2023-02-26 13:54:52,831 32k INFO Train Epoch: 119 [79%]
361
+ 2023-02-26 13:54:52,831 32k INFO [2.3758647441864014, 2.9925012588500977, 11.676872253417969, 16.84213638305664, 0.8426787257194519, 19600, 9.853573399228505e-05]
362
+ 2023-02-26 13:55:22,466 32k INFO ====> Epoch: 119
363
+ 2023-02-26 13:58:04,962 32k INFO ====> Epoch: 120
364
+ 2023-02-26 13:58:26,712 32k INFO Train Epoch: 121 [0%]
365
+ 2023-02-26 13:58:26,712 32k INFO [2.3770370483398438, 2.334702491760254, 11.059645652770996, 18.461627960205078, 0.8800027370452881, 19800, 9.851110159840781e-05]
366
+ 2023-02-26 14:00:47,783 32k INFO ====> Epoch: 121
367
+ 2023-02-26 14:01:39,407 32k INFO Train Epoch: 122 [21%]
368
+ 2023-02-26 14:01:39,408 32k INFO [2.527496814727783, 2.255011796951294, 11.402054786682129, 17.249414443969727, 0.4702906012535095, 20000, 9.8498787710708e-05]
369
+ 2023-02-26 14:01:43,901 32k INFO Saving model and optimizer state at iteration 122 to ./logs\32k\G_20000.pth
370
+ 2023-02-26 14:02:02,003 32k INFO Saving model and optimizer state at iteration 122 to ./logs\32k\D_20000.pth
371
+ 2023-02-26 14:03:56,814 32k INFO ====> Epoch: 122
372
+ 2023-02-26 14:05:18,678 32k INFO Train Epoch: 123 [42%]
373
+ 2023-02-26 14:05:18,678 32k INFO [2.512394428253174, 2.2709600925445557, 11.919401168823242, 17.652254104614258, 0.645085871219635, 20200, 9.848647536224416e-05]
374
+ 2023-02-26 14:06:40,243 32k INFO ====> Epoch: 123
375
+ 2023-02-26 14:08:31,954 32k INFO Train Epoch: 124 [64%]
376
+ 2023-02-26 14:08:31,955 32k INFO [2.643122434616089, 2.007068395614624, 6.659689426422119, 14.161267280578613, 0.931346595287323, 20400, 9.847416455282387e-05]
377
+ 2023-02-26 14:09:23,160 32k INFO ====> Epoch: 124
378
+ 2023-02-26 14:11:44,973 32k INFO Train Epoch: 125 [85%]
379
+ 2023-02-26 14:11:44,974 32k INFO [2.2812883853912354, 2.3466286659240723, 10.443126678466797, 17.098127365112305, 0.8298570513725281, 20600, 9.846185528225477e-05]
380
+ 2023-02-26 14:12:06,190 32k INFO ====> Epoch: 125
381
+ 2023-02-26 14:14:49,057 32k INFO ====> Epoch: 126
382
+ 2023-02-26 14:15:19,377 32k INFO Train Epoch: 127 [6%]
383
+ 2023-02-26 14:15:19,378 32k INFO [2.2862539291381836, 2.4085965156555176, 8.173857688903809, 13.033564567565918, 1.2490079402923584, 20800, 9.84372413569007e-05]
384
+ 2023-02-26 14:17:32,296 32k INFO ====> Epoch: 127
385
+ 2023-02-26 14:18:32,493 32k INFO Train Epoch: 128 [27%]
386
+ 2023-02-26 14:18:32,494 32k INFO [2.3605754375457764, 2.3576247692108154, 8.230725288391113, 16.209781646728516, 0.5764607191085815, 21000, 9.842493670173108e-05]
387
+ 2023-02-26 14:18:36,964 32k INFO Saving model and optimizer state at iteration 128 to ./logs\32k\G_21000.pth
388
+ 2023-02-26 14:18:56,081 32k INFO Saving model and optimizer state at iteration 128 to ./logs\32k\D_21000.pth
389
+ 2023-02-26 14:20:42,104 32k INFO ====> Epoch: 128
390
+ 2023-02-26 14:22:12,423 32k INFO Train Epoch: 129 [48%]
391
+ 2023-02-26 14:22:12,423 32k INFO [2.4556076526641846, 2.319934844970703, 10.994032859802246, 17.579025268554688, 0.9810572862625122, 21200, 9.841263358464336e-05]
392
+ 2023-02-26 14:23:25,204 32k INFO ====> Epoch: 129
393
+ 2023-02-26 14:25:25,430 32k INFO Train Epoch: 130 [70%]
394
+ 2023-02-26 14:25:25,430 32k INFO [2.3116416931152344, 2.5935959815979004, 12.18552303314209, 19.612600326538086, 0.09066768735647202, 21400, 9.840033200544528e-05]
395
+ 2023-02-26 14:26:07,943 32k INFO ====> Epoch: 130
396
+ 2023-02-26 14:28:38,322 32k INFO Train Epoch: 131 [91%]
397
+ 2023-02-26 14:28:38,322 32k INFO [2.417170763015747, 2.866428852081299, 10.004213333129883, 21.375431060791016, 0.7538483142852783, 21600, 9.838803196394459e-05]
398
+ 2023-02-26 14:28:50,906 32k INFO ====> Epoch: 131
399
+ 2023-02-26 14:31:33,676 32k INFO ====> Epoch: 132
400
+ 2023-02-26 14:32:12,439 32k INFO Train Epoch: 133 [12%]
401
+ 2023-02-26 14:32:12,439 32k INFO [2.4903366565704346, 2.381277322769165, 8.76326847076416, 17.031940460205078, 0.6056929230690002, 21800, 9.836343649326659e-05]
402
+ 2023-02-26 14:34:16,420 32k INFO ====> Epoch: 133
403
+ 2023-02-26 14:35:25,160 32k INFO Train Epoch: 134 [33%]
404
+ 2023-02-26 14:35:25,161 32k INFO [2.4677324295043945, 2.5568089485168457, 9.496826171875, 19.7178955078125, 0.6253650784492493, 22000, 9.835114106370493e-05]
405
+ 2023-02-26 14:35:29,650 32k INFO Saving model and optimizer state at iteration 134 to ./logs\32k\G_22000.pth
406
+ 2023-02-26 14:35:46,991 32k INFO Saving model and optimizer state at iteration 134 to ./logs\32k\D_22000.pth
407
+ 2023-02-26 14:37:24,621 32k INFO ====> Epoch: 134
408
+ 2023-02-26 14:39:03,827 32k INFO Train Epoch: 135 [55%]
409
+ 2023-02-26 14:39:03,828 32k INFO [2.3463804721832275, 2.363849639892578, 14.297788619995117, 20.66683006286621, 0.6891955137252808, 22200, 9.833884717107196e-05]
410
+ 2023-02-26 14:40:08,153 32k INFO ====> Epoch: 135
411
+ 2023-02-26 14:42:17,146 32k INFO Train Epoch: 136 [76%]
412
+ 2023-02-26 14:42:17,146 32k INFO [2.5203256607055664, 2.283804416656494, 11.94053840637207, 18.11589241027832, 1.292447805404663, 22400, 9.832655481517557e-05]
413
+ 2023-02-26 14:42:51,186 32k INFO ====> Epoch: 136
414
+ 2023-02-26 14:45:30,393 32k INFO Train Epoch: 137 [97%]
415
+ 2023-02-26 14:45:30,393 32k INFO [2.601602077484131, 2.1686148643493652, 9.941407203674316, 15.776628494262695, 0.6523029208183289, 22600, 9.831426399582366e-05]
416
+ 2023-02-26 14:45:34,385 32k INFO ====> Epoch: 137
417
+ 2023-02-26 14:48:17,181 32k INFO ====> Epoch: 138
418
+ 2023-02-26 14:49:04,561 32k INFO Train Epoch: 139 [18%]
419
+ 2023-02-26 14:49:04,561 32k INFO [2.4520339965820312, 2.3309485912323, 13.66077995300293, 19.651887893676758, 0.536736011505127, 22800, 9.828968696598508e-05]
420
+ 2023-02-26 14:51:00,322 32k INFO ====> Epoch: 139
421
+ 2023-02-26 14:52:17,661 32k INFO Train Epoch: 140 [39%]
422
+ 2023-02-26 14:52:17,662 32k INFO [2.4809441566467285, 2.5217180252075195, 12.526641845703125, 21.054609298706055, 1.3811713457107544, 23000, 9.827740075511432e-05]
423
+ 2023-02-26 14:52:22,135 32k INFO Saving model and optimizer state at iteration 140 to ./logs\32k\G_23000.pth
424
+ 2023-02-26 14:52:39,142 32k INFO Saving model and optimizer state at iteration 140 to ./logs\32k\D_23000.pth
425
+ 2023-02-26 14:54:08,027 32k INFO ====> Epoch: 140
426
+ 2023-02-26 14:55:55,688 32k INFO Train Epoch: 141 [61%]
427
+ 2023-02-26 14:55:55,688 32k INFO [2.57208514213562, 2.1723203659057617, 5.556727886199951, 11.347487449645996, 0.6554768085479736, 23200, 9.826511608001993e-05]
428
+ 2023-02-26 14:56:51,095 32k INFO ====> Epoch: 141
429
+ 2023-02-26 14:59:08,681 32k INFO Train Epoch: 142 [82%]
430
+ 2023-02-26 14:59:08,681 32k INFO [2.2383546829223633, 2.4536244869232178, 12.622249603271484, 18.40056610107422, 0.4919876456260681, 23400, 9.825283294050992e-05]
431
+ 2023-02-26 14:59:34,044 32k INFO ====> Epoch: 142
432
+ 2023-02-26 15:02:16,450 32k INFO ====> Epoch: 143
433
+ 2023-02-26 15:02:42,500 32k INFO Train Epoch: 144 [3%]
434
+ 2023-02-26 15:02:42,501 32k INFO [2.2660953998565674, 2.6160569190979004, 12.693144798278809, 20.315467834472656, 0.5867981314659119, 23600, 9.822827126747529e-05]
435
+ 2023-02-26 15:04:59,831 32k INFO ====> Epoch: 144
436
+ 2023-02-26 15:05:55,876 32k INFO Train Epoch: 145 [24%]
437
+ 2023-02-26 15:05:55,876 32k INFO [2.4133129119873047, 2.3682570457458496, 12.988896369934082, 19.82603645324707, 0.8757506012916565, 23800, 9.821599273356685e-05]
438
+ 2023-02-26 15:07:42,996 32k INFO ====> Epoch: 145
439
+ 2023-02-26 15:09:09,009 32k INFO Train Epoch: 146 [45%]
440
+ 2023-02-26 15:09:09,010 32k INFO [2.371466875076294, 2.411593198776245, 11.293488502502441, 16.904762268066406, 0.918782651424408, 24000, 9.820371573447515e-05]
441
+ 2023-02-26 15:09:13,478 32k INFO Saving model and optimizer state at iteration 146 to ./logs\32k\G_24000.pth
442
+ 2023-02-26 15:09:32,722 32k INFO Saving model and optimizer state at iteration 146 to ./logs\32k\D_24000.pth
443
+ 2023-02-26 15:10:52,999 32k INFO ====> Epoch: 146
444
+ 2023-02-26 15:12:49,426 32k INFO Train Epoch: 147 [67%]
445
+ 2023-02-26 15:12:49,426 32k INFO [2.5269036293029785, 2.449225664138794, 11.160113334655762, 19.884809494018555, 0.6317355632781982, 24200, 9.819144027000834e-05]
446
+ 2023-02-26 15:13:36,511 32k INFO ====> Epoch: 147
447
+ 2023-02-26 15:16:02,727 32k INFO Train Epoch: 148 [88%]
448
+ 2023-02-26 15:16:02,728 32k INFO [2.286109209060669, 2.792794942855835, 8.917141914367676, 14.302690505981445, 0.7620863318443298, 24400, 9.817916633997459e-05]
449
+ 2023-02-26 15:16:19,590 32k INFO ====> Epoch: 148
450
+ 2023-02-26 15:19:02,564 32k INFO ====> Epoch: 149
451
+ 2023-02-26 15:19:37,203 32k INFO Train Epoch: 150 [9%]
452
+ 2023-02-26 15:19:37,203 32k INFO [2.229402542114258, 2.738642692565918, 11.272886276245117, 16.66368293762207, 1.1462740898132324, 24600, 9.815462308243906e-05]
453
+ 2023-02-26 15:21:45,802 32k INFO ====> Epoch: 150
454
+ 2023-02-26 15:22:50,583 32k INFO Train Epoch: 151 [30%]
455
+ 2023-02-26 15:22:50,584 32k INFO [2.3389620780944824, 2.6368374824523926, 10.70553207397461, 18.062721252441406, 1.1679829359054565, 24800, 9.814235375455375e-05]
456
+ 2023-02-26 15:24:29,362 32k INFO ====> Epoch: 151
457
+ 2023-02-26 15:26:04,092 32k INFO Train Epoch: 152 [52%]
458
+ 2023-02-26 15:26:04,092 32k INFO [2.5250439643859863, 2.5258371829986572, 9.597591400146484, 18.75698471069336, 0.38917434215545654, 25000, 9.813008596033443e-05]
459
+ 2023-02-26 15:26:08,573 32k INFO Saving model and optimizer state at iteration 152 to ./logs\32k\G_25000.pth
460
+ 2023-02-26 15:26:25,818 32k INFO Saving model and optimizer state at iteration 152 to ./logs\32k\D_25000.pth
461
+ 2023-02-26 15:27:37,202 32k INFO ====> Epoch: 152
462
+ 2023-02-26 15:29:41,926 32k INFO Train Epoch: 153 [73%]
463
+ 2023-02-26 15:29:41,927 32k INFO [2.3494515419006348, 2.425994873046875, 12.536174774169922, 20.15695571899414, 0.5543274283409119, 25200, 9.811781969958938e-05]
464
+ 2023-02-26 15:30:20,303 32k INFO ====> Epoch: 153
465
+ 2023-02-26 15:32:55,039 32k INFO Train Epoch: 154 [94%]
466
+ 2023-02-26 15:32:55,039 32k INFO [2.4024767875671387, 2.125033140182495, 10.49553394317627, 16.836292266845703, 0.6470404267311096, 25400, 9.810555497212693e-05]
467
+ 2023-02-26 15:33:03,303 32k INFO ====> Epoch: 154
468
+ 2023-02-26 15:35:46,178 32k INFO ====> Epoch: 155
469
+ 2023-02-26 15:36:29,478 32k INFO Train Epoch: 156 [15%]
470
+ 2023-02-26 15:36:29,478 32k INFO [2.6614341735839844, 2.014702081680298, 8.740072250366211, 12.970017433166504, 0.6980465650558472, 25600, 9.808103011628319e-05]
471
+ 2023-02-26 15:38:29,800 32k INFO ====> Epoch: 156
472
+ 2023-02-26 15:39:43,051 32k INFO Train Epoch: 157 [36%]
473
+ 2023-02-26 15:39:43,051 32k INFO [2.654205560684204, 1.9615615606307983, 10.93551254272461, 16.712379455566406, 0.4804637134075165, 25800, 9.806876998751865e-05]
474
+ 2023-02-26 15:41:13,122 32k INFO ====> Epoch: 157
475
+ 2023-02-26 15:42:56,289 32k INFO Train Epoch: 158 [58%]
476
+ 2023-02-26 15:42:56,289 32k INFO [2.6511435508728027, 2.1100852489471436, 8.584171295166016, 15.892630577087402, 0.90334153175354, 26000, 9.80565113912702e-05]
477
+ 2023-02-26 15:43:00,797 32k INFO Saving model and optimizer state at iteration 158 to ./logs\32k\G_26000.pth
478
+ 2023-02-26 15:43:20,004 32k INFO Saving model and optimizer state at iteration 158 to ./logs\32k\D_26000.pth
479
+ 2023-02-26 15:44:23,290 32k INFO ====> Epoch: 158
480
+ 2023-02-26 15:46:36,868 32k INFO Train Epoch: 159 [79%]
481
+ 2023-02-26 15:46:36,869 32k INFO [2.527649402618408, 2.3790462017059326, 9.914752960205078, 17.783714294433594, 0.7191269397735596, 26200, 9.804425432734629e-05]
482
+ 2023-02-26 15:47:06,754 32k INFO ====> Epoch: 159
483
+ 2023-02-26 15:49:49,795 32k INFO ====> Epoch: 160
484
+ 2023-02-26 15:50:11,611 32k INFO Train Epoch: 161 [0%]
485
+ 2023-02-26 15:50:11,611 32k INFO [2.4440112113952637, 2.4401893615722656, 11.61741828918457, 17.544946670532227, 0.9068783521652222, 26400, 9.801974479570593e-05]
486
+ 2023-02-26 15:52:33,511 32k INFO ====> Epoch: 161
487
+ 2023-02-26 15:53:25,221 32k INFO Train Epoch: 162 [21%]
488
+ 2023-02-26 15:53:25,221 32k INFO [2.300156593322754, 2.5606746673583984, 11.57729434967041, 19.577245712280273, 0.6100308895111084, 26600, 9.800749232760646e-05]
489
+ 2023-02-26 15:55:16,561 32k INFO ====> Epoch: 162
490
+ 2023-02-26 15:56:38,386 32k INFO Train Epoch: 163 [42%]
491
+ 2023-02-26 15:56:38,387 32k INFO [2.491887331008911, 2.174567937850952, 8.945231437683105, 16.8068790435791, 0.6337096095085144, 26800, 9.79952413910655e-05]
492
+ 2023-02-26 15:57:59,818 32k INFO ====> Epoch: 163
493
+ 2023-02-26 15:59:51,615 32k INFO Train Epoch: 164 [64%]
494
+ 2023-02-26 15:59:51,615 32k INFO [2.4709670543670654, 2.4039604663848877, 10.360793113708496, 15.725382804870605, 0.9451103210449219, 27000, 9.798299198589162e-05]
495
+ 2023-02-26 15:59:56,072 32k INFO Saving model and optimizer state at iteration 164 to ./logs\32k\G_27000.pth
496
+ 2023-02-26 16:00:12,933 32k INFO Saving model and optimizer state at iteration 164 to ./logs\32k\D_27000.pth
497
+ 2023-02-26 16:01:07,609 32k INFO ====> Epoch: 164
498
+ 2023-02-26 16:03:29,754 32k INFO Train Epoch: 165 [85%]
499
+ 2023-02-26 16:03:29,754 32k INFO [2.4107818603515625, 2.2964704036712646, 10.976929664611816, 18.840293884277344, 0.8151223063468933, 27200, 9.797074411189339e-05]
500
+ 2023-02-26 16:03:50,996 32k INFO ====> Epoch: 165
501
+ 2023-02-26 16:06:33,906 32k INFO ====> Epoch: 166
502
+ 2023-02-26 16:07:04,238 32k INFO Train Epoch: 167 [6%]
503
+ 2023-02-26 16:07:04,239 32k INFO [2.4089515209198, 2.457214593887329, 9.272985458374023, 17.213451385498047, 0.820664644241333, 27400, 9.794625295665828e-05]
504
+ 2023-02-26 16:09:17,110 32k INFO ====> Epoch: 167
505
+ 2023-02-26 16:10:17,435 32k INFO Train Epoch: 168 [27%]
506
+ 2023-02-26 16:10:17,436 32k INFO [2.2713615894317627, 2.1985321044921875, 11.777606010437012, 20.058086395263672, 0.7406303882598877, 27600, 9.79340096750387e-05]
507
+ 2023-02-26 16:12:00,339 32k INFO ====> Epoch: 168
508
+ 2023-02-26 16:13:30,855 32k INFO Train Epoch: 169 [48%]
509
+ 2023-02-26 16:13:30,855 32k INFO [2.4570372104644775, 2.2436656951904297, 11.803679466247559, 17.833927154541016, 0.6307259798049927, 27800, 9.792176792382932e-05]
510
+ 2023-02-26 16:14:43,793 32k INFO ====> Epoch: 169
511
+ 2023-02-26 16:16:43,976 32k INFO Train Epoch: 170 [70%]
512
+ 2023-02-26 16:16:43,976 32k INFO [2.246384382247925, 2.4739303588867188, 15.200418472290039, 21.546630859375, 0.6328362226486206, 28000, 9.790952770283884e-05]
513
+ 2023-02-26 16:16:48,430 32k INFO Saving model and optimizer state at iteration 170 to ./logs\32k\G_28000.pth
514
+ 2023-02-26 16:17:01,674 32k INFO Saving model and optimizer state at iteration 170 to ./logs\32k\D_28000.pth
515
+ 2023-02-26 16:17:47,558 32k INFO ====> Epoch: 170
516
+ 2023-02-26 16:20:18,059 32k INFO Train Epoch: 171 [91%]
517
+ 2023-02-26 16:20:18,059 32k INFO [2.306701898574829, 2.6621859073638916, 10.908252716064453, 17.372089385986328, 0.784980297088623, 28200, 9.789728901187598e-05]
518
+ 2023-02-26 16:20:30,684 32k INFO ====> Epoch: 171
519
+ 2023-02-26 16:23:13,289 32k INFO ====> Epoch: 172
520
+ 2023-02-26 16:23:52,145 32k INFO Train Epoch: 173 [12%]
521
+ 2023-02-26 16:23:52,145 32k INFO [2.399289131164551, 2.3199777603149414, 7.142980098724365, 13.057271003723145, 0.8620398640632629, 28400, 9.787281621926815e-05]
522
+ 2023-02-26 16:25:56,218 32k INFO ====> Epoch: 173
523
+ 2023-02-26 16:27:05,005 32k INFO Train Epoch: 174 [33%]
524
+ 2023-02-26 16:27:05,005 32k INFO [2.470820188522339, 2.4600534439086914, 6.626948356628418, 13.11660385131836, 0.841496467590332, 28600, 9.786058211724074e-05]
525
+ 2023-02-26 16:28:39,432 32k INFO ====> Epoch: 174
526
+ 2023-02-26 16:30:18,516 32k INFO Train Epoch: 175 [55%]
527
+ 2023-02-26 16:30:18,516 32k INFO [2.4205260276794434, 2.25736927986145, 13.192204475402832, 19.85516357421875, 0.7485554218292236, 28800, 9.784834954447608e-05]
528
+ 2023-02-26 16:31:22,767 32k INFO ====> Epoch: 175
529
+ 2023-02-26 16:33:31,581 32k INFO Train Epoch: 176 [76%]
530
+ 2023-02-26 16:33:31,582 32k INFO [2.455970287322998, 2.2141826152801514, 13.451642036437988, 19.803707122802734, 0.9468473792076111, 29000, 9.783611850078301e-05]
531
+ 2023-02-26 16:33:36,138 32k INFO Saving model and optimizer state at iteration 176 to ./logs\32k\G_29000.pth
532
+ 2023-02-26 16:33:53,144 32k INFO Saving model and optimizer state at iteration 176 to ./logs\32k\D_29000.pth
533
+ 2023-02-26 16:34:30,652 32k INFO ====> Epoch: 176
534
+ 2023-02-26 16:37:10,010 32k INFO Train Epoch: 177 [97%]
535
+ 2023-02-26 16:37:10,011 32k INFO [2.232689380645752, 2.6161293983459473, 10.906001091003418, 17.958105087280273, 0.6369590163230896, 29200, 9.782388898597041e-05]
536
+ 2023-02-26 16:37:13,995 32k INFO ====> Epoch: 177
537
+ 2023-02-26 16:39:57,216 32k INFO ====> Epoch: 178
538
+ 2023-02-26 16:40:44,663 32k INFO Train Epoch: 179 [18%]
539
+ 2023-02-26 16:40:44,664 32k INFO [2.267847776412964, 2.723783493041992, 14.152230262756348, 18.365135192871094, 1.0862221717834473, 29400, 9.779943454222217e-05]
540
+ 2023-02-26 16:42:40,681 32k INFO ====> Epoch: 179
541
+ 2023-02-26 16:43:58,308 32k INFO Train Epoch: 180 [39%]
542
+ 2023-02-26 16:43:58,308 32k INFO [2.529902696609497, 2.3272926807403564, 9.528871536254883, 18.957096099853516, 0.9033053517341614, 29600, 9.778720961290439e-05]
543
+ 2023-02-26 16:45:24,000 32k INFO ====> Epoch: 180
544
+ 2023-02-26 16:47:11,592 32k INFO Train Epoch: 181 [61%]
545
+ 2023-02-26 16:47:11,593 32k INFO [2.2471132278442383, 2.5182602405548096, 11.21172046661377, 16.198549270629883, 0.6432769298553467, 29800, 9.777498621170277e-05]
546
+ 2023-02-26 16:48:07,243 32k INFO ====> Epoch: 181
547
+ 2023-02-26 16:50:24,659 32k INFO Train Epoch: 182 [82%]
548
+ 2023-02-26 16:50:24,659 32k INFO [2.3189096450805664, 2.2408602237701416, 11.619179725646973, 19.898473739624023, 0.5813133716583252, 30000, 9.776276433842631e-05]
549
+ 2023-02-26 16:50:29,138 32k INFO Saving model and optimizer state at iteration 182 to ./logs\32k\G_30000.pth
550
+ 2023-02-26 16:50:46,475 32k INFO Saving model and optimizer state at iteration 182 to ./logs\32k\D_30000.pth
551
+ 2023-02-26 16:51:15,396 32k INFO ====> Epoch: 182
552
+ 2023-02-26 16:53:58,501 32k INFO ====> Epoch: 183
553
+ 2023-02-26 16:54:24,532 32k INFO Train Epoch: 184 [3%]
554
+ 2023-02-26 16:54:24,532 32k INFO [2.3636772632598877, 2.432774782180786, 12.960881233215332, 19.998132705688477, 0.9693511128425598, 30200, 9.773832517488488e-05]
555
+ 2023-02-26 16:56:41,437 32k INFO ====> Epoch: 184
556
+ 2023-02-26 16:57:37,255 32k INFO Train Epoch: 185 [24%]
557
+ 2023-02-26 16:57:37,255 32k INFO [2.5370290279388428, 2.19403338432312, 11.489301681518555, 18.055042266845703, 0.938399076461792, 30400, 9.772610788423802e-05]
558
+ 2023-02-26 16:59:24,316 32k INFO ====> Epoch: 185
559
+ 2023-02-26 17:00:50,418 32k INFO Train Epoch: 186 [45%]
560
+ 2023-02-26 17:00:50,418 32k INFO [2.288398265838623, 2.4643843173980713, 13.293804168701172, 19.007169723510742, 0.821979820728302, 30600, 9.771389212075249e-05]
561
+ 2023-02-26 17:02:07,421 32k INFO ====> Epoch: 186
562
+ 2023-02-26 17:04:03,697 32k INFO Train Epoch: 187 [67%]
563
+ 2023-02-26 17:04:03,697 32k INFO [2.5506398677825928, 2.305884838104248, 9.674799919128418, 17.26383399963379, 0.8924499154090881, 30800, 9.77016778842374e-05]
564
+ 2023-02-26 17:04:50,640 32k INFO ====> Epoch: 187
565
+ 2023-02-26 17:07:16,910 32k INFO Train Epoch: 188 [88%]
566
+ 2023-02-26 17:07:16,911 32k INFO [2.2533488273620605, 2.5775458812713623, 9.142365455627441, 14.616179466247559, 0.9205642342567444, 31000, 9.768946517450186e-05]
567
+ 2023-02-26 17:07:21,445 32k INFO Saving model and optimizer state at iteration 188 to ./logs\32k\G_31000.pth
568
+ 2023-02-26 17:07:36,357 32k INFO Saving model and optimizer state at iteration 188 to ./logs\32k\D_31000.pth
569
+ 2023-02-26 17:07:56,519 32k INFO ====> Epoch: 188
570
+ 2023-02-26 17:10:39,474 32k INFO ====> Epoch: 189
571
+ 2023-02-26 17:11:14,118 32k INFO Train Epoch: 190 [9%]
572
+ 2023-02-26 17:11:14,118 32k INFO [2.4963176250457764, 2.191936492919922, 8.220184326171875, 15.837531089782715, 0.7903323173522949, 31200, 9.766504433460612e-05]
573
+ 2023-02-26 17:13:22,812 32k INFO ====> Epoch: 190
574
+ 2023-02-26 17:14:27,380 32k INFO Train Epoch: 191 [30%]
575
+ 2023-02-26 17:14:27,381 32k INFO [2.4663021564483643, 2.284228801727295, 11.195096969604492, 19.334308624267578, 0.8231672644615173, 31400, 9.765283620406429e-05]
576
+ 2023-02-26 17:16:05,903 32k INFO ====> Epoch: 191
577
+ 2023-02-26 17:17:40,428 32k INFO Train Epoch: 192 [52%]
578
+ 2023-02-26 17:17:40,428 32k INFO [2.4471681118011475, 2.4072704315185547, 8.412674903869629, 18.166622161865234, 0.9922705888748169, 31600, 9.764062959953878e-05]
579
+ 2023-02-26 17:18:49,074 32k INFO ====> Epoch: 192
580
+ 2023-02-26 17:20:54,252 32k INFO Train Epoch: 193 [73%]
581
+ 2023-02-26 17:20:54,253 32k INFO [2.1911771297454834, 2.7126452922821045, 11.184356689453125, 17.750993728637695, 1.0288227796554565, 31800, 9.762842452083883e-05]
582
+ 2023-02-26 17:21:32,780 32k INFO ====> Epoch: 193
583
+ 2023-02-26 17:24:07,741 32k INFO Train Epoch: 194 [94%]
584
+ 2023-02-26 17:24:07,742 32k INFO [2.3111774921417236, 2.240935802459717, 11.050326347351074, 20.342138290405273, 0.5650017857551575, 32000, 9.761622096777372e-05]
585
+ 2023-02-26 17:24:12,415 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\G_32000.pth
586
+ 2023-02-26 17:24:29,104 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\D_32000.pth
587
+ 2023-02-26 17:24:41,097 32k INFO ====> Epoch: 194
588
+ 2023-02-26 17:27:23,969 32k INFO ====> Epoch: 195
589
+ 2023-02-26 17:28:07,238 32k INFO Train Epoch: 196 [15%]
590
+ 2023-02-26 17:28:07,239 32k INFO [1.9948863983154297, 3.002936363220215, 11.8908052444458, 17.03791618347168, 0.7017788290977478, 32200, 9.759181843778522e-05]
591
+ 2023-02-26 17:30:07,220 32k INFO ====> Epoch: 196
592
+ 2023-02-26 17:31:20,166 32k INFO Train Epoch: 197 [36%]
593
+ 2023-02-26 17:31:20,167 32k INFO [2.5854690074920654, 2.1435956954956055, 7.690774440765381, 17.148244857788086, 0.5889198780059814, 32400, 9.757961946048049e-05]
594
+ 2023-02-26 17:32:50,210 32k INFO ====> Epoch: 197
595
+ 2023-02-26 17:34:33,444 32k INFO Train Epoch: 198 [58%]
596
+ 2023-02-26 17:34:33,444 32k INFO [2.352189302444458, 2.4562466144561768, 9.4241304397583, 17.52354621887207, 0.9340884685516357, 32600, 9.756742200804793e-05]
597
+ 2023-02-26 17:35:33,194 32k INFO ====> Epoch: 198
598
+ 2023-02-26 17:37:46,492 32k INFO Train Epoch: 199 [79%]
599
+ 2023-02-26 17:37:46,493 32k INFO [2.4146697521209717, 2.7238364219665527, 12.723920822143555, 18.988252639770508, 0.7106491923332214, 32800, 9.755522608029692e-05]
600
+ 2023-02-26 17:38:16,258 32k INFO ====> Epoch: 199
601
+ 2023-02-26 17:40:58,717 32k INFO ====> Epoch: 200
602
+ 2023-02-26 17:41:20,503 32k INFO Train Epoch: 201 [0%]
603
+ 2023-02-26 17:41:20,503 32k INFO [2.367798089981079, 2.7577104568481445, 10.98935317993164, 17.85304832458496, 0.918219268321991, 33000, 9.753083879807726e-05]
604
+ 2023-02-26 17:41:24,949 32k INFO Saving model and optimizer state at iteration 201 to ./logs\32k\G_33000.pth
605
+ 2023-02-26 17:41:44,987 32k INFO Saving model and optimizer state at iteration 201 to ./logs\32k\D_33000.pth
606
+ 2023-02-26 17:44:09,525 32k INFO ====> Epoch: 201
607
+ 2023-02-26 17:45:01,443 32k INFO Train Epoch: 202 [21%]
608
+ 2023-02-26 17:45:01,444 32k INFO [2.361729145050049, 2.3721957206726074, 9.747944831848145, 14.154775619506836, 0.863827109336853, 33200, 9.75186474432275e-05]
609
+ 2023-02-26 17:46:53,304 32k INFO ====> Epoch: 202
610
+ 2023-02-26 17:48:15,322 32k INFO Train Epoch: 203 [42%]
611
+ 2023-02-26 17:48:15,323 32k INFO [2.4549036026000977, 2.087249279022217, 9.122322082519531, 14.459492683410645, 0.8657273054122925, 33400, 9.750645761229709e-05]
612
+ 2023-02-26 17:49:36,813 32k INFO ====> Epoch: 203
613
+ 2023-02-26 17:51:28,792 32k INFO Train Epoch: 204 [64%]
614
+ 2023-02-26 17:51:28,793 32k INFO [2.574124813079834, 2.3560216426849365, 10.238191604614258, 17.874080657958984, 0.7011803388595581, 33600, 9.749426930509556e-05]
615
+ 2023-02-26 17:52:20,099 32k INFO ====> Epoch: 204
616
+ 2023-02-26 17:54:41,919 32k INFO Train Epoch: 205 [85%]
617
+ 2023-02-26 17:54:41,919 32k INFO [2.441328763961792, 2.4306070804595947, 11.29406452178955, 17.06692123413086, 0.48038169741630554, 33800, 9.748208252143241e-05]
618
+ 2023-02-26 17:55:03,141 32k INFO ====> Epoch: 205
619
+ 2023-02-26 17:57:45,959 32k INFO ====> Epoch: 206
620
+ 2023-02-26 17:58:16,204 32k INFO Train Epoch: 207 [6%]
621
+ 2023-02-26 17:58:16,204 32k INFO [2.772115468978882, 2.026067018508911, 7.555477142333984, 12.804871559143066, 0.48970046639442444, 34000, 9.745771352395957e-05]
622
+ 2023-02-26 17:58:20,721 32k INFO Saving model and optimizer state at iteration 207 to ./logs\32k\G_34000.pth
623
+ 2023-02-26 17:58:38,922 32k INFO Saving model and optimizer state at iteration 207 to ./logs\32k\D_34000.pth
624
+ 2023-02-26 18:00:55,455 32k INFO ====> Epoch: 207
625
+ 2023-02-26 18:01:56,077 32k INFO Train Epoch: 208 [27%]
626
+ 2023-02-26 18:01:56,077 32k INFO [2.3201816082000732, 2.404618263244629, 13.214925765991211, 19.893857955932617, 0.8915579915046692, 34200, 9.744553130976908e-05]
627
+ 2023-02-26 18:03:39,028 32k INFO ====> Epoch: 208
628
+ 2023-02-26 18:05:09,684 32k INFO Train Epoch: 209 [48%]
629
+ 2023-02-26 18:05:09,685 32k INFO [2.543070077896118, 2.1961042881011963, 9.725593566894531, 14.015393257141113, 0.5515499711036682, 34400, 9.743335061835535e-05]
630
+ 2023-02-26 18:06:22,420 32k INFO ====> Epoch: 209
631
+ 2023-02-26 18:08:22,834 32k INFO Train Epoch: 210 [70%]
632
+ 2023-02-26 18:08:22,835 32k INFO [2.291600465774536, 2.7387681007385254, 11.23803424835205, 18.91277313232422, 0.5562854409217834, 34600, 9.742117144952805e-05]
633
+ 2023-02-26 18:09:05,526 32k INFO ====> Epoch: 210
634
+ 2023-02-26 18:11:36,675 32k INFO Train Epoch: 211 [91%]
635
+ 2023-02-26 18:11:36,676 32k INFO [2.4361047744750977, 2.432372808456421, 6.777080535888672, 13.665382385253906, 0.6072559356689453, 34800, 9.740899380309685e-05]
636
+ 2023-02-26 18:11:49,390 32k INFO ====> Epoch: 211
637
+ 2023-02-26 18:14:33,046 32k INFO ====> Epoch: 212
638
+ 2023-02-26 18:15:12,040 32k INFO Train Epoch: 213 [12%]
639
+ 2023-02-26 18:15:12,040 32k INFO [2.706439733505249, 1.891126036643982, 7.307193279266357, 14.386757850646973, 0.69676274061203, 35000, 9.73846430766616e-05]
640
+ 2023-02-26 18:15:16,522 32k INFO Saving model and optimizer state at iteration 213 to ./logs\32k\G_35000.pth
641
+ 2023-02-26 18:15:32,644 32k INFO Saving model and optimizer state at iteration 213 to ./logs\32k\D_35000.pth
642
+ 2023-02-26 18:17:40,257 32k INFO ====> Epoch: 213
643
+ 2023-02-26 18:18:49,631 32k INFO Train Epoch: 214 [33%]
644
+ 2023-02-26 18:18:49,632 32k INFO [2.469897747039795, 2.5583174228668213, 10.134647369384766, 18.64297866821289, 0.6196038722991943, 35200, 9.7372469996277e-05]
645
+ 2023-02-26 18:20:23,825 32k INFO ====> Epoch: 214
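The train.log entries above follow a fixed shape: each "INFO [ ... ]" line carries five loss terms followed by the global step and the current learning rate (in VITS-style trainers such as so-vits-svc these are typically the discriminator, generator, feature-matching, mel, and KL losses, in that order — an assumption from the usual training script, not stated in the log itself). A minimal parsing sketch under that assumption, not part of this commit:

import ast
import re

LOSS_LINE = re.compile(r"INFO \[(.*)\]$")  # matches only the bracketed loss lines

def parse_train_log(path):
    # Returns one record per logged step: loss terms, global step, learning rate.
    rows = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            m = LOSS_LINE.search(line.strip())
            if not m:  # skips "Train Epoch", "Saving model", "====> Epoch" lines
                continue
            *losses, step, lr = ast.literal_eval("[" + m.group(1) + "]")
            rows.append({"step": int(step), "lr": lr, "losses": losses})
    return rows

rows = parse_train_log("32k-est/train.log")
print(rows[-1])  # last entry above: step 35200, lr ~9.7372e-05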
32k-saika/D_12000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67f9271e63af615ae004cb576b83458431114840c2742416c1c423302814a324
3
+ size 561098185
32k-saika/D_15000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5633d4fa6ade47710640f612f656960471ecb91ff73fb8236262dfca18c5c8f7
3
+ size 561098185
32k-saika/D_6000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:08b2155633d9fd041c4566b431bb04d3da4f0249a7f5e505243891bdb384e4a8
3
+ size 561098185
32k-saika/G_12000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44672632663661d93924b7d7240a851e4081be370d545b95ba988887b732ee1b
3
+ size 699505437
32k-saika/G_15000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:25b4017c91fac1a39a920855a034879659d7cac2d35e510930251a62f40846fb
3
+ size 699505437
32k-saika/G_6000.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37738cf1daa35c80a4781a11a7ccde92160142afda6f3b28cda7525dcb9bca3a
3
+ size 699505437
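Each ADDED .pth above is a Git LFS pointer, not the checkpoint itself: three lines giving the spec version, the sha256 oid of the real payload, and its byte size. A minimal verification sketch (not part of this repo; the path and oid are one example taken from this commit) that checks a downloaded checkpoint against its pointer:

import hashlib
import os

def read_pointer(pointer_text):
    # "version ...", "oid sha256:<hex>", "size <bytes>" -> (hex digest, size)
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    return fields["oid"].split(":", 1)[1], int(fields["size"])

def verify(pth_path, pointer_text):
    expected_oid, expected_size = read_pointer(pointer_text)
    h = hashlib.sha256()
    with open(pth_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream 1 MiB at a time
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(pth_path) == expected_size

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:37738cf1daa35c80a4781a11a7ccde92160142afda6f3b28cda7525dcb9bca3a
size 699505437"""
print(verify("32k-saika/G_6000.pth", pointer))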
32k-saika/config.json ADDED
@@ -0,0 +1,90 @@
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 10000,
7
+ "learning_rate": 0.0001,
8
+ "betas": [
9
+ 0.8,
10
+ 0.99
11
+ ],
12
+ "eps": 1e-09,
13
+ "batch_size": 5,
14
+ "fp16_run": false,
15
+ "lr_decay": 0.999875,
16
+ "segment_size": 17920,
17
+ "init_lr_ratio": 1,
18
+ "warmup_epochs": 0,
19
+ "c_mel": 45,
20
+ "c_kl": 1.0,
21
+ "use_sr": true,
22
+ "max_speclen": 384,
23
+ "port": "8001"
24
+ },
25
+ "data": {
26
+ "training_files": "filelists/train.txt",
27
+ "validation_files": "filelists/val.txt",
28
+ "max_wav_value": 32768.0,
29
+ "sampling_rate": 32000,
30
+ "filter_length": 1280,
31
+ "hop_length": 320,
32
+ "win_length": 1280,
33
+ "n_mel_channels": 80,
34
+ "mel_fmin": 0.0,
35
+ "mel_fmax": null
36
+ },
37
+ "model": {
38
+ "inter_channels": 192,
39
+ "hidden_channels": 192,
40
+ "filter_channels": 768,
41
+ "n_heads": 2,
42
+ "n_layers": 6,
43
+ "kernel_size": 3,
44
+ "p_dropout": 0.1,
45
+ "resblock": "1",
46
+ "resblock_kernel_sizes": [
47
+ 3,
48
+ 7,
49
+ 11
50
+ ],
51
+ "resblock_dilation_sizes": [
52
+ [
53
+ 1,
54
+ 3,
55
+ 5
56
+ ],
57
+ [
58
+ 1,
59
+ 3,
60
+ 5
61
+ ],
62
+ [
63
+ 1,
64
+ 3,
65
+ 5
66
+ ]
67
+ ],
68
+ "upsample_rates": [
69
+ 10,
70
+ 8,
71
+ 2,
72
+ 2
73
+ ],
74
+ "upsample_initial_channel": 512,
75
+ "upsample_kernel_sizes": [
76
+ 16,
77
+ 16,
78
+ 4,
79
+ 4
80
+ ],
81
+ "n_layers_q": 3,
82
+ "use_spectral_norm": false,
83
+ "gin_channels": 256,
84
+ "ssl_dim": 256,
85
+ "n_speakers": 2
86
+ },
87
+ "spk": {
88
+ "saika": 0
89
+ }
90
+ }
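A few quantities in this config can be cross-checked against each other and against the log that follows: the 320-sample hop at 32 kHz gives 100 spectrogram frames per second, segment_size 17920 is 0.56 s of audio (56 frames), the vocoder's upsample_rates multiply back to the hop length, and learning_rate decays by lr_decay once per epoch (consistent with the logged 9.99750015625e-05 at epoch 3 below). A minimal sketch of those checks, assuming the one-decay-per-epoch schedule:

import json

with open("32k-saika/config.json", encoding="utf-8") as f:
    cfg = json.load(f)
data, train, model = cfg["data"], cfg["train"], cfg["model"]

print(data["sampling_rate"] / data["hop_length"])     # 100.0 frames per second
print(train["segment_size"] / data["sampling_rate"])  # 0.56 s per training segment

total = 1
for r in model["upsample_rates"]:                     # 10 * 8 * 2 * 2
    total *= r
assert total == data["hop_length"]                    # == 320

# Assumes one decay step per epoch, applied twice by epoch 3.
print(train["learning_rate"] * train["lr_decay"] ** 2)  # ~9.99750015625e-05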
32k-saika/train.log ADDED
@@ -0,0 +1,362 @@
1
+ 2023-02-22 14:12:56,950 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'saika': 0}, 'model_dir': './logs\\32k'}
2
+ 2023-02-22 14:12:56,950 32k WARNING K:\AI\so-vits-svc-32k is not a git repository, therefore hash value comparison will be ignored.
3
+ 2023-02-22 14:13:01,617 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
4
+ 2023-02-22 14:13:02,013 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
5
+ 2023-02-22 14:13:28,354 32k INFO Train Epoch: 1 [0%]
6
+ 2023-02-22 14:13:28,355 32k INFO [3.187079668045044, 3.543747663497925, 12.749074935913086, 35.17381286621094, 6.83956241607666, 0, 0.0001]
7
+ 2023-02-22 14:13:34,228 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
8
+ 2023-02-22 14:13:52,172 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
9
+ 2023-02-22 14:15:03,077 32k INFO ====> Epoch: 1
10
+ 2023-02-22 14:16:31,043 32k INFO ====> Epoch: 2
11
+ 2023-02-22 14:17:26,851 32k INFO Train Epoch: 3 [50%]
12
+ 2023-02-22 14:17:26,851 32k INFO [2.488687515258789, 2.5710129737854004, 14.215274810791016, 26.32855796813965, 1.4047647714614868, 200, 9.99750015625e-05]
13
+ 2023-02-22 14:17:59,730 32k INFO ====> Epoch: 3
14
+ 2023-02-22 14:19:27,494 32k INFO ====> Epoch: 4
15
+ 2023-02-22 14:20:55,315 32k INFO ====> Epoch: 5
16
+ 2023-02-22 14:21:17,292 32k INFO Train Epoch: 6 [0%]
17
+ 2023-02-22 14:21:17,292 32k INFO [2.2817749977111816, 2.477022171020508, 12.80789852142334, 22.15078353881836, 1.1265417337417603, 400, 9.993751562304699e-05]
18
+ 2023-02-22 14:32:12,662 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 5, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'saika': 0}, 'model_dir': './logs\\32k'}
19
+ 2023-02-22 14:32:12,663 32k WARNING K:\AI\so-vits-svc-32k is not a git repository, therefore hash value comparison will be ignored.
20
+ 2023-02-22 14:32:17,462 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
21
+ 2023-02-22 14:32:17,863 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
22
+ 2023-02-22 14:32:43,402 32k INFO Train Epoch: 1 [0%]
23
+ 2023-02-22 14:32:43,402 32k INFO [3.086385726928711, 3.1004512310028076, 11.565768241882324, 32.38859558105469, 5.992624759674072, 0, 0.0001]
24
+ 2023-02-22 14:32:49,281 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
25
+ 2023-02-22 14:33:08,839 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
26
+ 2023-02-22 14:34:22,487 32k INFO ====> Epoch: 1
27
+ 2023-02-22 14:35:53,637 32k INFO ====> Epoch: 2
28
+ 2023-02-22 14:36:21,152 32k INFO Train Epoch: 3 [8%]
29
+ 2023-02-22 14:36:21,152 32k INFO [2.3054869174957275, 2.327169418334961, 12.3760347366333, 22.735485076904297, 0.9988258481025696, 200, 9.99750015625e-05]
30
+ 2023-02-22 14:37:24,967 32k INFO ====> Epoch: 3
31
+ 2023-02-22 14:38:56,168 32k INFO ====> Epoch: 4
32
+ 2023-02-22 14:39:29,664 32k INFO Train Epoch: 5 [17%]
33
+ 2023-02-22 14:39:29,664 32k INFO [2.247507095336914, 2.4475936889648438, 12.479228019714355, 23.749866485595703, 1.3646601438522339, 400, 9.995000937421877e-05]
34
+ 2023-02-22 14:40:27,690 32k INFO ====> Epoch: 5
35
+ 2023-02-22 14:41:58,975 32k INFO ====> Epoch: 6
36
+ 2023-02-22 14:42:38,309 32k INFO Train Epoch: 7 [25%]
37
+ 2023-02-22 14:42:38,310 32k INFO [2.720693349838257, 2.3375821113586426, 9.826765060424805, 21.648380279541016, 0.9433642625808716, 600, 9.99250234335941e-05]
38
+ 2023-02-22 14:43:30,606 32k INFO ====> Epoch: 7
39
+ 2023-02-22 14:45:01,819 32k INFO ====> Epoch: 8
40
+ 2023-02-22 14:45:47,085 32k INFO Train Epoch: 9 [33%]
41
+ 2023-02-22 14:45:47,085 32k INFO [2.5194616317749023, 2.6869425773620605, 12.535841941833496, 22.44904327392578, 1.2266863584518433, 800, 9.990004373906418e-05]
42
+ 2023-02-22 14:46:33,458 32k INFO ====> Epoch: 9
43
+ 2023-02-22 14:48:04,723 32k INFO ====> Epoch: 10
44
+ 2023-02-22 14:48:55,750 32k INFO Train Epoch: 11 [42%]
45
+ 2023-02-22 14:48:55,750 32k INFO [2.049856424331665, 3.2148919105529785, 9.496326446533203, 15.099220275878906, 1.1717641353607178, 1000, 9.987507028906759e-05]
46
+ 2023-02-22 14:49:00,183 32k INFO Saving model and optimizer state at iteration 11 to ./logs\32k\G_1000.pth
47
+ 2023-02-22 14:49:19,296 32k INFO Saving model and optimizer state at iteration 11 to ./logs\32k\D_1000.pth
48
+ 2023-02-22 14:50:03,757 32k INFO ====> Epoch: 11
49
+ 2023-02-22 14:51:35,495 32k INFO ====> Epoch: 12
50
+ 2023-02-22 14:52:32,553 32k INFO Train Epoch: 13 [50%]
51
+ 2023-02-22 14:52:32,554 32k INFO [2.489741802215576, 2.4319345951080322, 11.793535232543945, 20.218076705932617, 0.7845316529273987, 1200, 9.98501030820433e-05]
52
+ 2023-02-22 14:53:07,458 32k INFO ====> Epoch: 13
53
+ 2023-02-22 14:54:39,323 32k INFO ====> Epoch: 14
54
+ 2023-02-22 14:55:42,302 32k INFO Train Epoch: 15 [58%]
55
+ 2023-02-22 14:55:42,302 32k INFO [2.609837055206299, 2.1415061950683594, 8.502999305725098, 19.083311080932617, 1.204352855682373, 1400, 9.982514211643064e-05]
56
+ 2023-02-22 14:56:11,452 32k INFO ====> Epoch: 15
57
+ 2023-02-22 14:57:43,335 32k INFO ====> Epoch: 16
58
+ 2023-02-22 14:58:52,204 32k INFO Train Epoch: 17 [67%]
59
+ 2023-02-22 14:58:52,205 32k INFO [2.5788638591766357, 2.1364264488220215, 7.731012344360352, 15.810824394226074, 1.110003113746643, 1600, 9.980018739066937e-05]
60
+ 2023-02-22 14:59:15,498 32k INFO ====> Epoch: 17
61
+ 2023-02-22 15:00:47,193 32k INFO ====> Epoch: 18
62
+ 2023-02-22 15:02:01,857 32k INFO Train Epoch: 19 [75%]
63
+ 2023-02-22 15:02:01,857 32k INFO [2.5087671279907227, 2.266523838043213, 9.564532279968262, 18.576448440551758, 0.7115161418914795, 1800, 9.977523890319963e-05]
64
+ 2023-02-22 15:02:19,244 32k INFO ====> Epoch: 19
65
+ 2023-02-22 15:03:51,055 32k INFO ====> Epoch: 20
66
+ 2023-02-22 15:05:11,654 32k INFO Train Epoch: 21 [83%]
67
+ 2023-02-22 15:05:11,655 32k INFO [2.3199269771575928, 2.477095603942871, 11.856741905212402, 20.60849952697754, 0.9765581488609314, 2000, 9.975029665246193e-05]
68
+ 2023-02-22 15:05:16,169 32k INFO Saving model and optimizer state at iteration 21 to ./logs\32k\G_2000.pth
69
+ 2023-02-22 15:05:35,247 32k INFO Saving model and optimizer state at iteration 21 to ./logs\32k\D_2000.pth
70
+ 2023-02-22 15:05:50,320 32k INFO ====> Epoch: 21
71
+ 2023-02-22 15:07:22,122 32k INFO ====> Epoch: 22
72
+ 2023-02-22 15:08:48,724 32k INFO Train Epoch: 23 [92%]
73
+ 2023-02-22 15:08:48,724 32k INFO [2.5126163959503174, 2.3070528507232666, 11.5335693359375, 21.152307510375977, 1.1387696266174316, 2200, 9.972536063689719e-05]
74
+ 2023-02-22 15:08:54,325 32k INFO ====> Epoch: 23
75
+ 2023-02-22 15:10:26,193 32k INFO ====> Epoch: 24
76
+ 2023-02-22 15:11:58,042 32k INFO ====> Epoch: 25
77
+ 2023-02-22 15:12:19,978 32k INFO Train Epoch: 26 [0%]
78
+ 2023-02-22 15:12:19,978 32k INFO [2.566260814666748, 2.118185520172119, 8.801782608032227, 16.81813621520996, 0.8712365627288818, 2400, 9.968796830108985e-05]
79
+ 2023-02-22 15:13:30,299 32k INFO ====> Epoch: 26
80
+ 2023-02-22 15:15:02,065 32k INFO ====> Epoch: 27
81
+ 2023-02-22 15:15:29,847 32k INFO Train Epoch: 28 [8%]
82
+ 2023-02-22 15:15:29,847 32k INFO [2.5252060890197754, 2.0401153564453125, 10.247071266174316, 18.943756103515625, 1.3569800853729248, 2600, 9.966304786663908e-05]
83
+ 2023-02-22 15:16:34,164 32k INFO ====> Epoch: 28
84
+ 2023-02-22 15:18:06,132 32k INFO ====> Epoch: 29
85
+ 2023-02-22 15:18:39,775 32k INFO Train Epoch: 30 [17%]
86
+ 2023-02-22 15:18:39,775 32k INFO [2.5057239532470703, 2.326035976409912, 11.950654983520508, 22.501440048217773, 1.2382237911224365, 2800, 9.963813366190753e-05]
87
+ 2023-02-22 15:19:38,222 32k INFO ====> Epoch: 30
88
+ 2023-02-22 15:21:10,107 32k INFO ====> Epoch: 31
89
+ 2023-02-22 15:21:49,589 32k INFO Train Epoch: 32 [25%]
90
+ 2023-02-22 15:21:49,590 32k INFO [2.2636756896972656, 2.373335123062134, 11.810818672180176, 19.981029510498047, 0.858932614326477, 3000, 9.961322568533789e-05]
91
+ 2023-02-22 15:21:54,053 32k INFO Saving model and optimizer state at iteration 32 to ./logs\32k\G_3000.pth
92
+ 2023-02-22 15:22:11,823 32k INFO Saving model and optimizer state at iteration 32 to ./logs\32k\D_3000.pth
93
+ 2023-02-22 15:23:07,665 32k INFO ====> Epoch: 32
94
+ 2023-02-22 15:24:39,609 32k INFO ====> Epoch: 33
95
+ 2023-02-22 15:25:25,049 32k INFO Train Epoch: 34 [33%]
96
+ 2023-02-22 15:25:25,050 32k INFO [2.5494279861450195, 2.0830795764923096, 6.602403163909912, 10.558989524841309, 1.0258899927139282, 3200, 9.95883239353732e-05]
97
+ 2023-02-22 15:26:11,828 32k INFO ====> Epoch: 34
98
+ 2023-02-22 15:27:43,763 32k INFO ====> Epoch: 35
99
+ 2023-02-22 15:28:35,139 32k INFO Train Epoch: 36 [42%]
100
+ 2023-02-22 15:28:35,139 32k INFO [2.4735586643218994, 2.1442575454711914, 10.77430248260498, 18.345321655273438, 0.9167714715003967, 3400, 9.956342841045691e-05]
101
+ 2023-02-22 15:29:16,168 32k INFO ====> Epoch: 36
102
+ 2023-02-22 15:30:48,154 32k INFO ====> Epoch: 37
103
+ 2023-02-22 15:31:45,388 32k INFO Train Epoch: 38 [50%]
104
+ 2023-02-22 15:31:45,388 32k INFO [2.1438865661621094, 2.6590867042541504, 13.766039848327637, 20.467525482177734, 1.1156392097473145, 3600, 9.953853910903285e-05]
105
+ 2023-02-22 15:32:20,434 32k INFO ====> Epoch: 38
106
+ 2023-02-22 15:33:52,419 32k INFO ====> Epoch: 39
107
+ 2023-02-22 15:34:55,633 32k INFO Train Epoch: 40 [58%]
108
+ 2023-02-22 15:34:55,634 32k INFO [2.2264719009399414, 2.578108549118042, 13.543416023254395, 20.043352127075195, 1.2697389125823975, 3800, 9.951365602954526e-05]
109
+ 2023-02-22 15:35:24,767 32k INFO ====> Epoch: 40
110
+ 2023-02-22 15:36:56,696 32k INFO ====> Epoch: 41
111
+ 2023-02-22 15:38:05,727 32k INFO Train Epoch: 42 [67%]
112
+ 2023-02-22 15:38:05,727 32k INFO [2.699327230453491, 2.1295037269592285, 6.064660549163818, 14.428650856018066, 1.158591628074646, 4000, 9.948877917043875e-05]
113
+ 2023-02-22 15:38:10,077 32k INFO Saving model and optimizer state at iteration 42 to ./logs\32k\G_4000.pth
114
+ 2023-02-22 15:38:26,694 32k INFO Saving model and optimizer state at iteration 42 to ./logs\32k\D_4000.pth
115
+ 2023-02-22 15:38:53,288 32k INFO ====> Epoch: 42
116
+ 2023-02-22 15:40:25,213 32k INFO ====> Epoch: 43
117
+ 2023-02-22 15:41:40,124 32k INFO Train Epoch: 44 [75%]
118
+ 2023-02-22 15:41:40,125 32k INFO [2.5202181339263916, 2.263179063796997, 8.506352424621582, 14.219698905944824, 1.1474385261535645, 4200, 9.94639085301583e-05]
119
+ 2023-02-22 15:41:57,522 32k INFO ====> Epoch: 44
120
+ 2023-02-22 15:43:29,518 32k INFO ====> Epoch: 45
121
+ 2023-02-22 15:44:50,330 32k INFO Train Epoch: 46 [83%]
122
+ 2023-02-22 15:44:50,331 32k INFO [2.5390665531158447, 2.3581087589263916, 10.267776489257812, 18.579225540161133, 0.5731378197669983, 4400, 9.943904410714931e-05]
123
+ 2023-02-22 15:45:01,966 32k INFO ====> Epoch: 46
124
+ 2023-02-22 15:46:34,021 32k INFO ====> Epoch: 47
125
+ 2023-02-22 15:48:00,727 32k INFO Train Epoch: 48 [92%]
126
+ 2023-02-22 15:48:00,727 32k INFO [2.515454053878784, 2.529177188873291, 9.766312599182129, 17.621761322021484, 0.8474573493003845, 4600, 9.941418589985758e-05]
127
+ 2023-02-22 15:48:06,337 32k INFO ====> Epoch: 48
128
+ 2023-02-22 15:49:38,320 32k INFO ====> Epoch: 49
129
+ 2023-02-22 15:51:10,201 32k INFO ====> Epoch: 50
130
+ 2023-02-22 15:51:32,250 32k INFO Train Epoch: 51 [0%]
131
+ 2023-02-22 15:51:32,251 32k INFO [2.3931424617767334, 2.356489419937134, 9.818400382995605, 17.91476821899414, 0.9206458926200867, 4800, 9.937691023999092e-05]
132
+ 2023-02-22 15:52:42,572 32k INFO ====> Epoch: 51
133
+ 2023-02-22 15:54:14,584 32k INFO ====> Epoch: 52
134
+ 2023-02-22 15:54:42,402 32k INFO Train Epoch: 53 [8%]
135
+ 2023-02-22 15:54:42,403 32k INFO [2.62162184715271, 2.1928069591522217, 7.006716251373291, 12.615202903747559, 1.3347645998001099, 5000, 9.935206756519513e-05]
136
+ 2023-02-22 15:54:46,954 32k INFO Saving model and optimizer state at iteration 53 to ./logs\32k\G_5000.pth
137
+ 2023-02-22 15:55:04,806 32k INFO Saving model and optimizer state at iteration 53 to ./logs\32k\D_5000.pth
138
+ 2023-02-22 15:56:12,484 32k INFO ====> Epoch: 53
139
+ 2023-02-22 15:57:44,459 32k INFO ====> Epoch: 54
140
+ 2023-02-22 15:58:18,146 32k INFO Train Epoch: 55 [17%]
141
+ 2023-02-22 15:58:18,146 32k INFO [2.3371574878692627, 2.5377357006073, 12.064924240112305, 20.312028884887695, 0.8844712376594543, 5200, 9.932723110067987e-05]
142
+ 2023-02-22 15:59:16,702 32k INFO ====> Epoch: 55
143
+ 2023-02-22 16:00:48,683 32k INFO ====> Epoch: 56
144
+ 2023-02-22 16:01:28,287 32k INFO Train Epoch: 57 [25%]
145
+ 2023-02-22 16:01:28,287 32k INFO [2.4237263202667236, 2.382598876953125, 9.18662166595459, 19.957326889038086, 0.973038375377655, 5400, 9.930240084489267e-05]
146
+ 2023-02-22 16:02:21,032 32k INFO ====> Epoch: 57
147
+ 2023-02-22 16:03:53,053 32k INFO ====> Epoch: 58
148
+ 2023-02-22 16:04:38,512 32k INFO Train Epoch: 59 [33%]
149
+ 2023-02-22 16:04:38,513 32k INFO [2.5449366569519043, 2.398916006088257, 6.566847801208496, 14.815089225769043, 0.9515616297721863, 5600, 9.927757679628145e-05]
150
+ 2023-02-22 16:05:25,239 32k INFO ====> Epoch: 59
151
+ 2023-02-22 16:06:57,184 32k INFO ====> Epoch: 60
152
+ 2023-02-22 16:07:48,540 32k INFO Train Epoch: 61 [42%]
153
+ 2023-02-22 16:07:48,541 32k INFO [2.401327610015869, 2.294501781463623, 10.175949096679688, 17.809703826904297, 0.2573431134223938, 5800, 9.92527589532945e-05]
154
+ 2023-02-22 16:08:29,513 32k INFO ====> Epoch: 61
155
+ 2023-02-22 16:10:01,428 32k INFO ====> Epoch: 62
156
+ 2023-02-22 16:10:58,728 32k INFO Train Epoch: 63 [50%]
157
+ 2023-02-22 16:10:58,728 32k INFO [2.3318843841552734, 2.3909454345703125, 14.289694786071777, 23.690601348876953, 0.8774771094322205, 6000, 9.922794731438052e-05]
158
+ 2023-02-22 16:11:03,111 32k INFO Saving model and optimizer state at iteration 63 to ./logs\32k\G_6000.pth
159
+ 2023-02-22 16:11:21,087 32k INFO Saving model and optimizer state at iteration 63 to ./logs\32k\D_6000.pth
160
+ 2023-02-22 16:11:59,465 32k INFO ====> Epoch: 63
161
+ 2023-02-22 16:13:31,525 32k INFO ====> Epoch: 64
162
+ 2023-02-22 16:14:34,657 32k INFO Train Epoch: 65 [58%]
163
+ 2023-02-22 16:14:34,658 32k INFO [2.5927109718322754, 1.9995574951171875, 10.218186378479004, 17.972421646118164, 0.97355717420578, 6200, 9.92031418779886e-05]
164
+ 2023-02-22 16:15:03,864 32k INFO ====> Epoch: 65
165
+ 2023-02-22 16:16:35,954 32k INFO ====> Epoch: 66
166
+ 2023-02-22 16:17:44,886 32k INFO Train Epoch: 67 [67%]
167
+ 2023-02-22 16:17:44,886 32k INFO [2.424114942550659, 2.504781484603882, 8.676969528198242, 19.212587356567383, 0.6396912336349487, 6400, 9.917834264256819e-05]
168
+ 2023-02-22 16:18:08,323 32k INFO ====> Epoch: 67
169
+ 2023-02-22 16:19:40,252 32k INFO ====> Epoch: 68
170
+ 2023-02-22 16:20:55,170 32k INFO Train Epoch: 69 [75%]
171
+ 2023-02-22 16:20:55,170 32k INFO [2.1533024311065674, 3.0644826889038086, 7.629947662353516, 12.209115982055664, 0.5737940669059753, 6600, 9.915354960656915e-05]
172
+ 2023-02-22 16:21:12,576 32k INFO ====> Epoch: 69
173
+ 2023-02-22 16:22:44,491 32k INFO ====> Epoch: 70
174
+ 2023-02-22 16:24:05,302 32k INFO Train Epoch: 71 [83%]
175
+ 2023-02-22 16:24:05,303 32k INFO [2.5728468894958496, 2.255802631378174, 12.372926712036133, 19.72517204284668, 0.4394569396972656, 6800, 9.912876276844171e-05]
176
+ 2023-02-22 16:24:16,802 32k INFO ====> Epoch: 71
177
+ 2023-02-22 16:25:48,812 32k INFO ====> Epoch: 72
178
+ 2023-02-22 16:27:15,565 32k INFO Train Epoch: 73 [92%]
179
+ 2023-02-22 16:27:15,566 32k INFO [2.4174983501434326, 2.583244800567627, 12.01104736328125, 20.709869384765625, 0.8593897223472595, 7000, 9.910398212663652e-05]
180
+ 2023-02-22 16:27:19,933 32k INFO Saving model and optimizer state at iteration 73 to ./logs\32k\G_7000.pth
181
+ 2023-02-22 16:27:36,201 32k INFO Saving model and optimizer state at iteration 73 to ./logs\32k\D_7000.pth
182
+ 2023-02-22 16:27:45,345 32k INFO ====> Epoch: 73
183
+ 2023-02-22 16:29:17,311 32k INFO ====> Epoch: 74
184
+ 2023-02-22 16:30:49,193 32k INFO ====> Epoch: 75
185
+ 2023-02-22 16:31:11,286 32k INFO Train Epoch: 76 [0%]
186
+ 2023-02-22 16:31:11,286 32k INFO [2.4050168991088867, 2.314572334289551, 10.949235916137695, 17.868207931518555, 0.8503013849258423, 7200, 9.906682277864462e-05]
187
+ 2023-02-22 16:32:21,498 32k INFO ====> Epoch: 76
188
+ 2023-02-22 16:33:53,639 32k INFO ====> Epoch: 77
189
+ 2023-02-22 16:34:21,492 32k INFO Train Epoch: 78 [8%]
190
+ 2023-02-22 16:34:21,492 32k INFO [2.33305025100708, 2.5026371479034424, 10.88790225982666, 19.406280517578125, 0.8907907605171204, 7400, 9.904205762086905e-05]
191
+ 2023-02-22 16:35:25,844 32k INFO ====> Epoch: 78
192
+ 2023-02-22 16:36:57,756 32k INFO ====> Epoch: 79
193
+ 2023-02-22 16:37:33,287 32k INFO Train Epoch: 80 [17%]
194
+ 2023-02-22 16:37:33,287 32k INFO [2.3774352073669434, 2.40655517578125, 11.575504302978516, 19.511062622070312, 0.7897243499755859, 7600, 9.901729865399597e-05]
195
+ 2023-02-22 16:38:33,509 32k INFO ====> Epoch: 80
196
+ 2023-02-22 16:40:21,757 32k INFO ====> Epoch: 81
197
+ 2023-02-22 16:41:05,284 32k INFO Train Epoch: 82 [25%]
198
+ 2023-02-22 16:41:05,285 32k INFO [2.507054090499878, 2.5407822132110596, 10.627754211425781, 19.242189407348633, 1.1186254024505615, 7800, 9.899254587647776e-05]
199
+ 2023-02-22 16:42:05,985 32k INFO ====> Epoch: 82
200
+ 2023-02-22 16:43:40,273 32k INFO ====> Epoch: 83
201
+ 2023-02-22 16:44:26,718 32k INFO Train Epoch: 84 [33%]
202
+ 2023-02-22 16:44:26,718 32k INFO [2.372248649597168, 2.335797071456909, 10.346333503723145, 19.8045597076416, 0.8870834708213806, 8000, 9.896779928676716e-05]
203
+ 2023-02-22 16:44:31,079 32k INFO Saving model and optimizer state at iteration 84 to ./logs\32k\G_8000.pth
204
+ 2023-02-22 16:44:47,704 32k INFO Saving model and optimizer state at iteration 84 to ./logs\32k\D_8000.pth
205
+ 2023-02-22 16:45:39,633 32k INFO ====> Epoch: 84
206
+ 2023-02-22 16:47:14,215 32k INFO ====> Epoch: 85
207
+ 2023-02-22 16:48:06,579 32k INFO Train Epoch: 86 [42%]
208
+ 2023-02-22 16:48:06,580 32k INFO [2.71091628074646, 1.917865514755249, 6.362136363983154, 12.128801345825195, 0.7704624533653259, 8200, 9.894305888331732e-05]
209
+ 2023-02-22 16:48:48,895 32k INFO ====> Epoch: 86
210
+ 2023-02-22 16:50:23,435 32k INFO ====> Epoch: 87
211
+ 2023-02-22 16:51:21,956 32k INFO Train Epoch: 88 [50%]
+ 2023-02-22 16:51:21,956 32k INFO [2.347158193588257, 2.519123077392578, 13.479973793029785, 21.263261795043945, 1.0561842918395996, 8400, 9.891832466458178e-05]
+ 2023-02-22 16:51:58,188 32k INFO ====> Epoch: 88
+ 2023-02-22 16:53:32,713 32k INFO ====> Epoch: 89
+ 2023-02-22 16:54:37,198 32k INFO Train Epoch: 90 [58%]
+ 2023-02-22 16:54:37,198 32k INFO [2.253192901611328, 2.7299721240997314, 10.998817443847656, 17.6474552154541, 0.6397795081138611, 8600, 9.889359662901445e-05]
+ 2023-02-22 16:55:07,253 32k INFO ====> Epoch: 90
+ 2023-02-22 16:56:41,772 32k INFO ====> Epoch: 91
+ 2023-02-22 16:57:52,446 32k INFO Train Epoch: 92 [67%]
+ 2023-02-22 16:57:52,446 32k INFO [2.1650004386901855, 2.9927978515625, 12.249756813049316, 19.009756088256836, 0.5815404653549194, 8800, 9.886887477506964e-05]
+ 2023-02-22 16:58:16,630 32k INFO ====> Epoch: 92
+ 2023-02-22 16:59:51,089 32k INFO ====> Epoch: 93
+ 2023-02-22 17:01:07,832 32k INFO Train Epoch: 94 [75%]
+ 2023-02-22 17:01:07,832 32k INFO [2.2091586589813232, 2.7259390354156494, 8.377230644226074, 15.831489562988281, 1.1868693828582764, 9000, 9.884415910120204e-05]
+ 2023-02-22 17:01:12,237 32k INFO Saving model and optimizer state at iteration 94 to ./logs\32k\G_9000.pth
+ 2023-02-22 17:01:29,650 32k INFO Saving model and optimizer state at iteration 94 to ./logs\32k\D_9000.pth
+ 2023-02-22 17:01:51,234 32k INFO ====> Epoch: 94
+ 2023-02-22 17:03:26,924 32k INFO ====> Epoch: 95
+ 2023-02-22 17:04:49,734 32k INFO Train Epoch: 96 [83%]
+ 2023-02-22 17:04:49,734 32k INFO [2.2579903602600098, 2.5196046829223633, 11.29547119140625, 19.78851318359375, 0.7442747950553894, 9200, 9.881944960586671e-05]
+ 2023-02-22 17:05:01,727 32k INFO ====> Epoch: 96
+ 2023-02-22 17:06:36,234 32k INFO ====> Epoch: 97
+ 2023-02-22 17:08:05,105 32k INFO Train Epoch: 98 [92%]
+ 2023-02-22 17:08:05,106 32k INFO [2.2475061416625977, 2.572089195251465, 12.223832130432129, 19.65192413330078, 1.1417388916015625, 9400, 9.879474628751914e-05]
+ 2023-02-22 17:08:10,877 32k INFO ====> Epoch: 98
+ 2023-02-22 17:09:45,334 32k INFO ====> Epoch: 99
+ 2023-02-22 17:11:19,697 32k INFO ====> Epoch: 100
+ 2023-02-22 17:11:41,688 32k INFO Train Epoch: 101 [0%]
+ 2023-02-22 17:11:41,688 32k INFO [2.349648952484131, 2.544644594192505, 11.396925926208496, 19.187210083007812, 1.01069974899292, 9600, 9.875770288847208e-05]
+ 2023-02-22 17:12:54,275 32k INFO ====> Epoch: 101
+ 2023-02-22 17:14:28,617 32k INFO ====> Epoch: 102
+ 2023-02-22 17:14:56,638 32k INFO Train Epoch: 103 [8%]
+ 2023-02-22 17:14:56,639 32k INFO [2.373283863067627, 2.405756711959839, 11.242820739746094, 19.008630752563477, 0.8255038857460022, 9800, 9.873301500583906e-05]
+ 2023-02-22 17:16:03,251 32k INFO ====> Epoch: 103
+ 2023-02-22 17:17:37,617 32k INFO ====> Epoch: 104
+ 2023-02-22 17:18:11,823 32k INFO Train Epoch: 105 [17%]
+ 2023-02-22 17:18:11,823 32k INFO [2.284367084503174, 2.3137102127075195, 11.732841491699219, 19.72892951965332, 0.8187623620033264, 10000, 9.870833329479095e-05]
+ 2023-02-22 17:18:16,173 32k INFO Saving model and optimizer state at iteration 105 to ./logs\32k\G_10000.pth
+ 2023-02-22 17:18:35,785 32k INFO Saving model and optimizer state at iteration 105 to ./logs\32k\D_10000.pth
+ 2023-02-22 17:19:39,979 32k INFO ====> Epoch: 105
+ 2023-02-22 17:21:14,353 32k INFO ====> Epoch: 106
+ 2023-02-22 17:21:54,670 32k INFO Train Epoch: 107 [25%]
+ 2023-02-22 17:21:54,670 32k INFO [2.573239326477051, 2.3250515460968018, 10.18419361114502, 19.073320388793945, 1.2266535758972168, 10200, 9.868365775378495e-05]
+ 2023-02-22 17:22:49,113 32k INFO ====> Epoch: 107
+ 2023-02-22 17:24:23,459 32k INFO ====> Epoch: 108
+ 2023-02-22 17:25:09,774 32k INFO Train Epoch: 109 [33%]
+ 2023-02-22 17:25:09,775 32k INFO [2.383540153503418, 2.1890156269073486, 9.045347213745117, 16.33897590637207, 1.013568639755249, 10400, 9.865898838127865e-05]
+ 2023-02-22 17:25:58,126 32k INFO ====> Epoch: 109
+ 2023-02-22 17:27:32,628 32k INFO ====> Epoch: 110
+ 2023-02-22 17:28:24,990 32k INFO Train Epoch: 111 [42%]
+ 2023-02-22 17:28:24,990 32k INFO [2.5534281730651855, 2.1356728076934814, 6.74716854095459, 11.141881942749023, 0.9786153435707092, 10600, 9.863432517573002e-05]
+ 2023-02-22 17:29:07,375 32k INFO ====> Epoch: 111
+ 2023-02-22 17:30:41,812 32k INFO ====> Epoch: 112
+ 2023-02-22 17:31:40,243 32k INFO Train Epoch: 113 [50%]
+ 2023-02-22 17:31:40,243 32k INFO [2.333615779876709, 2.469421863555908, 11.403718948364258, 18.24078941345215, 0.8930609822273254, 10800, 9.86096681355974e-05]
+ 2023-02-22 17:32:16,374 32k INFO ====> Epoch: 113
+ 2023-02-22 17:33:50,757 32k INFO ====> Epoch: 114
+ 2023-02-22 17:34:55,259 32k INFO Train Epoch: 115 [58%]
+ 2023-02-22 17:34:55,260 32k INFO [2.597790241241455, 2.1551156044006348, 10.270503997802734, 16.67560577392578, 1.3888392448425293, 11000, 9.858501725933955e-05]
+ 2023-02-22 17:34:59,644 32k INFO Saving model and optimizer state at iteration 115 to ./logs\32k\G_11000.pth
+ 2023-02-22 17:35:17,175 32k INFO Saving model and optimizer state at iteration 115 to ./logs\32k\D_11000.pth
+ 2023-02-22 17:35:54,062 32k INFO ====> Epoch: 115
+ 2023-02-22 20:31:20,713 32k INFO ====> Epoch: 116
+ 2023-02-22 20:32:30,093 32k INFO Train Epoch: 117 [67%]
+ 2023-02-22 20:32:30,094 32k INFO [2.4348371028900146, 2.0384914875030518, 10.057577133178711, 16.99835205078125, 0.528782844543457, 11200, 9.85603725454156e-05]
+ 2023-02-22 20:32:53,867 32k INFO ====> Epoch: 117
+ 2023-02-22 20:34:28,874 32k INFO ====> Epoch: 118
+ 2023-02-22 20:35:45,963 32k INFO Train Epoch: 119 [75%]
+ 2023-02-22 20:35:45,963 32k INFO [2.6818912029266357, 2.2558794021606445, 8.338019371032715, 15.641912460327148, 0.7577197551727295, 11400, 9.853573399228505e-05]
+ 2023-02-22 20:36:03,813 32k INFO ====> Epoch: 119
+ 2023-02-22 20:37:37,151 32k INFO ====> Epoch: 120
+ 2023-02-22 20:39:00,221 32k INFO Train Epoch: 121 [83%]
+ 2023-02-22 20:39:00,221 32k INFO [2.2360901832580566, 2.620755910873413, 12.849711418151855, 21.679330825805664, 0.5196329355239868, 11600, 9.851110159840781e-05]
+ 2023-02-22 20:39:12,031 32k INFO ====> Epoch: 121
+ 2023-02-22 20:40:47,542 32k INFO ====> Epoch: 122
+ 2023-02-22 20:42:17,956 32k INFO Train Epoch: 123 [92%]
+ 2023-02-22 20:42:17,956 32k INFO [2.399332046508789, 2.413424253463745, 10.66106128692627, 20.046344757080078, 0.6961185336112976, 11800, 9.848647536224416e-05]
+ 2023-02-22 20:42:23,805 32k INFO ====> Epoch: 123
+ 2023-02-22 20:44:01,725 32k INFO ====> Epoch: 124
+ 2023-02-22 20:45:38,751 32k INFO ====> Epoch: 125
+ 2023-02-22 20:46:00,717 32k INFO Train Epoch: 126 [0%]
+ 2023-02-22 20:46:00,717 32k INFO [2.8306925296783447, 2.608029842376709, 6.2529497146606445, 12.364657402038574, 0.8958999514579773, 12000, 9.84495475503445e-05]
+ 2023-02-22 20:46:05,062 32k INFO Saving model and optimizer state at iteration 126 to ./logs\32k\G_12000.pth
+ 2023-02-22 20:46:22,876 32k INFO Saving model and optimizer state at iteration 126 to ./logs\32k\D_12000.pth
+ 2023-02-22 20:47:49,445 32k INFO ====> Epoch: 126
+ 2023-02-22 20:49:39,187 32k INFO ====> Epoch: 127
+ 2023-02-22 20:50:07,537 32k INFO Train Epoch: 128 [8%]
+ 2023-02-22 20:50:07,538 32k INFO [2.4325919151306152, 2.298684597015381, 8.939014434814453, 12.424883842468262, 0.7497723698616028, 12200, 9.842493670173108e-05]
+ 2023-02-22 20:51:19,370 32k INFO ====> Epoch: 128
+ 2023-02-22 20:52:52,991 32k INFO ====> Epoch: 129
+ 2023-02-22 20:53:26,740 32k INFO Train Epoch: 130 [17%]
+ 2023-02-22 20:53:26,740 32k INFO [2.4256911277770996, 2.498311996459961, 11.079325675964355, 18.753610610961914, 0.739328145980835, 12400, 9.840033200544528e-05]
+ 2023-02-22 20:54:26,818 32k INFO ====> Epoch: 130
+ 2023-02-22 20:56:01,178 32k INFO ====> Epoch: 131
+ 2023-02-22 20:56:41,144 32k INFO Train Epoch: 132 [25%]
+ 2023-02-22 20:56:41,145 32k INFO [2.4430596828460693, 2.443613052368164, 9.05742359161377, 17.19443702697754, 0.7620078921318054, 12600, 9.837573345994909e-05]
+ 2023-02-22 20:57:35,413 32k INFO ====> Epoch: 132
+ 2023-02-22 20:59:09,120 32k INFO ====> Epoch: 133
+ 2023-02-22 20:59:55,044 32k INFO Train Epoch: 134 [33%]
+ 2023-02-22 20:59:55,045 32k INFO [2.505936622619629, 2.3898284435272217, 7.810985088348389, 13.378456115722656, 1.2783924341201782, 12800, 9.835114106370493e-05]
+ 2023-02-22 21:00:43,179 32k INFO ====> Epoch: 134
+ 2023-02-22 21:02:16,908 32k INFO ====> Epoch: 135
+ 2023-02-22 21:03:08,902 32k INFO Train Epoch: 136 [42%]
+ 2023-02-22 21:03:08,902 32k INFO [2.4402434825897217, 2.1326770782470703, 10.419414520263672, 17.756248474121094, 0.7173585295677185, 13000, 9.832655481517557e-05]
+ 2023-02-22 21:03:13,205 32k INFO Saving model and optimizer state at iteration 136 to ./logs\32k\G_13000.pth
+ 2023-02-22 21:03:30,811 32k INFO Saving model and optimizer state at iteration 136 to ./logs\32k\D_13000.pth
+ 2023-02-22 21:04:16,487 32k INFO ====> Epoch: 136
+ 2023-02-22 21:05:50,181 32k INFO ====> Epoch: 137
+ 2023-02-22 21:06:48,153 32k INFO Train Epoch: 138 [50%]
+ 2023-02-22 21:06:48,153 32k INFO [2.360225200653076, 2.247342109680176, 11.932147026062012, 19.808679580688477, 0.7805557250976562, 13200, 9.830197471282419e-05]
+ 2023-02-22 21:07:24,075 32k INFO ====> Epoch: 138
+ 2023-02-22 21:08:57,845 32k INFO ====> Epoch: 139
+ 2023-02-22 21:10:01,976 32k INFO Train Epoch: 140 [58%]
+ 2023-02-22 21:10:01,977 32k INFO [2.3206355571746826, 2.3782708644866943, 10.206865310668945, 16.233327865600586, 1.0171841382980347, 13400, 9.827740075511432e-05]
+ 2023-02-22 21:10:31,906 32k INFO ====> Epoch: 140
+ 2023-02-22 21:12:05,629 32k INFO ====> Epoch: 141
+ 2023-02-22 21:13:15,714 32k INFO Train Epoch: 142 [67%]
+ 2023-02-22 21:13:15,714 32k INFO [2.479469060897827, 2.4788477420806885, 9.181254386901855, 18.939329147338867, 0.5550024509429932, 13600, 9.825283294050992e-05]
+ 2023-02-22 21:13:39,694 32k INFO ====> Epoch: 142
+ 2023-02-22 21:15:13,374 32k INFO ====> Epoch: 143
+ 2023-02-22 21:16:29,548 32k INFO Train Epoch: 144 [75%]
+ 2023-02-22 21:16:29,548 32k INFO [2.6104512214660645, 2.257268190383911, 8.792437553405762, 15.681217193603516, 0.7313880324363708, 13800, 9.822827126747529e-05]
+ 2023-02-22 21:16:47,390 32k INFO ====> Epoch: 144
+ 2023-02-22 21:18:20,999 32k INFO ====> Epoch: 145
+ 2023-02-22 21:19:43,366 32k INFO Train Epoch: 146 [83%]
+ 2023-02-22 21:19:43,366 32k INFO [2.349297523498535, 2.540470600128174, 9.31801700592041, 17.564865112304688, 0.844805121421814, 14000, 9.820371573447515e-05]
+ 2023-02-22 21:19:47,730 32k INFO Saving model and optimizer state at iteration 146 to ./logs\32k\G_14000.pth
+ 2023-02-22 21:20:06,766 32k INFO Saving model and optimizer state at iteration 146 to ./logs\32k\D_14000.pth
+ 2023-02-22 21:20:22,632 32k INFO ====> Epoch: 146
+ 2023-02-22 21:21:56,366 32k INFO ====> Epoch: 147
+ 2023-02-22 21:23:24,644 32k INFO Train Epoch: 148 [92%]
+ 2023-02-22 21:23:24,644 32k INFO [2.459839344024658, 2.3918144702911377, 10.237080574035645, 19.57539176940918, 0.8234599232673645, 14200, 9.817916633997459e-05]
+ 2023-02-22 21:23:30,381 32k INFO ====> Epoch: 148
+ 2023-02-22 21:25:04,056 32k INFO ====> Epoch: 149
+ 2023-02-22 21:26:43,871 32k INFO ====> Epoch: 150
+ 2023-02-22 21:27:05,974 32k INFO Train Epoch: 151 [0%]
+ 2023-02-22 21:27:05,974 32k INFO [2.3207051753997803, 2.562465190887451, 11.952434539794922, 18.7629337310791, 0.7440965175628662, 14400, 9.814235375455375e-05]
+ 2023-02-22 21:28:21,242 32k INFO ====> Epoch: 151
+ 2023-02-22 21:29:56,786 32k INFO ====> Epoch: 152
+ 2023-02-22 21:30:24,915 32k INFO Train Epoch: 153 [8%]
+ 2023-02-22 21:30:24,915 32k INFO [2.3596057891845703, 2.2290072441101074, 12.462300300598145, 16.245037078857422, 0.7593880891799927, 14600, 9.811781969958938e-05]
+ 2023-02-22 21:31:32,555 32k INFO ====> Epoch: 153
+ 2023-02-22 21:33:08,028 32k INFO ====> Epoch: 154
+ 2023-02-22 21:33:42,295 32k INFO Train Epoch: 155 [17%]
+ 2023-02-22 21:33:42,295 32k INFO [2.2800114154815674, 2.362840175628662, 14.557291030883789, 21.61530876159668, 0.5335739254951477, 14800, 9.809329177775541e-05]
+ 2023-02-22 21:34:43,650 32k INFO ====> Epoch: 155
+ 2023-02-22 21:36:27,381 32k INFO ====> Epoch: 156
+ 2023-02-22 21:37:10,436 32k INFO Train Epoch: 157 [25%]
+ 2023-02-22 21:37:10,436 32k INFO [2.6323020458221436, 2.181378126144409, 8.393657684326172, 14.980151176452637, 0.9145171046257019, 15000, 9.806876998751865e-05]
+ 2023-02-22 21:37:14,832 32k INFO Saving model and optimizer state at iteration 157 to ./logs\32k\G_15000.pth
+ 2023-02-22 21:37:33,451 32k INFO Saving model and optimizer state at iteration 157 to ./logs\32k\D_15000.pth
+ 2023-02-22 21:38:35,506 32k INFO ====> Epoch: 157
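
Each bracketed log line above carries seven numbers: what appear to be five loss terms followed by the global step and the current learning rate. In VITS-style trainers such as so-vits-svc the losses are typically discriminator, generator, feature-matching, mel, and KL, but treat that ordering as an assumption to verify against the train.py that wrote this log. The learning-rate column is consistent with per-epoch exponential decay: assuming the 32k run uses the same learning_rate (0.0001) and lr_decay (0.999875) as the 44k config later in this commit, 1e-4 * 0.999875^87 ≈ 9.8918e-05, which matches the epoch-88 entry. G_*/D_* checkpoint pairs are saved every 1,000 global steps. A minimal parsing sketch (the path and field labels are illustrative assumptions, not part of the repo):

import re

# Sketch: extract (step, losses, lr) points from a train.log like the one
# above. Loss ordering (disc, gen, fm, mel, kl) is an assumption; confirm it
# against the training script that produced the log.
BRACKET = re.compile(r"INFO \[([^\]]+)\]")

def parse_train_log(path="logs/32k/train.log"):  # illustrative path
    points = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            m = BRACKET.search(line)
            if m is None:
                continue  # epoch markers and checkpoint-save lines have no bracket
            vals = [float(x) for x in m.group(1).split(",")]
            *losses, step, lr = vals
            points.append({"step": int(step), "losses": losses, "lr": lr})
    return points
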
44k-V4.0-luna/D_12800.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fa048f50b284781bbc569947e8eeb52480f3370810f525971b44a92e3d23bec
+ size 561098185
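
Each .pth entry in this commit is a three-line Git LFS pointer (spec version, SHA-256 object id, payload size in bytes) rather than the checkpoint itself; the actual weights are fetched by git-lfs on checkout. A small sketch that re-checks a downloaded checkpoint against the pointer fields shown above (matches_pointer is an illustrative helper, not part of any library):

import hashlib

def matches_pointer(path, oid_hex, size_bytes):
    """Return True if the file's byte length and SHA-256 match the LFS pointer."""
    h = hashlib.sha256()
    n = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            n += len(chunk)
    return n == size_bytes and h.hexdigest() == oid_hex

# e.g., for the pointer above:
# matches_pointer("44k-V4.0-luna/D_12800.pth",
#                 "5fa048f50b284781bbc569947e8eeb52480f3370810f525971b44a92e3d23bec",
#                 561098185)
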
44k-V4.0-luna/D_128000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42edfd73d886429819d2d199f69df36feba967ab3f369306cf5fee6313c1a717
+ size 561099143
44k-V4.0-luna/D_188800.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2137437dd953da66fa55ca5411a0915289f1d2495e97a1f83fa4129226a1dd30
+ size 561099143
44k-V4.0-luna/D_20000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:262572034aca7b6a4e7c2fc1b9100c7cce2d1b1ef38fb85e3a2e7dd93d99f4b4
+ size 561098185
44k-V4.0-luna/D_289600.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c47dfa9e34b25759fd77b674b750ea52df060ec8ab183e1ef96f0f61d846b3ca
+ size 561099143
44k-V4.0-luna/G_12800.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76577a34ac0896082e6c9c3c74210f7f7404cab1494b43f0563db6b030675c60
+ size 542789405
44k-V4.0-luna/G_128000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8004bdc1152cc7ac09d312e74db052053503925df02b6b51de4e3f53d6ab7ccc
+ size 542792859
44k-V4.0-luna/G_188800.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36f9ba3851ec07310225123cbcf77f6dce5cd122bbf1de6eb078f7b463c43104
+ size 542792859
44k-V4.0-luna/G_20000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44689b6b11807edcce50ebfec8b08c44cf8cebbd470568d3ad445f18470d6386
+ size 542789405
44k-V4.0-luna/G_289600.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cba65851c6e00a6b8b3e055db0954abcb4a87b2361b1b2cd2e9a3cd2ba4a5f8c
+ size 542792859
44k-V4.0-luna/config.json ADDED
@@ -0,0 +1,93 @@
+ {
+ "train": {
+ "log_interval": 200,
+ "eval_interval": 800,
+ "seed": 1234,
+ "epochs": 10000,
+ "learning_rate": 0.0001,
+ "betas": [
+ 0.8,
+ 0.99
+ ],
+ "eps": 1e-09,
+ "batch_size": 6,
+ "fp16_run": false,
+ "lr_decay": 0.999875,
+ "segment_size": 10240,
+ "init_lr_ratio": 1,
+ "warmup_epochs": 0,
+ "c_mel": 45,
+ "c_kl": 1.0,
+ "use_sr": true,
+ "max_speclen": 512,
+ "port": "8001",
+ "keep_ckpts": 0
+ },
+ "data": {
+ "training_files": "filelists/train.txt",
+ "validation_files": "filelists/val.txt",
+ "max_wav_value": 32768.0,
+ "sampling_rate": 44100,
+ "filter_length": 2048,
+ "hop_length": 512,
+ "win_length": 2048,
+ "n_mel_channels": 80,
+ "mel_fmin": 0.0,
+ "mel_fmax": 22050
+ },
+ "model": {
+ "inter_channels": 192,
+ "hidden_channels": 192,
+ "filter_channels": 768,
+ "n_heads": 2,
+ "n_layers": 6,
+ "kernel_size": 3,
+ "p_dropout": 0.1,
+ "resblock": "1",
+ "resblock_kernel_sizes": [
+ 3,
+ 7,
+ 11
+ ],
+ "resblock_dilation_sizes": [
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ]
+ ],
+ "upsample_rates": [
+ 8,
+ 8,
+ 2,
+ 2,
+ 2
+ ],
+ "upsample_initial_channel": 512,
+ "upsample_kernel_sizes": [
+ 16,
+ 16,
+ 4,
+ 4,
+ 4
+ ],
+ "n_layers_q": 3,
+ "use_spectral_norm": false,
+ "gin_channels": 256,
+ "ssl_dim": 256,
+ "n_speakers": 200
+ },
+ "spk": {
+ "luna": 0
+ }
+ }
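
A consistency property worth noting in this config: the decoder's total upsampling factor, the product of model.upsample_rates (8 * 8 * 2 * 2 * 2 = 512), must equal data.hop_length (512 here), or the vocoder's waveform output will not line up with the spectrogram frames. Likewise, win_length equals filter_length (2048) and mel_fmax sits at the Nyquist frequency (22050 = 44100 / 2). A minimal sketch of the first check, assuming the config path added in this commit:

import json
import math

# Sanity-check sketch for the uploaded config: total decoder upsampling must
# equal the STFT hop length. math.prod requires Python 3.8+.
with open("44k-V4.0-luna/config.json", encoding="utf-8") as f:
    cfg = json.load(f)

up_factor = math.prod(cfg["model"]["upsample_rates"])  # 8*8*2*2*2 = 512
assert up_factor == cfg["data"]["hop_length"], (
    f"upsample product {up_factor} != hop_length {cfg['data']['hop_length']}"
)
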
44k-V4.0-luna/train.log ADDED
The diff for this file is too large to render.