{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "True\n", "1\n", "NVIDIA GeForce GTX 1650 Ti\n", "0\n" ] } ], "source": [ "import torch\n", "from exp.exp_long_term_forecasting import Exp_Long_Term_Forecast\n", "from utils.print_args import print_args\n", "import argparse\n", "import random\n", "import numpy as np\n", "\n", "print(torch.cuda.is_available())\t\t\n", "print(torch.cuda.device_count()) \t\t\n", "print(torch.cuda.get_device_name()) \t\n", "print(torch.cuda.current_device())\t\t" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "def run_experiment(task_name, is_training, model_id, model, data, root_path, data_path, features, target, freq, checkpoints,\n", " seq_len, label_len, pred_len, seasonal_patterns, inverse, mask_rate, anomaly_ratio, top_k, num_kernels,\n", " enc_in, dec_in, c_out, d_model, n_heads, e_layers, d_layers, d_ff, moving_avg, factor, distil, dropout,\n", " embed, activation, output_attention, channel_independence, num_workers, itr, train_epochs, batch_size,\n", " patience, learning_rate, des, loss, lradj, use_amp, use_gpu, gpu, use_multi_gpu, devices, p_hidden_dims,\n", " p_hidden_layers):\n", " fix_seed = 2021\n", " random.seed(fix_seed)\n", " torch.manual_seed(fix_seed)\n", " np.random.seed(fix_seed)\n", "\n", " args = argparse.Namespace(\n", " task_name=task_name,\n", " is_training=is_training,\n", " model_id=model_id,\n", " model=model,\n", " data=data,\n", " root_path=root_path,\n", " data_path=data_path,\n", " features=features,\n", " target=target,\n", " freq=freq,\n", " checkpoints=checkpoints,\n", " seq_len=seq_len,\n", " label_len=label_len,\n", " pred_len=pred_len,\n", " seasonal_patterns=seasonal_patterns,\n", " inverse=inverse,\n", " mask_rate=mask_rate,\n", " anomaly_ratio=anomaly_ratio,\n", " top_k=top_k,\n", " num_kernels=num_kernels,\n", " enc_in=enc_in,\n", " dec_in=dec_in,\n", " c_out=c_out,\n", " d_model=d_model,\n", " n_heads=n_heads,\n", " e_layers=e_layers,\n", " d_layers=d_layers,\n", " d_ff=d_ff,\n", " moving_avg=moving_avg,\n", " factor=factor,\n", " distil=distil,\n", " dropout=dropout,\n", " embed=embed,\n", " activation=activation,\n", " output_attention=output_attention,\n", " channel_independence=channel_independence,\n", " num_workers=num_workers,\n", " itr=itr,\n", " train_epochs=train_epochs,\n", " batch_size=batch_size,\n", " patience=patience,\n", " learning_rate=learning_rate,\n", " des=des,\n", " loss=loss,\n", " lradj=lradj,\n", " use_amp=use_amp,\n", " use_gpu=use_gpu,\n", " gpu=gpu,\n", " use_multi_gpu=use_multi_gpu,\n", " devices=devices,\n", " p_hidden_dims=p_hidden_dims,\n", " p_hidden_layers=p_hidden_layers\n", " )\n", "\n", " args.use_gpu = True if torch.cuda.is_available() and args.use_gpu else False\n", "\n", " if args.use_gpu and args.use_multi_gpu:\n", " args.devices = args.devices.replace(' ', '')\n", " device_ids = args.devices.split(',')\n", " args.device_ids = [int(id_) for id_ in device_ids]\n", " args.gpu = args.device_ids[0]\n", "\n", " print('Args in experiment:')\n", " print_args(args)\n", "\n", " # Map task_name to the corresponding experiment class\n", " task_to_exp = {\n", " 'long_term_forecast': Exp_Long_Term_Forecast\n", " }\n", " Exp = task_to_exp.get(task_name, Exp_Long_Term_Forecast)\n", "\n", " if args.is_training:\n", " for ii in range(args.itr):\n", " # setting record of experiments\n", " exp = Exp(args) # set experiments\n", " setting = f'{task_name}_{model_id}_{model}_{data}_ft{features}_sl{seq_len}_ll{label_len}_pl{pred_len}_dm{d_model}_nh{n_heads}_el{e_layers}_dl{d_layers}_df{d_ff}_fc{factor}_eb{embed}_dt{distil}_{des}_{ii}'\n", "\n", " print(f'>>>>>>>start training : {setting}>>>>>>>>>>>>>>>>>>>>>>>>>>')\n", " exp.train(setting)\n", "\n", " print(f'>>>>>>>testing : {setting}<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')\n", " exp.test(setting)\n", " torch.cuda.empty_cache()\n", " else:\n", " ii = 0\n", " setting = f'{task_name}_{model_id}_{model}_{data}_ft{features}_sl{seq_len}_ll{label_len}_pl{pred_len}_dm{d_model}_nh{n_heads}_el{e_layers}_dl{d_layers}_df{d_ff}_fc{factor}_eb{embed}_dt{distil}_{des}_{ii}'\n", "\n", " exp = Exp(args) # set experiments\n", " print(f'>>>>>>>testing : {setting}<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')\n", " exp.test(setting, test=1)\n", " torch.cuda.empty_cache()" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_96 Model: iTransformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 96 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 21 Dec In: 21 \n", " C Out: 21 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 10 Batch Size: 32 \n", " Patience: 3 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_96_iTransformer_custom_ftM_sl96_ll48_pl96_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97382\n", "val 13845\n", "test 27783\n", "\titers: 100, epoch: 1 | loss: 0.5936559\n", "\tspeed: 0.5555s/iter; left time: 16848.5485s\n", "\titers: 200, epoch: 1 | loss: 0.4642630\n", "\tspeed: 0.0207s/iter; left time: 625.0950s\n", "\titers: 300, epoch: 1 | loss: 0.6176420\n", "\tspeed: 0.0209s/iter; left time: 630.2278s\n", "\titers: 400, epoch: 1 | loss: 0.7134495\n", "\tspeed: 0.0209s/iter; left time: 628.9690s\n", "\titers: 500, epoch: 1 | loss: 0.7800292\n", "\tspeed: 0.0210s/iter; left time: 628.5194s\n", "\titers: 600, epoch: 1 | loss: 0.5186352\n", "\tspeed: 0.0209s/iter; left time: 624.0082s\n", "\titers: 700, epoch: 1 | loss: 0.7754697\n", "\tspeed: 0.0208s/iter; left time: 618.7988s\n", "\titers: 800, epoch: 1 | loss: 0.7751970\n", "\tspeed: 0.0209s/iter; left time: 618.1029s\n", "\titers: 900, epoch: 1 | loss: 0.6297045\n", "\tspeed: 0.0209s/iter; left time: 618.4241s\n", "\titers: 1000, epoch: 1 | loss: 0.7198268\n", "\tspeed: 0.0209s/iter; left time: 613.6612s\n", "\titers: 1100, epoch: 1 | loss: 0.5327927\n", "\tspeed: 0.0209s/iter; left time: 612.4658s\n", "\titers: 1200, epoch: 1 | loss: 0.6536652\n", "\tspeed: 0.0210s/iter; left time: 614.1852s\n", "\titers: 1300, epoch: 1 | loss: 0.4536501\n", "\tspeed: 0.0209s/iter; left time: 609.8295s\n", "\titers: 1400, epoch: 1 | loss: 0.5372092\n", "\tspeed: 0.0209s/iter; left time: 607.6192s\n", "\titers: 1500, epoch: 1 | loss: 0.6050881\n", "\tspeed: 0.0210s/iter; left time: 608.2530s\n", "\titers: 1600, epoch: 1 | loss: 0.5191691\n", "\tspeed: 0.0209s/iter; left time: 603.6556s\n", "\titers: 1700, epoch: 1 | loss: 1.2809647\n", "\tspeed: 0.0209s/iter; left time: 600.0178s\n", "\titers: 1800, epoch: 1 | loss: 0.4646869\n", "\tspeed: 0.0210s/iter; left time: 600.3948s\n", "\titers: 1900, epoch: 1 | loss: 0.5571308\n", "\tspeed: 0.0209s/iter; left time: 596.6669s\n", "\titers: 2000, epoch: 1 | loss: 0.6021132\n", "\tspeed: 0.0210s/iter; left time: 597.3279s\n", "\titers: 2100, epoch: 1 | loss: 0.4441825\n", "\tspeed: 0.0208s/iter; left time: 590.4537s\n", "\titers: 2200, epoch: 1 | loss: 0.5600955\n", "\tspeed: 0.0210s/iter; left time: 591.9178s\n", "\titers: 2300, epoch: 1 | loss: 0.5569951\n", "\tspeed: 0.0210s/iter; left time: 590.2396s\n", "\titers: 2400, epoch: 1 | loss: 0.9510192\n", "\tspeed: 0.0209s/iter; left time: 587.1312s\n", "\titers: 2500, epoch: 1 | loss: 0.6509035\n", "\tspeed: 0.0211s/iter; left time: 588.6173s\n", "\titers: 2600, epoch: 1 | loss: 0.5674310\n", "\tspeed: 0.0211s/iter; left time: 586.0627s\n", "\titers: 2700, epoch: 1 | loss: 0.7384115\n", "\tspeed: 0.0210s/iter; left time: 582.7360s\n", "\titers: 2800, epoch: 1 | loss: 0.5617262\n", "\tspeed: 0.0210s/iter; left time: 580.3314s\n", "\titers: 2900, epoch: 1 | loss: 0.6220732\n", "\tspeed: 0.0212s/iter; left time: 584.3804s\n", "\titers: 3000, epoch: 1 | loss: 0.4869988\n", "\tspeed: 0.0212s/iter; left time: 580.4846s\n", "Epoch: 1 cost time: 117.79116916656494\n", "Epoch: 1, Steps: 3043 | Train Loss: 0.6648432 Vali Loss: 0.5661516 Test Loss: 0.5663692\n", "Validation loss decreased (inf --> 0.566152). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.5689440\n", "\tspeed: 2.9596s/iter; left time: 80760.5058s\n", "\titers: 200, epoch: 2 | loss: 0.4005603\n", "\tspeed: 0.0207s/iter; left time: 562.6363s\n", "\titers: 300, epoch: 2 | loss: 0.4669296\n", "\tspeed: 0.0209s/iter; left time: 564.9786s\n", "\titers: 400, epoch: 2 | loss: 0.4520299\n", "\tspeed: 0.0208s/iter; left time: 560.6172s\n", "\titers: 500, epoch: 2 | loss: 0.7590920\n", "\tspeed: 0.0209s/iter; left time: 560.9001s\n", "\titers: 600, epoch: 2 | loss: 0.6386052\n", "\tspeed: 0.0208s/iter; left time: 556.5773s\n", "\titers: 700, epoch: 2 | loss: 0.5778190\n", "\tspeed: 0.0212s/iter; left time: 564.4835s\n", "\titers: 800, epoch: 2 | loss: 0.5623382\n", "\tspeed: 0.0210s/iter; left time: 559.0215s\n", "\titers: 900, epoch: 2 | loss: 0.7266727\n", "\tspeed: 0.0209s/iter; left time: 554.2205s\n", "\titers: 1000, epoch: 2 | loss: 0.7285644\n", "\tspeed: 0.0209s/iter; left time: 550.2931s\n", "\titers: 1100, epoch: 2 | loss: 0.8691047\n", "\tspeed: 0.0210s/iter; left time: 553.1770s\n", "\titers: 1200, epoch: 2 | loss: 0.6328333\n", "\tspeed: 0.0208s/iter; left time: 545.8935s\n", "\titers: 1300, epoch: 2 | loss: 0.4483800\n", "\tspeed: 0.0212s/iter; left time: 553.0334s\n", "\titers: 1400, epoch: 2 | loss: 0.5846487\n", "\tspeed: 0.0216s/iter; left time: 560.5742s\n", "\titers: 1500, epoch: 2 | loss: 0.5472016\n", "\tspeed: 0.0210s/iter; left time: 544.3237s\n", "\titers: 1600, epoch: 2 | loss: 0.6267307\n", "\tspeed: 0.0209s/iter; left time: 539.9525s\n", "\titers: 1700, epoch: 2 | loss: 0.5278637\n", "\tspeed: 0.0211s/iter; left time: 542.6012s\n", "\titers: 1800, epoch: 2 | loss: 0.6313742\n", "\tspeed: 0.0210s/iter; left time: 537.5675s\n", "\titers: 1900, epoch: 2 | loss: 1.0085171\n", "\tspeed: 0.0210s/iter; left time: 535.4151s\n", "\titers: 2000, epoch: 2 | loss: 0.6759319\n", "\tspeed: 0.0209s/iter; left time: 531.3173s\n", "\titers: 2100, epoch: 2 | loss: 0.8794027\n", "\tspeed: 0.0210s/iter; left time: 531.0814s\n", "\titers: 2200, epoch: 2 | loss: 0.4398144\n", "\tspeed: 0.0210s/iter; left time: 528.3802s\n", "\titers: 2300, epoch: 2 | loss: 0.5152415\n", "\tspeed: 0.0209s/iter; left time: 525.0045s\n", "\titers: 2400, epoch: 2 | loss: 0.7516017\n", "\tspeed: 0.0210s/iter; left time: 524.6319s\n", "\titers: 2500, epoch: 2 | loss: 0.5357326\n", "\tspeed: 0.0210s/iter; left time: 521.9605s\n", "\titers: 2600, epoch: 2 | loss: 0.5535575\n", "\tspeed: 0.0211s/iter; left time: 522.4071s\n", "\titers: 2700, epoch: 2 | loss: 0.7575374\n", "\tspeed: 0.0210s/iter; left time: 518.0417s\n", "\titers: 2800, epoch: 2 | loss: 0.4881577\n", "\tspeed: 0.0211s/iter; left time: 518.8254s\n", "\titers: 2900, epoch: 2 | loss: 0.5703858\n", "\tspeed: 0.0211s/iter; left time: 515.7224s\n", "\titers: 3000, epoch: 2 | loss: 0.4413440\n", "\tspeed: 0.0211s/iter; left time: 514.2995s\n", "Epoch: 2 cost time: 110.59458589553833\n", "Epoch: 2, Steps: 3043 | Train Loss: 0.6408427 Vali Loss: 0.5647771 Test Loss: 0.5661147\n", "Validation loss decreased (0.566152 --> 0.564777). Saving model ...\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.6696136\n", "\tspeed: 2.8933s/iter; left time: 70148.3740s\n", "\titers: 200, epoch: 3 | loss: 0.6296551\n", "\tspeed: 0.0208s/iter; left time: 502.0992s\n", "\titers: 300, epoch: 3 | loss: 0.5613220\n", "\tspeed: 0.0208s/iter; left time: 501.1569s\n", "\titers: 400, epoch: 3 | loss: 0.6057032\n", "\tspeed: 0.0209s/iter; left time: 500.0316s\n", "\titers: 500, epoch: 3 | loss: 0.5127991\n", "\tspeed: 0.0208s/iter; left time: 496.6682s\n", "\titers: 600, epoch: 3 | loss: 0.6669347\n", "\tspeed: 0.0209s/iter; left time: 495.2880s\n", "\titers: 700, epoch: 3 | loss: 0.4469402\n", "\tspeed: 0.0209s/iter; left time: 494.8161s\n", "\titers: 800, epoch: 3 | loss: 0.4618206\n", "\tspeed: 0.0211s/iter; left time: 497.2289s\n", "\titers: 900, epoch: 3 | loss: 0.5419515\n", "\tspeed: 0.0210s/iter; left time: 491.9180s\n", "\titers: 1000, epoch: 3 | loss: 0.4739613\n", "\tspeed: 0.0209s/iter; left time: 489.0371s\n", "\titers: 1100, epoch: 3 | loss: 1.1039358\n", "\tspeed: 0.0209s/iter; left time: 486.4033s\n", "\titers: 1200, epoch: 3 | loss: 0.5036694\n", "\tspeed: 0.0208s/iter; left time: 481.9038s\n", "\titers: 1300, epoch: 3 | loss: 0.4496817\n", "\tspeed: 0.0209s/iter; left time: 482.5413s\n", "\titers: 1400, epoch: 3 | loss: 0.5365888\n", "\tspeed: 0.0211s/iter; left time: 485.0475s\n", "\titers: 1500, epoch: 3 | loss: 0.8331614\n", "\tspeed: 0.0210s/iter; left time: 479.4608s\n", "\titers: 1600, epoch: 3 | loss: 0.4998763\n", "\tspeed: 0.0209s/iter; left time: 475.7885s\n", "\titers: 1700, epoch: 3 | loss: 0.4907500\n", "\tspeed: 0.0209s/iter; left time: 474.2606s\n", "\titers: 1800, epoch: 3 | loss: 0.6187430\n", "\tspeed: 0.0210s/iter; left time: 473.5931s\n", "\titers: 1900, epoch: 3 | loss: 0.5498903\n", "\tspeed: 0.0210s/iter; left time: 471.3948s\n", "\titers: 2000, epoch: 3 | loss: 0.4807916\n", "\tspeed: 0.0211s/iter; left time: 471.1564s\n", "\titers: 2100, epoch: 3 | loss: 0.6922873\n", "\tspeed: 0.0210s/iter; left time: 467.2739s\n", "\titers: 2200, epoch: 3 | loss: 0.6449802\n", "\tspeed: 0.0210s/iter; left time: 464.5904s\n", "\titers: 2300, epoch: 3 | loss: 0.4853516\n", "\tspeed: 0.0210s/iter; left time: 462.6230s\n", "\titers: 2400, epoch: 3 | loss: 0.8718269\n", "\tspeed: 0.0224s/iter; left time: 492.2234s\n", "\titers: 2500, epoch: 3 | loss: 0.4353878\n", "\tspeed: 0.0218s/iter; left time: 476.1104s\n", "\titers: 2600, epoch: 3 | loss: 0.4804717\n", "\tspeed: 0.0217s/iter; left time: 472.2691s\n", "\titers: 2700, epoch: 3 | loss: 0.5756162\n", "\tspeed: 0.0233s/iter; left time: 503.3379s\n", "\titers: 2800, epoch: 3 | loss: 0.7456284\n", "\tspeed: 0.0218s/iter; left time: 468.9205s\n", "\titers: 2900, epoch: 3 | loss: 0.5476383\n", "\tspeed: 0.0214s/iter; left time: 458.3827s\n", "\titers: 3000, epoch: 3 | loss: 0.5259632\n", "\tspeed: 0.0221s/iter; left time: 471.0426s\n", "Epoch: 3 cost time: 110.35074162483215\n", "Epoch: 3, Steps: 3043 | Train Loss: 0.6258284 Vali Loss: 0.5633367 Test Loss: 0.5647465\n", "Validation loss decreased (0.564777 --> 0.563337). Saving model ...\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.7093579\n", "\tspeed: 2.8666s/iter; left time: 60776.8987s\n", "\titers: 200, epoch: 4 | loss: 0.6705186\n", "\tspeed: 0.0208s/iter; left time: 438.5196s\n", "\titers: 300, epoch: 4 | loss: 0.5196757\n", "\tspeed: 0.0208s/iter; left time: 436.7268s\n", "\titers: 400, epoch: 4 | loss: 0.5922917\n", "\tspeed: 0.0208s/iter; left time: 435.1457s\n", "\titers: 500, epoch: 4 | loss: 0.6753014\n", "\tspeed: 0.0208s/iter; left time: 432.3630s\n", "\titers: 600, epoch: 4 | loss: 0.6751527\n", "\tspeed: 0.0208s/iter; left time: 430.7255s\n", "\titers: 700, epoch: 4 | loss: 0.6638355\n", "\tspeed: 0.0209s/iter; left time: 430.1319s\n", "\titers: 800, epoch: 4 | loss: 0.7944902\n", "\tspeed: 0.0210s/iter; left time: 429.6335s\n", "\titers: 900, epoch: 4 | loss: 0.9003332\n", "\tspeed: 0.0210s/iter; left time: 427.9618s\n", "\titers: 1000, epoch: 4 | loss: 0.4967296\n", "\tspeed: 0.0209s/iter; left time: 423.8480s\n", "\titers: 1100, epoch: 4 | loss: 0.5462276\n", "\tspeed: 0.0209s/iter; left time: 422.7743s\n", "\titers: 1200, epoch: 4 | loss: 0.5903513\n", "\tspeed: 0.0210s/iter; left time: 421.1439s\n", "\titers: 1300, epoch: 4 | loss: 0.7473672\n", "\tspeed: 0.0210s/iter; left time: 420.3233s\n", "\titers: 1400, epoch: 4 | loss: 0.5545889\n", "\tspeed: 0.0209s/iter; left time: 416.1266s\n", "\titers: 1500, epoch: 4 | loss: 0.6661657\n", "\tspeed: 0.0210s/iter; left time: 414.9132s\n", "\titers: 1600, epoch: 4 | loss: 0.5496048\n", "\tspeed: 0.0210s/iter; left time: 413.2948s\n", "\titers: 1700, epoch: 4 | loss: 0.8328064\n", "\tspeed: 0.0210s/iter; left time: 411.7954s\n", "\titers: 1800, epoch: 4 | loss: 0.5000949\n", "\tspeed: 0.0210s/iter; left time: 409.6735s\n", "\titers: 1900, epoch: 4 | loss: 0.4978345\n", "\tspeed: 0.0211s/iter; left time: 409.7220s\n", "\titers: 2000, epoch: 4 | loss: 0.7451611\n", "\tspeed: 0.0210s/iter; left time: 405.6191s\n", "\titers: 2100, epoch: 4 | loss: 0.6040910\n", "\tspeed: 0.0210s/iter; left time: 402.8205s\n", "\titers: 2200, epoch: 4 | loss: 0.5337805\n", "\tspeed: 0.0210s/iter; left time: 401.3659s\n", "\titers: 2300, epoch: 4 | loss: 0.6520462\n", "\tspeed: 0.0211s/iter; left time: 399.9942s\n", "\titers: 2400, epoch: 4 | loss: 0.5280023\n", "\tspeed: 0.0210s/iter; left time: 396.6029s\n", "\titers: 2500, epoch: 4 | loss: 0.5544235\n", "\tspeed: 0.0210s/iter; left time: 395.3015s\n", "\titers: 2600, epoch: 4 | loss: 0.5723540\n", "\tspeed: 0.0210s/iter; left time: 392.8696s\n", "\titers: 2700, epoch: 4 | loss: 0.8112493\n", "\tspeed: 0.0210s/iter; left time: 390.2330s\n", "\titers: 2800, epoch: 4 | loss: 0.6890924\n", "\tspeed: 0.0210s/iter; left time: 388.7815s\n", "\titers: 2900, epoch: 4 | loss: 0.5898009\n", "\tspeed: 0.0211s/iter; left time: 388.6449s\n", "\titers: 3000, epoch: 4 | loss: 0.3794389\n", "\tspeed: 0.0211s/iter; left time: 385.6209s\n", "Epoch: 4 cost time: 109.72375655174255\n", "Epoch: 4, Steps: 3043 | Train Loss: 0.6165352 Vali Loss: 0.5638781 Test Loss: 0.5633605\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 1.25e-05\n", "\titers: 100, epoch: 5 | loss: 1.2997311\n", "\tspeed: 2.9121s/iter; left time: 52880.3173s\n", "\titers: 200, epoch: 5 | loss: 0.4815159\n", "\tspeed: 0.0210s/iter; left time: 379.1491s\n", "\titers: 300, epoch: 5 | loss: 0.6439011\n", "\tspeed: 0.0207s/iter; left time: 372.2544s\n", "\titers: 400, epoch: 5 | loss: 0.7072651\n", "\tspeed: 0.0208s/iter; left time: 372.1090s\n", "\titers: 500, epoch: 5 | loss: 0.6946663\n", "\tspeed: 0.0208s/iter; left time: 368.9618s\n", "\titers: 600, epoch: 5 | loss: 0.4611654\n", "\tspeed: 0.0208s/iter; left time: 367.8793s\n", "\titers: 700, epoch: 5 | loss: 0.5433119\n", "\tspeed: 0.0208s/iter; left time: 365.4074s\n", "\titers: 800, epoch: 5 | loss: 0.6214898\n", "\tspeed: 0.0209s/iter; left time: 364.1723s\n", "\titers: 900, epoch: 5 | loss: 0.5205077\n", "\tspeed: 0.0209s/iter; left time: 362.1572s\n", "\titers: 1000, epoch: 5 | loss: 0.5459392\n", "\tspeed: 0.0209s/iter; left time: 359.9540s\n", "\titers: 1100, epoch: 5 | loss: 0.5517793\n", "\tspeed: 0.0208s/iter; left time: 357.0244s\n", "\titers: 1200, epoch: 5 | loss: 0.5244579\n", "\tspeed: 0.0209s/iter; left time: 355.9635s\n", "\titers: 1300, epoch: 5 | loss: 0.7046168\n", "\tspeed: 0.0209s/iter; left time: 354.1601s\n", "\titers: 1400, epoch: 5 | loss: 0.5522704\n", "\tspeed: 0.0209s/iter; left time: 351.9684s\n", "\titers: 1500, epoch: 5 | loss: 0.8616702\n", "\tspeed: 0.0208s/iter; left time: 348.7932s\n", "\titers: 1600, epoch: 5 | loss: 0.6876750\n", "\tspeed: 0.0209s/iter; left time: 348.2753s\n", "\titers: 1700, epoch: 5 | loss: 0.5252747\n", "\tspeed: 0.0211s/iter; left time: 348.9199s\n", "\titers: 1800, epoch: 5 | loss: 0.5879482\n", "\tspeed: 0.0209s/iter; left time: 344.4041s\n", "\titers: 1900, epoch: 5 | loss: 0.5522950\n", "\tspeed: 0.0210s/iter; left time: 343.0479s\n", "\titers: 2000, epoch: 5 | loss: 0.7783802\n", "\tspeed: 0.0209s/iter; left time: 339.2096s\n", "\titers: 2100, epoch: 5 | loss: 0.5681529\n", "\tspeed: 0.0209s/iter; left time: 337.8327s\n", "\titers: 2200, epoch: 5 | loss: 0.5251057\n", "\tspeed: 0.0211s/iter; left time: 338.8088s\n", "\titers: 2300, epoch: 5 | loss: 0.6210421\n", "\tspeed: 0.0210s/iter; left time: 335.3320s\n", "\titers: 2400, epoch: 5 | loss: 0.5935898\n", "\tspeed: 0.0209s/iter; left time: 331.9647s\n", "\titers: 2500, epoch: 5 | loss: 0.5000907\n", "\tspeed: 0.0210s/iter; left time: 330.7807s\n", "\titers: 2600, epoch: 5 | loss: 0.5918574\n", "\tspeed: 0.0211s/iter; left time: 329.7820s\n", "\titers: 2700, epoch: 5 | loss: 0.5212571\n", "\tspeed: 0.0210s/iter; left time: 327.1265s\n", "\titers: 2800, epoch: 5 | loss: 0.4470948\n", "\tspeed: 0.0210s/iter; left time: 324.3962s\n", "\titers: 2900, epoch: 5 | loss: 0.6216493\n", "\tspeed: 0.0210s/iter; left time: 322.5742s\n", "\titers: 3000, epoch: 5 | loss: 0.8510860\n", "\tspeed: 0.0211s/iter; left time: 321.5503s\n", "Epoch: 5 cost time: 111.23226404190063\n", "Epoch: 5, Steps: 3043 | Train Loss: 0.6111027 Vali Loss: 0.5678769 Test Loss: 0.5678037\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 6.25e-06\n", "\titers: 100, epoch: 6 | loss: 0.6819092\n", "\tspeed: 2.8766s/iter; left time: 43482.4888s\n", "\titers: 200, epoch: 6 | loss: 0.5901961\n", "\tspeed: 0.0207s/iter; left time: 310.2381s\n", "\titers: 300, epoch: 6 | loss: 0.5482473\n", "\tspeed: 0.0208s/iter; left time: 310.4865s\n", "\titers: 400, epoch: 6 | loss: 0.7099934\n", "\tspeed: 0.0208s/iter; left time: 307.9513s\n", "\titers: 500, epoch: 6 | loss: 0.6683566\n", "\tspeed: 0.0209s/iter; left time: 307.2355s\n", "\titers: 600, epoch: 6 | loss: 0.6771836\n", "\tspeed: 0.0209s/iter; left time: 305.1661s\n", "\titers: 700, epoch: 6 | loss: 0.5648811\n", "\tspeed: 0.0208s/iter; left time: 301.9108s\n", "\titers: 800, epoch: 6 | loss: 0.6837624\n", "\tspeed: 0.0208s/iter; left time: 300.1431s\n", "\titers: 900, epoch: 6 | loss: 0.5332974\n", "\tspeed: 0.0209s/iter; left time: 299.7027s\n", "\titers: 1000, epoch: 6 | loss: 0.8406922\n", "\tspeed: 0.0209s/iter; left time: 297.0402s\n", "\titers: 1100, epoch: 6 | loss: 0.6336458\n", "\tspeed: 0.0209s/iter; left time: 294.6490s\n", "\titers: 1200, epoch: 6 | loss: 0.5651897\n", "\tspeed: 0.0209s/iter; left time: 292.7448s\n", "\titers: 1300, epoch: 6 | loss: 0.4525819\n", "\tspeed: 0.0210s/iter; left time: 291.8446s\n", "\titers: 1400, epoch: 6 | loss: 0.6271230\n", "\tspeed: 0.0209s/iter; left time: 288.3074s\n", "\titers: 1500, epoch: 6 | loss: 0.4739183\n", "\tspeed: 0.0209s/iter; left time: 286.5312s\n", "\titers: 1600, epoch: 6 | loss: 0.6339068\n", "\tspeed: 0.0209s/iter; left time: 285.1650s\n", "\titers: 1700, epoch: 6 | loss: 0.7386739\n", "\tspeed: 0.0209s/iter; left time: 283.1360s\n", "\titers: 1800, epoch: 6 | loss: 0.4672417\n", "\tspeed: 0.0209s/iter; left time: 280.8037s\n", "\titers: 1900, epoch: 6 | loss: 0.4538830\n", "\tspeed: 0.0210s/iter; left time: 279.8680s\n", "\titers: 2000, epoch: 6 | loss: 0.4388819\n", "\tspeed: 0.0210s/iter; left time: 276.9400s\n", "\titers: 2100, epoch: 6 | loss: 0.7156850\n", "\tspeed: 0.0210s/iter; left time: 275.5326s\n", "\titers: 2200, epoch: 6 | loss: 0.7622992\n", "\tspeed: 0.0210s/iter; left time: 272.9326s\n", "\titers: 2300, epoch: 6 | loss: 0.5537825\n", "\tspeed: 0.0211s/iter; left time: 272.4756s\n", "\titers: 2400, epoch: 6 | loss: 0.5959505\n", "\tspeed: 0.0210s/iter; left time: 269.6567s\n", "\titers: 2500, epoch: 6 | loss: 0.6860824\n", "\tspeed: 0.0212s/iter; left time: 269.0633s\n", "\titers: 2600, epoch: 6 | loss: 0.9760101\n", "\tspeed: 0.0210s/iter; left time: 264.9789s\n", "\titers: 2700, epoch: 6 | loss: 0.5377415\n", "\tspeed: 0.0210s/iter; left time: 262.4590s\n", "\titers: 2800, epoch: 6 | loss: 0.6139575\n", "\tspeed: 0.0211s/iter; left time: 261.4569s\n", "\titers: 2900, epoch: 6 | loss: 0.6605660\n", "\tspeed: 0.0210s/iter; left time: 258.4351s\n", "\titers: 3000, epoch: 6 | loss: 0.4300097\n", "\tspeed: 0.0210s/iter; left time: 256.7175s\n", "Epoch: 6 cost time: 109.54686999320984\n", "Epoch: 6, Steps: 3043 | Train Loss: 0.6081107 Vali Loss: 0.5692307 Test Loss: 0.5683640\n", "EarlyStopping counter: 3 out of 3\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_96_iTransformer_custom_ftM_sl96_ll48_pl96_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27783\n", "test shape: (27783, 1, 96, 15) (27783, 1, 96, 15)\n", "test shape: (27783, 96, 15) (27783, 96, 15)\n", "mse:0.5647459030151367, mae:0.4116513729095459\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_96',\n", " model='iTransformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=96,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=21,\n", " dec_in=21,\n", " c_out=21,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=10,\n", " batch_size=32,\n", " patience=3,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [], "source": [ "torch.cuda.empty_cache()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_96 Model: Autoformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 96 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 10 Batch Size: 32 \n", " Patience: 3 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_96_Autoformer_custom_ftM_sl96_ll48_pl96_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97382\n", "val 13845\n", "test 27783\n", "\titers: 100, epoch: 1 | loss: 0.6577045\n", "\tspeed: 0.7708s/iter; left time: 23378.0364s\n", "\titers: 200, epoch: 1 | loss: 0.5169938\n", "\tspeed: 0.2633s/iter; left time: 7959.9620s\n", "\titers: 300, epoch: 1 | loss: 0.6188923\n", "\tspeed: 0.2637s/iter; left time: 7946.8571s\n", "\titers: 400, epoch: 1 | loss: 0.6942432\n", "\tspeed: 0.2641s/iter; left time: 7930.4579s\n", "\titers: 500, epoch: 1 | loss: 0.6571175\n", "\tspeed: 0.2643s/iter; left time: 7910.7356s\n", "\titers: 600, epoch: 1 | loss: 0.8084824\n", "\tspeed: 0.2675s/iter; left time: 7979.3065s\n", "\titers: 700, epoch: 1 | loss: 0.6384601\n", "\tspeed: 0.2665s/iter; left time: 7922.8434s\n", "\titers: 800, epoch: 1 | loss: 0.4352117\n", "\tspeed: 0.2679s/iter; left time: 7938.9454s\n", "\titers: 900, epoch: 1 | loss: 0.7034445\n", "\tspeed: 0.2675s/iter; left time: 7900.8778s\n", "\titers: 1000, epoch: 1 | loss: 0.6679058\n", "\tspeed: 0.2686s/iter; left time: 7906.2844s\n", "\titers: 1100, epoch: 1 | loss: 0.8040156\n", "\tspeed: 0.2698s/iter; left time: 7913.0021s\n", "\titers: 1200, epoch: 1 | loss: 0.7698831\n", "\tspeed: 0.2688s/iter; left time: 7857.6708s\n", "\titers: 1300, epoch: 1 | loss: 0.6199859\n", "\tspeed: 0.2662s/iter; left time: 7753.7408s\n", "\titers: 1400, epoch: 1 | loss: 0.6210366\n", "\tspeed: 0.2678s/iter; left time: 7774.2841s\n", "\titers: 1500, epoch: 1 | loss: 0.5534190\n", "\tspeed: 0.2675s/iter; left time: 7737.9176s\n", "\titers: 1600, epoch: 1 | loss: 0.5438284\n", "\tspeed: 0.2650s/iter; left time: 7640.4733s\n", "\titers: 1700, epoch: 1 | loss: 0.8753198\n", "\tspeed: 0.2651s/iter; left time: 7616.3030s\n", "\titers: 1800, epoch: 1 | loss: 0.5979630\n", "\tspeed: 0.2653s/iter; left time: 7594.7304s\n", "\titers: 1900, epoch: 1 | loss: 0.8194704\n", "\tspeed: 0.2655s/iter; left time: 7574.1825s\n", "\titers: 2000, epoch: 1 | loss: 0.7243359\n", "\tspeed: 0.2659s/iter; left time: 7559.9166s\n", "\titers: 2100, epoch: 1 | loss: 0.6342434\n", "\tspeed: 0.2661s/iter; left time: 7538.2843s\n", "\titers: 2200, epoch: 1 | loss: 0.8345943\n", "\tspeed: 0.2663s/iter; left time: 7518.9383s\n", "\titers: 2300, epoch: 1 | loss: 0.7123264\n", "\tspeed: 0.2711s/iter; left time: 7626.9426s\n", "\titers: 2400, epoch: 1 | loss: 0.5129704\n", "\tspeed: 0.2679s/iter; left time: 7508.7041s\n", "\titers: 2500, epoch: 1 | loss: 0.4827012\n", "\tspeed: 0.2654s/iter; left time: 7412.1736s\n", "\titers: 2600, epoch: 1 | loss: 0.5017795\n", "\tspeed: 0.2664s/iter; left time: 7413.0406s\n", "\titers: 2700, epoch: 1 | loss: 0.5551471\n", "\tspeed: 0.2654s/iter; left time: 7359.0876s\n", "\titers: 2800, epoch: 1 | loss: 0.9128373\n", "\tspeed: 0.2655s/iter; left time: 7335.0150s\n", "\titers: 2900, epoch: 1 | loss: 0.4800449\n", "\tspeed: 0.2656s/iter; left time: 7311.0381s\n", "\titers: 3000, epoch: 1 | loss: 0.8959818\n", "\tspeed: 0.2657s/iter; left time: 7288.6623s\n", "Epoch: 1 cost time: 861.6629056930542\n", "Epoch: 1, Steps: 3043 | Train Loss: 0.6441976 Vali Loss: 0.6084214 Test Loss: 0.5987855\n", "Validation loss decreased (inf --> 0.608421). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.5819595\n", "\tspeed: 9.9217s/iter; left time: 270743.9709s\n", "\titers: 200, epoch: 2 | loss: 0.5215618\n", "\tspeed: 0.2614s/iter; left time: 7106.6557s\n", "\titers: 300, epoch: 2 | loss: 0.5484762\n", "\tspeed: 0.2624s/iter; left time: 7108.9330s\n", "\titers: 400, epoch: 2 | loss: 0.6882823\n", "\tspeed: 0.2629s/iter; left time: 7094.3497s\n", "\titers: 500, epoch: 2 | loss: 0.6901736\n", "\tspeed: 0.2664s/iter; left time: 7163.0262s\n", "\titers: 600, epoch: 2 | loss: 0.8072841\n", "\tspeed: 0.2633s/iter; left time: 7054.6037s\n", "\titers: 700, epoch: 2 | loss: 0.7197369\n", "\tspeed: 0.2640s/iter; left time: 7045.5931s\n", "\titers: 800, epoch: 2 | loss: 0.8606914\n", "\tspeed: 0.2641s/iter; left time: 7020.7009s\n", "\titers: 900, epoch: 2 | loss: 0.6988379\n", "\tspeed: 0.2648s/iter; left time: 7013.4242s\n", "\titers: 1000, epoch: 2 | loss: 0.4492415\n", "\tspeed: 0.2646s/iter; left time: 6981.1000s\n", "\titers: 1100, epoch: 2 | loss: 0.6589584\n", "\tspeed: 0.2669s/iter; left time: 7016.1383s\n", "\titers: 1200, epoch: 2 | loss: 0.5373643\n", "\tspeed: 0.2652s/iter; left time: 6945.2375s\n", "\titers: 1300, epoch: 2 | loss: 0.6475531\n", "\tspeed: 0.2653s/iter; left time: 6921.7638s\n", "\titers: 1400, epoch: 2 | loss: 0.7830465\n", "\tspeed: 0.2651s/iter; left time: 6888.4890s\n", "\titers: 1500, epoch: 2 | loss: 0.5539672\n", "\tspeed: 0.2650s/iter; left time: 6860.7164s\n", "\titers: 1600, epoch: 2 | loss: 0.4431458\n", "\tspeed: 0.2674s/iter; left time: 6895.5351s\n", "\titers: 1700, epoch: 2 | loss: 0.4886265\n", "\tspeed: 0.2688s/iter; left time: 6905.6024s\n", "\titers: 1800, epoch: 2 | loss: 0.5912706\n", "\tspeed: 0.2655s/iter; left time: 6794.5853s\n", "\titers: 1900, epoch: 2 | loss: 0.5040628\n", "\tspeed: 0.2655s/iter; left time: 6765.9082s\n", "\titers: 2000, epoch: 2 | loss: 0.9237311\n", "\tspeed: 0.2655s/iter; left time: 6740.8672s\n", "\titers: 2100, epoch: 2 | loss: 0.4412315\n", "\tspeed: 0.2664s/iter; left time: 6736.0076s\n", "\titers: 2200, epoch: 2 | loss: 0.6789261\n", "\tspeed: 0.2656s/iter; left time: 6689.8923s\n", "\titers: 2300, epoch: 2 | loss: 0.6317608\n", "\tspeed: 0.2657s/iter; left time: 6666.3269s\n", "\titers: 2400, epoch: 2 | loss: 0.6600324\n", "\tspeed: 0.2657s/iter; left time: 6639.5007s\n", "\titers: 2500, epoch: 2 | loss: 0.4690264\n", "\tspeed: 0.2668s/iter; left time: 6639.4289s\n", "\titers: 2600, epoch: 2 | loss: 0.4697467\n", "\tspeed: 0.2659s/iter; left time: 6591.9781s\n", "\titers: 2700, epoch: 2 | loss: 0.4115496\n", "\tspeed: 0.2656s/iter; left time: 6556.6838s\n", "\titers: 2800, epoch: 2 | loss: 0.4825737\n", "\tspeed: 0.2657s/iter; left time: 6532.8420s\n", "\titers: 2900, epoch: 2 | loss: 0.4350396\n", "\tspeed: 0.2662s/iter; left time: 6519.3661s\n", "\titers: 3000, epoch: 2 | loss: 0.6007582\n", "\tspeed: 0.2659s/iter; left time: 6485.8501s\n", "Epoch: 2 cost time: 854.7503187656403\n", "Epoch: 2, Steps: 3043 | Train Loss: 0.5768570 Vali Loss: 0.6141741 Test Loss: 0.5955476\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.4020886\n", "\tspeed: 9.8951s/iter; left time: 239906.5352s\n", "\titers: 200, epoch: 3 | loss: 0.6080592\n", "\tspeed: 0.2633s/iter; left time: 6358.0822s\n", "\titers: 300, epoch: 3 | loss: 0.4361681\n", "\tspeed: 0.2628s/iter; left time: 6319.1420s\n", "\titers: 400, epoch: 3 | loss: 0.4928976\n", "\tspeed: 0.2646s/iter; left time: 6335.6633s\n", "\titers: 500, epoch: 3 | loss: 0.8539575\n", "\tspeed: 0.2649s/iter; left time: 6316.3034s\n", "\titers: 600, epoch: 3 | loss: 0.9211981\n", "\tspeed: 0.2641s/iter; left time: 6270.6140s\n", "\titers: 700, epoch: 3 | loss: 0.5495131\n", "\tspeed: 0.2642s/iter; left time: 6246.4140s\n", "\titers: 800, epoch: 3 | loss: 0.6721783\n", "\tspeed: 0.2654s/iter; left time: 6249.6178s\n", "\titers: 900, epoch: 3 | loss: 0.4905680\n", "\tspeed: 0.2646s/iter; left time: 6202.6620s\n", "\titers: 1000, epoch: 3 | loss: 0.6015739\n", "\tspeed: 0.2645s/iter; left time: 6175.8354s\n", "\titers: 1100, epoch: 3 | loss: 0.4197837\n", "\tspeed: 0.2652s/iter; left time: 6165.1898s\n", "\titers: 1200, epoch: 3 | loss: 0.5154137\n", "\tspeed: 0.2656s/iter; left time: 6146.4720s\n", "\titers: 1300, epoch: 3 | loss: 0.4715622\n", "\tspeed: 0.2656s/iter; left time: 6120.2832s\n", "\titers: 1400, epoch: 3 | loss: 0.3848091\n", "\tspeed: 0.2654s/iter; left time: 6090.4535s\n", "\titers: 1500, epoch: 3 | loss: 0.5434434\n", "\tspeed: 0.2660s/iter; left time: 6076.0615s\n", "\titers: 1600, epoch: 3 | loss: 0.3778383\n", "\tspeed: 0.2662s/iter; left time: 6053.7544s\n", "\titers: 1700, epoch: 3 | loss: 0.4494368\n", "\tspeed: 0.2661s/iter; left time: 6026.3618s\n", "\titers: 1800, epoch: 3 | loss: 0.5124578\n", "\tspeed: 0.2665s/iter; left time: 6008.0751s\n", "\titers: 1900, epoch: 3 | loss: 0.4257696\n", "\tspeed: 0.2661s/iter; left time: 5971.9836s\n", "\titers: 2000, epoch: 3 | loss: 0.7486026\n", "\tspeed: 0.2664s/iter; left time: 5951.8406s\n", "\titers: 2100, epoch: 3 | loss: 0.6845822\n", "\tspeed: 0.2665s/iter; left time: 5928.1156s\n", "\titers: 2200, epoch: 3 | loss: 0.5184685\n", "\tspeed: 0.2667s/iter; left time: 5905.0647s\n", "\titers: 2300, epoch: 3 | loss: 0.6722922\n", "\tspeed: 0.2679s/iter; left time: 5905.3097s\n", "\titers: 2400, epoch: 3 | loss: 0.3545842\n", "\tspeed: 0.2665s/iter; left time: 5849.0628s\n", "\titers: 2500, epoch: 3 | loss: 0.4881207\n", "\tspeed: 0.2661s/iter; left time: 5813.0523s\n", "\titers: 2600, epoch: 3 | loss: 0.6460727\n", "\tspeed: 0.2661s/iter; left time: 5785.7818s\n", "\titers: 2700, epoch: 3 | loss: 0.3996527\n", "\tspeed: 0.2663s/iter; left time: 5764.4156s\n", "\titers: 2800, epoch: 3 | loss: 0.3836053\n", "\tspeed: 0.2661s/iter; left time: 5732.9430s\n", "\titers: 2900, epoch: 3 | loss: 0.8350880\n", "\tspeed: 0.2662s/iter; left time: 5709.4353s\n", "\titers: 3000, epoch: 3 | loss: 0.4462961\n", "\tspeed: 0.2662s/iter; left time: 5681.5142s\n", "Epoch: 3 cost time: 854.8448162078857\n", "Epoch: 3, Steps: 3043 | Train Loss: 0.5251227 Vali Loss: 0.6294528 Test Loss: 0.6055971\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.4059069\n", "\tspeed: 9.8827s/iter; left time: 209532.2302s\n", "\titers: 200, epoch: 4 | loss: 0.5919073\n", "\tspeed: 0.2617s/iter; left time: 5522.6177s\n", "\titers: 300, epoch: 4 | loss: 0.4738431\n", "\tspeed: 0.2625s/iter; left time: 5512.5772s\n", "\titers: 400, epoch: 4 | loss: 0.7515507\n", "\tspeed: 0.2631s/iter; left time: 5498.9373s\n", "\titers: 500, epoch: 4 | loss: 0.4512351\n", "\tspeed: 0.2635s/iter; left time: 5480.5270s\n", "\titers: 600, epoch: 4 | loss: 0.3738199\n", "\tspeed: 0.2637s/iter; left time: 5460.1374s\n", "\titers: 700, epoch: 4 | loss: 0.6775550\n", "\tspeed: 0.2640s/iter; left time: 5439.5769s\n", "\titers: 800, epoch: 4 | loss: 0.4694403\n", "\tspeed: 0.2645s/iter; left time: 5423.6278s\n", "\titers: 900, epoch: 4 | loss: 0.5348016\n", "\tspeed: 0.2650s/iter; left time: 5405.7285s\n", "\titers: 1000, epoch: 4 | loss: 0.5859245\n", "\tspeed: 0.2648s/iter; left time: 5376.8213s\n", "\titers: 1100, epoch: 4 | loss: 0.6184357\n", "\tspeed: 0.2648s/iter; left time: 5348.6326s\n", "\titers: 1200, epoch: 4 | loss: 0.5943491\n", "\tspeed: 0.2649s/iter; left time: 5325.8478s\n", "\titers: 1300, epoch: 4 | loss: 0.4917923\n", "\tspeed: 0.2653s/iter; left time: 5305.8145s\n", "\titers: 1400, epoch: 4 | loss: 0.8359009\n", "\tspeed: 0.2653s/iter; left time: 5279.9703s\n", "\titers: 1500, epoch: 4 | loss: 0.4369743\n", "\tspeed: 0.2655s/iter; left time: 5256.6252s\n", "\titers: 1600, epoch: 4 | loss: 0.3325568\n", "\tspeed: 0.2654s/iter; left time: 5229.4019s\n", "\titers: 1700, epoch: 4 | loss: 0.8794593\n", "\tspeed: 0.2656s/iter; left time: 5207.2065s\n", "\titers: 1800, epoch: 4 | loss: 0.7657095\n", "\tspeed: 0.2655s/iter; left time: 5178.1003s\n", "\titers: 1900, epoch: 4 | loss: 0.4387804\n", "\tspeed: 0.2654s/iter; left time: 5149.0256s\n", "\titers: 2000, epoch: 4 | loss: 0.3438526\n", "\tspeed: 0.2658s/iter; left time: 5129.5624s\n", "\titers: 2100, epoch: 4 | loss: 0.4200721\n", "\tspeed: 0.2656s/iter; left time: 5099.9431s\n", "\titers: 2200, epoch: 4 | loss: 0.3257120\n", "\tspeed: 0.2658s/iter; left time: 5077.4079s\n", "\titers: 2300, epoch: 4 | loss: 0.3471428\n", "\tspeed: 0.2658s/iter; left time: 5050.3036s\n", "\titers: 2400, epoch: 4 | loss: 0.4730814\n", "\tspeed: 0.2662s/iter; left time: 5031.5914s\n", "\titers: 2500, epoch: 4 | loss: 0.8215632\n", "\tspeed: 0.2662s/iter; left time: 5005.9417s\n", "\titers: 2600, epoch: 4 | loss: 0.6268657\n", "\tspeed: 0.2668s/iter; left time: 4989.6894s\n", "\titers: 2700, epoch: 4 | loss: 0.5523760\n", "\tspeed: 0.2659s/iter; left time: 4946.4817s\n", "\titers: 2800, epoch: 4 | loss: 0.6757266\n", "\tspeed: 0.2670s/iter; left time: 4940.2427s\n", "\titers: 2900, epoch: 4 | loss: 0.3970938\n", "\tspeed: 0.2663s/iter; left time: 4900.6533s\n", "\titers: 3000, epoch: 4 | loss: 0.4735283\n", "\tspeed: 0.2659s/iter; left time: 4866.3851s\n", "Epoch: 4 cost time: 853.5908789634705\n", "Epoch: 4, Steps: 3043 | Train Loss: 0.4982366 Vali Loss: 0.6389076 Test Loss: 0.6108061\n", "EarlyStopping counter: 3 out of 3\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_96_Autoformer_custom_ftM_sl96_ll48_pl96_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27783\n", "test shape: (27783, 1, 96, 15) (27783, 1, 96, 15)\n", "test shape: (27783, 96, 15) (27783, 96, 15)\n", "mse:0.5987857580184937, mae:0.46742522716522217\n" ] } ], "source": [ "\n", "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_96',\n", " model='Autoformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=96,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=5,\n", " batch_size=32,\n", " patience=2,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_192 Model: iTransformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 192 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 10 Batch Size: 64 \n", " Patience: 3 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_192_iTransformer_custom_ftM_sl96_ll48_pl192_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97286\n", "val 13749\n", "test 27687\n", "\titers: 100, epoch: 1 | loss: 0.6319987\n", "\tspeed: 0.5232s/iter; left time: 7900.9784s\n", "\titers: 200, epoch: 1 | loss: 0.6519098\n", "\tspeed: 0.0332s/iter; left time: 498.3861s\n", "\titers: 300, epoch: 1 | loss: 0.7134100\n", "\tspeed: 0.0332s/iter; left time: 494.6092s\n", "\titers: 400, epoch: 1 | loss: 0.7306281\n", "\tspeed: 0.0333s/iter; left time: 492.1437s\n", "\titers: 500, epoch: 1 | loss: 0.6799347\n", "\tspeed: 0.0333s/iter; left time: 489.0855s\n", "\titers: 600, epoch: 1 | loss: 0.7081889\n", "\tspeed: 0.0334s/iter; left time: 487.9958s\n", "\titers: 700, epoch: 1 | loss: 0.7133664\n", "\tspeed: 0.0334s/iter; left time: 484.3776s\n", "\titers: 800, epoch: 1 | loss: 0.6814362\n", "\tspeed: 0.0337s/iter; left time: 485.7070s\n", "\titers: 900, epoch: 1 | loss: 0.8842442\n", "\tspeed: 0.0336s/iter; left time: 480.5483s\n", "\titers: 1000, epoch: 1 | loss: 0.8665214\n", "\tspeed: 0.0337s/iter; left time: 478.0717s\n", "\titers: 1100, epoch: 1 | loss: 0.7478440\n", "\tspeed: 0.0337s/iter; left time: 474.9781s\n", "\titers: 1200, epoch: 1 | loss: 0.5902256\n", "\tspeed: 0.0336s/iter; left time: 470.4116s\n", "\titers: 1300, epoch: 1 | loss: 0.6360949\n", "\tspeed: 0.0340s/iter; left time: 471.9562s\n", "\titers: 1400, epoch: 1 | loss: 0.8443511\n", "\tspeed: 0.0350s/iter; left time: 482.5420s\n", "\titers: 1500, epoch: 1 | loss: 0.6294640\n", "\tspeed: 0.0335s/iter; left time: 459.6415s\n", "Epoch: 1 cost time: 100.66374659538269\n", "Epoch: 1, Steps: 1520 | Train Loss: 0.7326445 Vali Loss: 0.6412311 Test Loss: 0.6364308\n", "Validation loss decreased (inf --> 0.641231). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.6624209\n", "\tspeed: 2.9782s/iter; left time: 40447.2734s\n", "\titers: 200, epoch: 2 | loss: 0.8541698\n", "\tspeed: 0.0334s/iter; left time: 450.1213s\n", "\titers: 300, epoch: 2 | loss: 0.7545565\n", "\tspeed: 0.0333s/iter; left time: 445.6128s\n", "\titers: 400, epoch: 2 | loss: 0.7163254\n", "\tspeed: 0.0333s/iter; left time: 442.4437s\n", "\titers: 500, epoch: 2 | loss: 0.7428417\n", "\tspeed: 0.0333s/iter; left time: 438.8811s\n", "\titers: 600, epoch: 2 | loss: 0.7875831\n", "\tspeed: 0.0333s/iter; left time: 435.6613s\n", "\titers: 700, epoch: 2 | loss: 0.5937236\n", "\tspeed: 0.0333s/iter; left time: 432.3934s\n", "\titers: 800, epoch: 2 | loss: 0.5903151\n", "\tspeed: 0.0333s/iter; left time: 428.9017s\n", "\titers: 900, epoch: 2 | loss: 0.7411769\n", "\tspeed: 0.0333s/iter; left time: 425.8359s\n", "\titers: 1000, epoch: 2 | loss: 0.7320126\n", "\tspeed: 0.0333s/iter; left time: 422.1525s\n", "\titers: 1100, epoch: 2 | loss: 0.6278763\n", "\tspeed: 0.0333s/iter; left time: 418.9368s\n", "\titers: 1200, epoch: 2 | loss: 0.7114463\n", "\tspeed: 0.0335s/iter; left time: 417.9726s\n", "\titers: 1300, epoch: 2 | loss: 0.5833712\n", "\tspeed: 0.0334s/iter; left time: 413.2944s\n", "\titers: 1400, epoch: 2 | loss: 0.7847311\n", "\tspeed: 0.0335s/iter; left time: 410.9863s\n", "\titers: 1500, epoch: 2 | loss: 0.7760926\n", "\tspeed: 0.0334s/iter; left time: 406.9325s\n", "Epoch: 2 cost time: 98.14404916763306\n", "Epoch: 2, Steps: 1520 | Train Loss: 0.7100494 Vali Loss: 0.6343053 Test Loss: 0.6284881\n", "Validation loss decreased (0.641231 --> 0.634305). Saving model ...\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.7196248\n", "\tspeed: 2.9591s/iter; left time: 35689.1833s\n", "\titers: 200, epoch: 3 | loss: 0.5317460\n", "\tspeed: 0.0332s/iter; left time: 397.2627s\n", "\titers: 300, epoch: 3 | loss: 0.6965477\n", "\tspeed: 0.0332s/iter; left time: 394.1483s\n", "\titers: 400, epoch: 3 | loss: 0.6433371\n", "\tspeed: 0.0333s/iter; left time: 391.6033s\n", "\titers: 500, epoch: 3 | loss: 0.6970401\n", "\tspeed: 0.0333s/iter; left time: 388.4838s\n", "\titers: 600, epoch: 3 | loss: 0.6624950\n", "\tspeed: 0.0333s/iter; left time: 384.8810s\n", "\titers: 700, epoch: 3 | loss: 0.6381147\n", "\tspeed: 0.0333s/iter; left time: 381.6429s\n", "\titers: 800, epoch: 3 | loss: 0.6768793\n", "\tspeed: 0.0333s/iter; left time: 378.3567s\n", "\titers: 900, epoch: 3 | loss: 0.8177049\n", "\tspeed: 0.0333s/iter; left time: 375.2096s\n", "\titers: 1000, epoch: 3 | loss: 0.7492304\n", "\tspeed: 0.0333s/iter; left time: 371.6649s\n", "\titers: 1100, epoch: 3 | loss: 0.8265042\n", "\tspeed: 0.0333s/iter; left time: 368.4742s\n", "\titers: 1200, epoch: 3 | loss: 0.7252435\n", "\tspeed: 0.0333s/iter; left time: 364.9555s\n", "\titers: 1300, epoch: 3 | loss: 0.7292184\n", "\tspeed: 0.0333s/iter; left time: 361.6891s\n", "\titers: 1400, epoch: 3 | loss: 0.7192597\n", "\tspeed: 0.0333s/iter; left time: 358.2836s\n", "\titers: 1500, epoch: 3 | loss: 0.7718540\n", "\tspeed: 0.0333s/iter; left time: 355.0868s\n", "Epoch: 3 cost time: 98.33910894393921\n", "Epoch: 3, Steps: 1520 | Train Loss: 0.6965855 Vali Loss: 0.6344623 Test Loss: 0.6275782\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.5943870\n", "\tspeed: 2.9741s/iter; left time: 31349.6367s\n", "\titers: 200, epoch: 4 | loss: 0.8553151\n", "\tspeed: 0.0332s/iter; left time: 346.8918s\n", "\titers: 300, epoch: 4 | loss: 0.7277061\n", "\tspeed: 0.0333s/iter; left time: 344.5744s\n", "\titers: 400, epoch: 4 | loss: 0.6289434\n", "\tspeed: 0.0333s/iter; left time: 340.8296s\n", "\titers: 500, epoch: 4 | loss: 0.6766016\n", "\tspeed: 0.0333s/iter; left time: 337.8308s\n", "\titers: 600, epoch: 4 | loss: 0.7626061\n", "\tspeed: 0.0336s/iter; left time: 336.9265s\n", "\titers: 700, epoch: 4 | loss: 0.5919689\n", "\tspeed: 0.0333s/iter; left time: 331.0604s\n", "\titers: 800, epoch: 4 | loss: 0.8799984\n", "\tspeed: 0.0333s/iter; left time: 327.7917s\n", "\titers: 900, epoch: 4 | loss: 0.7135631\n", "\tspeed: 0.0333s/iter; left time: 324.4428s\n", "\titers: 1000, epoch: 4 | loss: 0.7266175\n", "\tspeed: 0.0333s/iter; left time: 321.0754s\n", "\titers: 1100, epoch: 4 | loss: 0.6111798\n", "\tspeed: 0.0333s/iter; left time: 317.9470s\n", "\titers: 1200, epoch: 4 | loss: 0.9278108\n", "\tspeed: 0.0333s/iter; left time: 314.1754s\n", "\titers: 1300, epoch: 4 | loss: 0.7943137\n", "\tspeed: 0.0333s/iter; left time: 311.0693s\n", "\titers: 1400, epoch: 4 | loss: 0.7434091\n", "\tspeed: 0.0333s/iter; left time: 307.7517s\n", "\titers: 1500, epoch: 4 | loss: 0.5963808\n", "\tspeed: 0.0333s/iter; left time: 304.4087s\n", "Epoch: 4 cost time: 97.77491545677185\n", "Epoch: 4, Steps: 1520 | Train Loss: 0.6892050 Vali Loss: 0.6363823 Test Loss: 0.6287793\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 1.25e-05\n", "\titers: 100, epoch: 5 | loss: 0.6675056\n", "\tspeed: 2.9677s/iter; left time: 26771.8280s\n", "\titers: 200, epoch: 5 | loss: 0.8672423\n", "\tspeed: 0.0331s/iter; left time: 295.2150s\n", "\titers: 300, epoch: 5 | loss: 0.6300294\n", "\tspeed: 0.0333s/iter; left time: 293.7116s\n", "\titers: 400, epoch: 5 | loss: 0.5700001\n", "\tspeed: 0.0332s/iter; left time: 289.6655s\n", "\titers: 500, epoch: 5 | loss: 0.7069750\n", "\tspeed: 0.0332s/iter; left time: 286.3397s\n", "\titers: 600, epoch: 5 | loss: 0.7188261\n", "\tspeed: 0.0333s/iter; left time: 283.9139s\n", "\titers: 700, epoch: 5 | loss: 0.5915625\n", "\tspeed: 0.0333s/iter; left time: 280.4399s\n", "\titers: 800, epoch: 5 | loss: 0.5914523\n", "\tspeed: 0.0333s/iter; left time: 277.0763s\n", "\titers: 900, epoch: 5 | loss: 0.6961073\n", "\tspeed: 0.0333s/iter; left time: 273.8193s\n", "\titers: 1000, epoch: 5 | loss: 0.6148636\n", "\tspeed: 0.0333s/iter; left time: 270.5285s\n", "\titers: 1100, epoch: 5 | loss: 0.5575275\n", "\tspeed: 0.0333s/iter; left time: 267.1082s\n", "\titers: 1200, epoch: 5 | loss: 0.6933200\n", "\tspeed: 0.0333s/iter; left time: 263.7321s\n", "\titers: 1300, epoch: 5 | loss: 0.6726676\n", "\tspeed: 0.0336s/iter; left time: 262.4579s\n", "\titers: 1400, epoch: 5 | loss: 0.7680035\n", "\tspeed: 0.0333s/iter; left time: 257.1378s\n", "\titers: 1500, epoch: 5 | loss: 0.6785454\n", "\tspeed: 0.0333s/iter; left time: 253.8387s\n", "Epoch: 5 cost time: 97.99383234977722\n", "Epoch: 5, Steps: 1520 | Train Loss: 0.6850839 Vali Loss: 0.6403214 Test Loss: 0.6298515\n", "EarlyStopping counter: 3 out of 3\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_192_iTransformer_custom_ftM_sl96_ll48_pl192_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27687\n", "test shape: (27687, 1, 192, 15) (27687, 1, 192, 15)\n", "test shape: (27687, 192, 15) (27687, 192, 15)\n", "mse:0.6284869909286499, mae:0.4485326409339905\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_192',\n", " model='iTransformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=192,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=10,\n", " batch_size=64,\n", " patience=3,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_336 Model: iTransformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 336 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 10 Batch Size: 64 \n", " Patience: 3 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_336_iTransformer_custom_ftM_sl96_ll48_pl336_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97142\n", "val 13605\n", "test 27543\n", "\titers: 100, epoch: 1 | loss: 0.7931052\n", "\tspeed: 0.5044s/iter; left time: 7602.2585s\n", "\titers: 200, epoch: 1 | loss: 0.9427231\n", "\tspeed: 0.0339s/iter; left time: 506.9240s\n", "\titers: 300, epoch: 1 | loss: 0.8559125\n", "\tspeed: 0.0337s/iter; left time: 501.7555s\n", "\titers: 400, epoch: 1 | loss: 0.8429323\n", "\tspeed: 0.0339s/iter; left time: 500.8789s\n", "\titers: 500, epoch: 1 | loss: 0.6502054\n", "\tspeed: 0.0339s/iter; left time: 497.8299s\n", "\titers: 600, epoch: 1 | loss: 0.8180294\n", "\tspeed: 0.0339s/iter; left time: 493.9033s\n", "\titers: 700, epoch: 1 | loss: 0.6818818\n", "\tspeed: 0.0341s/iter; left time: 493.6200s\n", "\titers: 800, epoch: 1 | loss: 0.7155097\n", "\tspeed: 0.0339s/iter; left time: 487.2971s\n", "\titers: 900, epoch: 1 | loss: 0.7881265\n", "\tspeed: 0.0341s/iter; left time: 487.0863s\n", "\titers: 1000, epoch: 1 | loss: 0.7692885\n", "\tspeed: 0.0340s/iter; left time: 482.5184s\n", "\titers: 1100, epoch: 1 | loss: 0.7469522\n", "\tspeed: 0.0341s/iter; left time: 479.5335s\n", "\titers: 1200, epoch: 1 | loss: 0.7861203\n", "\tspeed: 0.0342s/iter; left time: 477.2647s\n", "\titers: 1300, epoch: 1 | loss: 0.7912968\n", "\tspeed: 0.0342s/iter; left time: 474.0178s\n", "\titers: 1400, epoch: 1 | loss: 0.7484691\n", "\tspeed: 0.0342s/iter; left time: 471.1634s\n", "\titers: 1500, epoch: 1 | loss: 0.7021341\n", "\tspeed: 0.0341s/iter; left time: 466.8105s\n", "Epoch: 1 cost time: 99.75795102119446\n", "Epoch: 1, Steps: 1517 | Train Loss: 0.7778421 Vali Loss: 0.6846656 Test Loss: 0.6833909\n", "Validation loss decreased (inf --> 0.684666). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.7090718\n", "\tspeed: 2.8927s/iter; left time: 39207.0833s\n", "\titers: 200, epoch: 2 | loss: 0.6375023\n", "\tspeed: 0.0338s/iter; left time: 454.9829s\n", "\titers: 300, epoch: 2 | loss: 0.7920266\n", "\tspeed: 0.0339s/iter; left time: 452.5089s\n", "\titers: 400, epoch: 2 | loss: 0.7255198\n", "\tspeed: 0.0343s/iter; left time: 454.3789s\n", "\titers: 500, epoch: 2 | loss: 0.8125066\n", "\tspeed: 0.0340s/iter; left time: 447.0485s\n", "\titers: 600, epoch: 2 | loss: 0.8357267\n", "\tspeed: 0.0340s/iter; left time: 443.7983s\n", "\titers: 700, epoch: 2 | loss: 0.7793323\n", "\tspeed: 0.0342s/iter; left time: 442.4736s\n", "\titers: 800, epoch: 2 | loss: 0.6457019\n", "\tspeed: 0.0341s/iter; left time: 438.5784s\n", "\titers: 900, epoch: 2 | loss: 0.8073530\n", "\tspeed: 0.0343s/iter; left time: 436.8322s\n", "\titers: 1000, epoch: 2 | loss: 0.8109322\n", "\tspeed: 0.0340s/iter; left time: 430.7458s\n", "\titers: 1100, epoch: 2 | loss: 0.7235849\n", "\tspeed: 0.0341s/iter; left time: 428.7107s\n", "\titers: 1200, epoch: 2 | loss: 0.7382988\n", "\tspeed: 0.0346s/iter; left time: 431.1741s\n", "\titers: 1300, epoch: 2 | loss: 0.6599451\n", "\tspeed: 0.0346s/iter; left time: 426.9751s\n", "\titers: 1400, epoch: 2 | loss: 0.5809569\n", "\tspeed: 0.0347s/iter; left time: 425.8021s\n", "\titers: 1500, epoch: 2 | loss: 0.8049288\n", "\tspeed: 0.0340s/iter; left time: 412.8650s\n", "Epoch: 2 cost time: 97.9995744228363\n", "Epoch: 2, Steps: 1517 | Train Loss: 0.7561316 Vali Loss: 0.6857628 Test Loss: 0.6826689\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.7773961\n", "\tspeed: 2.9055s/iter; left time: 34973.7906s\n", "\titers: 200, epoch: 3 | loss: 0.7148010\n", "\tspeed: 0.0337s/iter; left time: 401.7655s\n", "\titers: 300, epoch: 3 | loss: 0.7649150\n", "\tspeed: 0.0339s/iter; left time: 400.7488s\n", "\titers: 400, epoch: 3 | loss: 0.7339651\n", "\tspeed: 0.0338s/iter; left time: 397.0123s\n", "\titers: 500, epoch: 3 | loss: 0.7324534\n", "\tspeed: 0.0339s/iter; left time: 394.3869s\n", "\titers: 600, epoch: 3 | loss: 0.7640102\n", "\tspeed: 0.0339s/iter; left time: 391.2350s\n", "\titers: 700, epoch: 3 | loss: 0.7554982\n", "\tspeed: 0.0340s/iter; left time: 389.4207s\n", "\titers: 800, epoch: 3 | loss: 0.7595450\n", "\tspeed: 0.0340s/iter; left time: 385.8155s\n", "\titers: 900, epoch: 3 | loss: 0.6203551\n", "\tspeed: 0.0340s/iter; left time: 382.4740s\n", "\titers: 1000, epoch: 3 | loss: 0.6765125\n", "\tspeed: 0.0340s/iter; left time: 378.8388s\n", "\titers: 1100, epoch: 3 | loss: 0.7214283\n", "\tspeed: 0.0341s/iter; left time: 376.2966s\n", "\titers: 1200, epoch: 3 | loss: 0.6854476\n", "\tspeed: 0.0341s/iter; left time: 372.6309s\n", "\titers: 1300, epoch: 3 | loss: 0.7223969\n", "\tspeed: 0.0341s/iter; left time: 369.6620s\n", "\titers: 1400, epoch: 3 | loss: 0.6785082\n", "\tspeed: 0.0341s/iter; left time: 366.2214s\n", "\titers: 1500, epoch: 3 | loss: 0.7877484\n", "\tspeed: 0.0341s/iter; left time: 362.5818s\n", "Epoch: 3 cost time: 97.60221219062805\n", "Epoch: 3, Steps: 1517 | Train Loss: 0.7434156 Vali Loss: 0.6852496 Test Loss: 0.6809614\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.7294958\n", "\tspeed: 2.9164s/iter; left time: 30680.3532s\n", "\titers: 200, epoch: 4 | loss: 0.7124564\n", "\tspeed: 0.0336s/iter; left time: 350.6001s\n", "\titers: 300, epoch: 4 | loss: 0.6435595\n", "\tspeed: 0.0339s/iter; left time: 349.5140s\n", "\titers: 400, epoch: 4 | loss: 0.7540269\n", "\tspeed: 0.0338s/iter; left time: 345.7393s\n", "\titers: 500, epoch: 4 | loss: 0.6740328\n", "\tspeed: 0.0339s/iter; left time: 342.8656s\n", "\titers: 600, epoch: 4 | loss: 0.7243611\n", "\tspeed: 0.0339s/iter; left time: 339.5526s\n", "\titers: 700, epoch: 4 | loss: 0.7342559\n", "\tspeed: 0.0340s/iter; left time: 337.0847s\n", "\titers: 800, epoch: 4 | loss: 0.7063743\n", "\tspeed: 0.0339s/iter; left time: 333.3359s\n", "\titers: 900, epoch: 4 | loss: 0.7248245\n", "\tspeed: 0.0340s/iter; left time: 330.0450s\n", "\titers: 1000, epoch: 4 | loss: 0.6746527\n", "\tspeed: 0.0340s/iter; left time: 327.0794s\n", "\titers: 1100, epoch: 4 | loss: 0.6958483\n", "\tspeed: 0.0344s/iter; left time: 327.2109s\n", "\titers: 1200, epoch: 4 | loss: 0.7509600\n", "\tspeed: 0.0340s/iter; left time: 320.0302s\n", "\titers: 1300, epoch: 4 | loss: 0.7955292\n", "\tspeed: 0.0342s/iter; left time: 318.3433s\n", "\titers: 1400, epoch: 4 | loss: 0.7892160\n", "\tspeed: 0.0340s/iter; left time: 313.8080s\n", "\titers: 1500, epoch: 4 | loss: 0.7631707\n", "\tspeed: 0.0342s/iter; left time: 311.9674s\n", "Epoch: 4 cost time: 97.76825380325317\n", "Epoch: 4, Steps: 1517 | Train Loss: 0.7363005 Vali Loss: 0.6821639 Test Loss: 0.6777636\n", "Validation loss decreased (0.684666 --> 0.682164). Saving model ...\n", "Updating learning rate to 1.25e-05\n", "\titers: 100, epoch: 5 | loss: 0.6480062\n", "\tspeed: 2.9149s/iter; left time: 26242.8954s\n", "\titers: 200, epoch: 5 | loss: 0.7364060\n", "\tspeed: 0.0338s/iter; left time: 300.7923s\n", "\titers: 300, epoch: 5 | loss: 0.7197828\n", "\tspeed: 0.0342s/iter; left time: 301.2522s\n", "\titers: 400, epoch: 5 | loss: 0.7956381\n", "\tspeed: 0.0339s/iter; left time: 294.6585s\n", "\titers: 500, epoch: 5 | loss: 0.8291351\n", "\tspeed: 0.0339s/iter; left time: 291.6737s\n", "\titers: 600, epoch: 5 | loss: 0.7325811\n", "\tspeed: 0.0339s/iter; left time: 288.1639s\n", "\titers: 700, epoch: 5 | loss: 0.6567359\n", "\tspeed: 0.0338s/iter; left time: 284.2640s\n", "\titers: 800, epoch: 5 | loss: 0.7511972\n", "\tspeed: 0.0340s/iter; left time: 282.1681s\n", "\titers: 900, epoch: 5 | loss: 0.8104257\n", "\tspeed: 0.0340s/iter; left time: 279.0764s\n", "\titers: 1000, epoch: 5 | loss: 0.9051831\n", "\tspeed: 0.0341s/iter; left time: 276.0377s\n", "\titers: 1100, epoch: 5 | loss: 0.8522705\n", "\tspeed: 0.0340s/iter; left time: 272.3939s\n", "\titers: 1200, epoch: 5 | loss: 0.7604227\n", "\tspeed: 0.0341s/iter; left time: 269.4036s\n", "\titers: 1300, epoch: 5 | loss: 0.6782746\n", "\tspeed: 0.0341s/iter; left time: 266.0663s\n", "\titers: 1400, epoch: 5 | loss: 0.7345523\n", "\tspeed: 0.0341s/iter; left time: 262.9616s\n", "\titers: 1500, epoch: 5 | loss: 0.6832511\n", "\tspeed: 0.0342s/iter; left time: 259.8289s\n", "Epoch: 5 cost time: 97.44870066642761\n", "Epoch: 5, Steps: 1517 | Train Loss: 0.7322960 Vali Loss: 0.6816483 Test Loss: 0.6764666\n", "Validation loss decreased (0.682164 --> 0.681648). Saving model ...\n", "Updating learning rate to 6.25e-06\n", "\titers: 100, epoch: 6 | loss: 0.6875192\n", "\tspeed: 2.9182s/iter; left time: 21845.8703s\n", "\titers: 200, epoch: 6 | loss: 0.7116101\n", "\tspeed: 0.0338s/iter; left time: 249.7584s\n", "\titers: 300, epoch: 6 | loss: 0.7192253\n", "\tspeed: 0.0339s/iter; left time: 246.8898s\n", "\titers: 400, epoch: 6 | loss: 0.7409244\n", "\tspeed: 0.0339s/iter; left time: 243.4813s\n", "\titers: 500, epoch: 6 | loss: 0.7105592\n", "\tspeed: 0.0339s/iter; left time: 240.3933s\n", "\titers: 600, epoch: 6 | loss: 0.7086514\n", "\tspeed: 0.0340s/iter; left time: 237.4620s\n", "\titers: 700, epoch: 6 | loss: 0.8019913\n", "\tspeed: 0.0341s/iter; left time: 234.7783s\n", "\titers: 800, epoch: 6 | loss: 0.6876934\n", "\tspeed: 0.0340s/iter; left time: 230.8479s\n", "\titers: 900, epoch: 6 | loss: 0.6372680\n", "\tspeed: 0.0340s/iter; left time: 227.6557s\n", "\titers: 1000, epoch: 6 | loss: 0.7132138\n", "\tspeed: 0.0341s/iter; left time: 224.5598s\n", "\titers: 1100, epoch: 6 | loss: 0.6703435\n", "\tspeed: 0.0342s/iter; left time: 221.7892s\n", "\titers: 1200, epoch: 6 | loss: 0.7167631\n", "\tspeed: 0.0344s/iter; left time: 219.4695s\n", "\titers: 1300, epoch: 6 | loss: 0.6552737\n", "\tspeed: 0.0342s/iter; left time: 214.7768s\n", "\titers: 1400, epoch: 6 | loss: 0.7663884\n", "\tspeed: 0.0342s/iter; left time: 211.6906s\n", "\titers: 1500, epoch: 6 | loss: 0.5992211\n", "\tspeed: 0.0340s/iter; left time: 207.1667s\n", "Epoch: 6 cost time: 97.70735669136047\n", "Epoch: 6, Steps: 1517 | Train Loss: 0.7302412 Vali Loss: 0.6840407 Test Loss: 0.6786999\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 3.125e-06\n", "\titers: 100, epoch: 7 | loss: 0.7595237\n", "\tspeed: 2.9332s/iter; left time: 17508.3854s\n", "\titers: 200, epoch: 7 | loss: 0.6671734\n", "\tspeed: 0.0333s/iter; left time: 195.3628s\n", "\titers: 300, epoch: 7 | loss: 0.6941677\n", "\tspeed: 0.0335s/iter; left time: 193.1595s\n", "\titers: 400, epoch: 7 | loss: 0.6280651\n", "\tspeed: 0.0334s/iter; left time: 189.3114s\n", "\titers: 500, epoch: 7 | loss: 0.7512055\n", "\tspeed: 0.0335s/iter; left time: 186.3288s\n", "\titers: 600, epoch: 7 | loss: 0.8441300\n", "\tspeed: 0.0335s/iter; left time: 183.3663s\n", "\titers: 700, epoch: 7 | loss: 0.6613909\n", "\tspeed: 0.0336s/iter; left time: 180.1728s\n", "\titers: 800, epoch: 7 | loss: 0.6919878\n", "\tspeed: 0.0336s/iter; left time: 176.9146s\n", "\titers: 900, epoch: 7 | loss: 0.6836124\n", "\tspeed: 0.0336s/iter; left time: 173.6235s\n", "\titers: 1000, epoch: 7 | loss: 0.6766745\n", "\tspeed: 0.0337s/iter; left time: 170.7301s\n", "\titers: 1100, epoch: 7 | loss: 0.7390982\n", "\tspeed: 0.0337s/iter; left time: 167.2680s\n", "\titers: 1200, epoch: 7 | loss: 0.7635096\n", "\tspeed: 0.0337s/iter; left time: 164.1685s\n", "\titers: 1300, epoch: 7 | loss: 0.6975073\n", "\tspeed: 0.0337s/iter; left time: 160.8398s\n", "\titers: 1400, epoch: 7 | loss: 0.7663255\n", "\tspeed: 0.0337s/iter; left time: 157.4433s\n", "\titers: 1500, epoch: 7 | loss: 0.6987572\n", "\tspeed: 0.0338s/iter; left time: 154.2987s\n", "Epoch: 7 cost time: 97.17047357559204\n", "Epoch: 7, Steps: 1517 | Train Loss: 0.7290933 Vali Loss: 0.6836217 Test Loss: 0.6780484\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 1.5625e-06\n", "\titers: 100, epoch: 8 | loss: 0.7107906\n", "\tspeed: 2.9098s/iter; left time: 12954.3853s\n", "\titers: 200, epoch: 8 | loss: 0.8525370\n", "\tspeed: 0.0333s/iter; left time: 145.0568s\n", "\titers: 300, epoch: 8 | loss: 0.7246703\n", "\tspeed: 0.0334s/iter; left time: 141.8688s\n", "\titers: 400, epoch: 8 | loss: 0.7107543\n", "\tspeed: 0.0334s/iter; left time: 138.7324s\n", "\titers: 500, epoch: 8 | loss: 0.7062993\n", "\tspeed: 0.0334s/iter; left time: 135.3256s\n", "\titers: 600, epoch: 8 | loss: 0.6274781\n", "\tspeed: 0.0335s/iter; left time: 132.3295s\n", "\titers: 700, epoch: 8 | loss: 0.6695981\n", "\tspeed: 0.0335s/iter; left time: 129.2194s\n", "\titers: 800, epoch: 8 | loss: 0.5822155\n", "\tspeed: 0.0336s/iter; left time: 126.1048s\n", "\titers: 900, epoch: 8 | loss: 0.6507615\n", "\tspeed: 0.0336s/iter; left time: 122.8508s\n", "\titers: 1000, epoch: 8 | loss: 0.8946386\n", "\tspeed: 0.0336s/iter; left time: 119.4699s\n", "\titers: 1100, epoch: 8 | loss: 0.5478238\n", "\tspeed: 0.0337s/iter; left time: 116.4210s\n", "\titers: 1200, epoch: 8 | loss: 0.7767902\n", "\tspeed: 0.0338s/iter; left time: 113.1935s\n", "\titers: 1300, epoch: 8 | loss: 0.6329966\n", "\tspeed: 0.0337s/iter; left time: 109.5083s\n", "\titers: 1400, epoch: 8 | loss: 0.8020332\n", "\tspeed: 0.0337s/iter; left time: 106.2431s\n", "\titers: 1500, epoch: 8 | loss: 0.7550086\n", "\tspeed: 0.0337s/iter; left time: 102.9973s\n", "Epoch: 8 cost time: 96.5048770904541\n", "Epoch: 8, Steps: 1517 | Train Loss: 0.7286756 Vali Loss: 0.6839706 Test Loss: 0.6785330\n", "EarlyStopping counter: 3 out of 3\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_336_iTransformer_custom_ftM_sl96_ll48_pl336_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27543\n", "test shape: (27543, 1, 336, 15) (27543, 1, 336, 15)\n", "test shape: (27543, 336, 15) (27543, 336, 15)\n", "mse:0.6764664053916931, mae:0.4780843257904053\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_336',\n", " model='iTransformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=336,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=10,\n", " batch_size=64,\n", " patience=3,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_720 Model: iTransformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 720 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 10 Batch Size: 64 \n", " Patience: 3 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_720_iTransformer_custom_ftM_sl96_ll48_pl720_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 96758\n", "val 13221\n", "test 27159\n", "\titers: 100, epoch: 1 | loss: 0.8186886\n", "\tspeed: 0.4809s/iter; left time: 7218.2142s\n", "\titers: 200, epoch: 1 | loss: 0.9193029\n", "\tspeed: 0.0363s/iter; left time: 541.9517s\n", "\titers: 300, epoch: 1 | loss: 0.8635051\n", "\tspeed: 0.0363s/iter; left time: 538.1011s\n", "\titers: 400, epoch: 1 | loss: 0.9511719\n", "\tspeed: 0.0365s/iter; left time: 536.5602s\n", "\titers: 500, epoch: 1 | loss: 0.8397924\n", "\tspeed: 0.0365s/iter; left time: 533.7490s\n", "\titers: 600, epoch: 1 | loss: 0.8933093\n", "\tspeed: 0.0366s/iter; left time: 531.4531s\n", "\titers: 700, epoch: 1 | loss: 0.8581073\n", "\tspeed: 0.0365s/iter; left time: 526.4436s\n", "\titers: 800, epoch: 1 | loss: 0.9031666\n", "\tspeed: 0.0367s/iter; left time: 524.8332s\n", "\titers: 900, epoch: 1 | loss: 0.8583406\n", "\tspeed: 0.0366s/iter; left time: 520.4335s\n", "\titers: 1000, epoch: 1 | loss: 0.7092524\n", "\tspeed: 0.0367s/iter; left time: 518.3753s\n", "\titers: 1100, epoch: 1 | loss: 0.8884393\n", "\tspeed: 0.0367s/iter; left time: 514.8250s\n", "\titers: 1200, epoch: 1 | loss: 0.8068309\n", "\tspeed: 0.0367s/iter; left time: 509.9717s\n", "\titers: 1300, epoch: 1 | loss: 0.9036927\n", "\tspeed: 0.0372s/iter; left time: 514.3981s\n", "\titers: 1400, epoch: 1 | loss: 0.8407988\n", "\tspeed: 0.0368s/iter; left time: 504.4484s\n", "\titers: 1500, epoch: 1 | loss: 0.7890908\n", "\tspeed: 0.0368s/iter; left time: 501.2926s\n", "Epoch: 1 cost time: 100.85920572280884\n", "Epoch: 1, Steps: 1511 | Train Loss: 0.8412123 Vali Loss: 0.7538418 Test Loss: 0.7387907\n", "Validation loss decreased (inf --> 0.753842). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.7212669\n", "\tspeed: 2.9278s/iter; left time: 39524.7334s\n", "\titers: 200, epoch: 2 | loss: 0.7987832\n", "\tspeed: 0.0362s/iter; left time: 485.5914s\n", "\titers: 300, epoch: 2 | loss: 0.7034885\n", "\tspeed: 0.0363s/iter; left time: 483.3952s\n", "\titers: 400, epoch: 2 | loss: 0.8625731\n", "\tspeed: 0.0362s/iter; left time: 477.2227s\n", "\titers: 500, epoch: 2 | loss: 0.7460996\n", "\tspeed: 0.0363s/iter; left time: 475.1000s\n", "\titers: 600, epoch: 2 | loss: 0.7312562\n", "\tspeed: 0.0365s/iter; left time: 474.1529s\n", "\titers: 700, epoch: 2 | loss: 0.7549384\n", "\tspeed: 0.0364s/iter; left time: 469.0008s\n", "\titers: 800, epoch: 2 | loss: 0.7915602\n", "\tspeed: 0.0364s/iter; left time: 465.6072s\n", "\titers: 900, epoch: 2 | loss: 0.8160855\n", "\tspeed: 0.0365s/iter; left time: 464.0756s\n", "\titers: 1000, epoch: 2 | loss: 0.7993005\n", "\tspeed: 0.0366s/iter; left time: 461.3265s\n", "\titers: 1100, epoch: 2 | loss: 0.9280266\n", "\tspeed: 0.0365s/iter; left time: 455.7280s\n", "\titers: 1200, epoch: 2 | loss: 0.7378565\n", "\tspeed: 0.0367s/iter; left time: 455.2252s\n", "\titers: 1300, epoch: 2 | loss: 0.8085539\n", "\tspeed: 0.0365s/iter; left time: 449.0834s\n", "\titers: 1400, epoch: 2 | loss: 0.7740736\n", "\tspeed: 0.0366s/iter; left time: 446.2242s\n", "\titers: 1500, epoch: 2 | loss: 0.8670415\n", "\tspeed: 0.0367s/iter; left time: 444.3549s\n", "Epoch: 2 cost time: 101.1522319316864\n", "Epoch: 2, Steps: 1511 | Train Loss: 0.8167034 Vali Loss: 0.7499349 Test Loss: 0.7313063\n", "Validation loss decreased (0.753842 --> 0.749935). Saving model ...\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.7513472\n", "\tspeed: 2.8860s/iter; left time: 34600.6616s\n", "\titers: 200, epoch: 3 | loss: 0.7207974\n", "\tspeed: 0.0361s/iter; left time: 429.5270s\n", "\titers: 300, epoch: 3 | loss: 0.8303027\n", "\tspeed: 0.0363s/iter; left time: 427.8890s\n", "\titers: 400, epoch: 3 | loss: 0.8579456\n", "\tspeed: 0.0362s/iter; left time: 422.9204s\n", "\titers: 500, epoch: 3 | loss: 0.7680019\n", "\tspeed: 0.0364s/iter; left time: 421.6676s\n", "\titers: 600, epoch: 3 | loss: 0.7718320\n", "\tspeed: 0.0365s/iter; left time: 419.6053s\n", "\titers: 700, epoch: 3 | loss: 0.8720611\n", "\tspeed: 0.0364s/iter; left time: 414.4455s\n", "\titers: 800, epoch: 3 | loss: 0.8626361\n", "\tspeed: 0.0365s/iter; left time: 412.4312s\n", "\titers: 900, epoch: 3 | loss: 0.8626949\n", "\tspeed: 0.0364s/iter; left time: 407.5471s\n", "\titers: 1000, epoch: 3 | loss: 0.8619117\n", "\tspeed: 0.0365s/iter; left time: 404.8395s\n", "\titers: 1100, epoch: 3 | loss: 0.8770922\n", "\tspeed: 0.0366s/iter; left time: 402.5574s\n", "\titers: 1200, epoch: 3 | loss: 0.8296466\n", "\tspeed: 0.0366s/iter; left time: 399.0220s\n", "\titers: 1300, epoch: 3 | loss: 0.7416665\n", "\tspeed: 0.0366s/iter; left time: 394.7497s\n", "\titers: 1400, epoch: 3 | loss: 0.7638373\n", "\tspeed: 0.0366s/iter; left time: 391.0372s\n", "\titers: 1500, epoch: 3 | loss: 0.8404391\n", "\tspeed: 0.0370s/iter; left time: 391.2810s\n", "Epoch: 3 cost time: 101.6574113368988\n", "Epoch: 3, Steps: 1511 | Train Loss: 0.8010316 Vali Loss: 0.7477803 Test Loss: 0.7277666\n", "Validation loss decreased (0.749935 --> 0.747780). Saving model ...\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.7045190\n", "\tspeed: 2.8823s/iter; left time: 30200.9942s\n", "\titers: 200, epoch: 4 | loss: 0.7066467\n", "\tspeed: 0.0372s/iter; left time: 385.8379s\n", "\titers: 300, epoch: 4 | loss: 0.7749660\n", "\tspeed: 0.0373s/iter; left time: 383.6457s\n", "\titers: 400, epoch: 4 | loss: 0.7730563\n", "\tspeed: 0.0371s/iter; left time: 377.4509s\n", "\titers: 500, epoch: 4 | loss: 0.7639590\n", "\tspeed: 0.0369s/iter; left time: 372.2015s\n", "\titers: 600, epoch: 4 | loss: 0.7582878\n", "\tspeed: 0.0369s/iter; left time: 368.6490s\n", "\titers: 700, epoch: 4 | loss: 0.9018844\n", "\tspeed: 0.0374s/iter; left time: 369.8718s\n", "\titers: 800, epoch: 4 | loss: 0.6901655\n", "\tspeed: 0.0376s/iter; left time: 367.6767s\n", "\titers: 900, epoch: 4 | loss: 0.6724777\n", "\tspeed: 0.0370s/iter; left time: 357.8945s\n", "\titers: 1000, epoch: 4 | loss: 0.7750159\n", "\tspeed: 0.0366s/iter; left time: 350.4160s\n", "\titers: 1100, epoch: 4 | loss: 0.8424389\n", "\tspeed: 0.0369s/iter; left time: 349.6281s\n", "\titers: 1200, epoch: 4 | loss: 0.8488315\n", "\tspeed: 0.0369s/iter; left time: 345.6933s\n", "\titers: 1300, epoch: 4 | loss: 0.7979546\n", "\tspeed: 0.0373s/iter; left time: 345.8738s\n", "\titers: 1400, epoch: 4 | loss: 0.8120037\n", "\tspeed: 0.0369s/iter; left time: 338.3853s\n", "\titers: 1500, epoch: 4 | loss: 0.7583222\n", "\tspeed: 0.0372s/iter; left time: 337.6237s\n", "Epoch: 4 cost time: 102.54616856575012\n", "Epoch: 4, Steps: 1511 | Train Loss: 0.7920635 Vali Loss: 0.7454294 Test Loss: 0.7269430\n", "Validation loss decreased (0.747780 --> 0.745429). Saving model ...\n", "Updating learning rate to 1.25e-05\n", "\titers: 100, epoch: 5 | loss: 0.7698524\n", "\tspeed: 2.9592s/iter; left time: 26535.4235s\n", "\titers: 200, epoch: 5 | loss: 0.8332277\n", "\tspeed: 0.0360s/iter; left time: 319.6006s\n", "\titers: 300, epoch: 5 | loss: 0.7530847\n", "\tspeed: 0.0361s/iter; left time: 316.4214s\n", "\titers: 400, epoch: 5 | loss: 0.8121219\n", "\tspeed: 0.0364s/iter; left time: 315.2340s\n", "\titers: 500, epoch: 5 | loss: 0.7293662\n", "\tspeed: 0.0362s/iter; left time: 310.5064s\n", "\titers: 600, epoch: 5 | loss: 0.9086708\n", "\tspeed: 0.0364s/iter; left time: 308.4401s\n", "\titers: 700, epoch: 5 | loss: 0.8745173\n", "\tspeed: 0.0364s/iter; left time: 304.4880s\n", "\titers: 800, epoch: 5 | loss: 0.7855228\n", "\tspeed: 0.0364s/iter; left time: 300.9170s\n", "\titers: 900, epoch: 5 | loss: 0.7439116\n", "\tspeed: 0.0368s/iter; left time: 300.1789s\n", "\titers: 1000, epoch: 5 | loss: 0.7423583\n", "\tspeed: 0.0364s/iter; left time: 293.8119s\n", "\titers: 1100, epoch: 5 | loss: 0.7481327\n", "\tspeed: 0.0364s/iter; left time: 290.3948s\n", "\titers: 1200, epoch: 5 | loss: 0.7128110\n", "\tspeed: 0.0365s/iter; left time: 286.8655s\n", "\titers: 1300, epoch: 5 | loss: 0.7890885\n", "\tspeed: 0.0365s/iter; left time: 283.7424s\n", "\titers: 1400, epoch: 5 | loss: 0.8448302\n", "\tspeed: 0.0365s/iter; left time: 279.9167s\n", "\titers: 1500, epoch: 5 | loss: 0.8243079\n", "\tspeed: 0.0365s/iter; left time: 276.2486s\n", "Epoch: 5 cost time: 100.8719744682312\n", "Epoch: 5, Steps: 1511 | Train Loss: 0.7870604 Vali Loss: 0.7474295 Test Loss: 0.7288673\n", "EarlyStopping counter: 1 out of 3\n", "Updating learning rate to 6.25e-06\n", "\titers: 100, epoch: 6 | loss: 0.8212607\n", "\tspeed: 2.8750s/iter; left time: 21436.3366s\n", "\titers: 200, epoch: 6 | loss: 0.7863473\n", "\tspeed: 0.0360s/iter; left time: 264.5231s\n", "\titers: 300, epoch: 6 | loss: 0.7405903\n", "\tspeed: 0.0363s/iter; left time: 263.5315s\n", "\titers: 400, epoch: 6 | loss: 0.8260189\n", "\tspeed: 0.0363s/iter; left time: 259.5481s\n", "\titers: 500, epoch: 6 | loss: 0.8392119\n", "\tspeed: 0.0365s/iter; left time: 257.8012s\n", "\titers: 600, epoch: 6 | loss: 0.7545788\n", "\tspeed: 0.0364s/iter; left time: 253.0886s\n", "\titers: 700, epoch: 6 | loss: 0.7812423\n", "\tspeed: 0.0365s/iter; left time: 250.1772s\n", "\titers: 800, epoch: 6 | loss: 0.8350452\n", "\tspeed: 0.0365s/iter; left time: 246.5843s\n", "\titers: 900, epoch: 6 | loss: 0.6707399\n", "\tspeed: 0.0363s/iter; left time: 241.4308s\n", "\titers: 1000, epoch: 6 | loss: 0.7334297\n", "\tspeed: 0.0363s/iter; left time: 237.6859s\n", "\titers: 1100, epoch: 6 | loss: 0.6790267\n", "\tspeed: 0.0365s/iter; left time: 235.6367s\n", "\titers: 1200, epoch: 6 | loss: 0.8249638\n", "\tspeed: 0.0366s/iter; left time: 232.9375s\n", "\titers: 1300, epoch: 6 | loss: 0.9560454\n", "\tspeed: 0.0363s/iter; left time: 226.8663s\n", "\titers: 1400, epoch: 6 | loss: 0.7251940\n", "\tspeed: 0.0367s/iter; left time: 225.9093s\n", "\titers: 1500, epoch: 6 | loss: 0.7827789\n", "\tspeed: 0.0366s/iter; left time: 221.8729s\n", "Epoch: 6 cost time: 100.52545094490051\n", "Epoch: 6, Steps: 1511 | Train Loss: 0.7844729 Vali Loss: 0.7454467 Test Loss: 0.7270975\n", "EarlyStopping counter: 2 out of 3\n", "Updating learning rate to 3.125e-06\n", "\titers: 100, epoch: 7 | loss: 0.7286347\n", "\tspeed: 2.8951s/iter; left time: 17211.2138s\n", "\titers: 200, epoch: 7 | loss: 0.7529159\n", "\tspeed: 0.0360s/iter; left time: 210.2816s\n", "\titers: 300, epoch: 7 | loss: 0.7762430\n", "\tspeed: 0.0363s/iter; left time: 208.6470s\n", "\titers: 400, epoch: 7 | loss: 0.6557502\n", "\tspeed: 0.0363s/iter; left time: 204.8490s\n", "\titers: 500, epoch: 7 | loss: 0.7646010\n", "\tspeed: 0.0362s/iter; left time: 200.8117s\n", "\titers: 600, epoch: 7 | loss: 0.7039447\n", "\tspeed: 0.0364s/iter; left time: 198.0846s\n", "\titers: 700, epoch: 7 | loss: 0.8644934\n", "\tspeed: 0.0364s/iter; left time: 194.4660s\n", "\titers: 800, epoch: 7 | loss: 0.7479609\n", "\tspeed: 0.0365s/iter; left time: 191.4545s\n", "\titers: 900, epoch: 7 | loss: 0.7625357\n", "\tspeed: 0.0363s/iter; left time: 186.6147s\n", "\titers: 1000, epoch: 7 | loss: 0.8668007\n", "\tspeed: 0.0365s/iter; left time: 183.9524s\n", "\titers: 1100, epoch: 7 | loss: 0.7765721\n", "\tspeed: 0.0364s/iter; left time: 180.1804s\n", "\titers: 1200, epoch: 7 | loss: 0.8833662\n", "\tspeed: 0.0365s/iter; left time: 176.7267s\n", "\titers: 1300, epoch: 7 | loss: 0.7631060\n", "\tspeed: 0.0364s/iter; left time: 172.6286s\n", "\titers: 1400, epoch: 7 | loss: 0.7764639\n", "\tspeed: 0.0367s/iter; left time: 170.2958s\n", "\titers: 1500, epoch: 7 | loss: 0.8403539\n", "\tspeed: 0.0368s/iter; left time: 167.3386s\n", "Epoch: 7 cost time: 100.56971859931946\n", "Epoch: 7, Steps: 1511 | Train Loss: 0.7832189 Vali Loss: 0.7480445 Test Loss: 0.7283733\n", "EarlyStopping counter: 3 out of 3\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_720_iTransformer_custom_ftM_sl96_ll48_pl720_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27159\n", "test shape: (27159, 1, 720, 15) (27159, 1, 720, 15)\n", "test shape: (27159, 720, 15) (27159, 720, 15)\n", "mse:0.7269436717033386, mae:0.504772424697876\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_720',\n", " model='iTransformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=720,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=10,\n", " batch_size=64,\n", " patience=3,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_192 Model: Autoformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 192 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 5 Batch Size: 32 \n", " Patience: 2 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_192_Autoformer_custom_ftM_sl96_ll48_pl192_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97286\n", "val 13749\n", "test 27687\n", "\titers: 100, epoch: 1 | loss: 0.7060729\n", "\tspeed: 0.8425s/iter; left time: 12722.9764s\n", "\titers: 200, epoch: 1 | loss: 0.6771862\n", "\tspeed: 0.3385s/iter; left time: 5077.6111s\n", "\titers: 300, epoch: 1 | loss: 0.7004231\n", "\tspeed: 0.3397s/iter; left time: 5061.1392s\n", "\titers: 400, epoch: 1 | loss: 0.7645301\n", "\tspeed: 0.3405s/iter; left time: 5039.0335s\n", "\titers: 500, epoch: 1 | loss: 0.7449467\n", "\tspeed: 0.3412s/iter; left time: 5016.1337s\n", "\titers: 600, epoch: 1 | loss: 0.7320319\n", "\tspeed: 0.3417s/iter; left time: 4989.4232s\n", "\titers: 700, epoch: 1 | loss: 0.7510488\n", "\tspeed: 0.3430s/iter; left time: 4973.2816s\n", "\titers: 800, epoch: 1 | loss: 0.9012752\n", "\tspeed: 0.3423s/iter; left time: 4929.6750s\n", "\titers: 900, epoch: 1 | loss: 0.8091200\n", "\tspeed: 0.3422s/iter; left time: 4893.9333s\n", "\titers: 1000, epoch: 1 | loss: 0.7016364\n", "\tspeed: 0.3422s/iter; left time: 4859.7735s\n", "\titers: 1100, epoch: 1 | loss: 0.7492526\n", "\tspeed: 0.3426s/iter; left time: 4830.3614s\n", "\titers: 1200, epoch: 1 | loss: 0.7009193\n", "\tspeed: 0.3428s/iter; left time: 4799.7894s\n", "\titers: 1300, epoch: 1 | loss: 0.7306798\n", "\tspeed: 0.3430s/iter; left time: 4768.0947s\n", "\titers: 1400, epoch: 1 | loss: 0.7014756\n", "\tspeed: 0.3428s/iter; left time: 4730.3397s\n", "\titers: 1500, epoch: 1 | loss: 0.6614472\n", "\tspeed: 0.3432s/iter; left time: 4702.7468s\n", "\titers: 1600, epoch: 1 | loss: 0.6686432\n", "\tspeed: 0.3431s/iter; left time: 4666.4807s\n", "\titers: 1700, epoch: 1 | loss: 0.6874031\n", "\tspeed: 0.3431s/iter; left time: 4631.8456s\n", "\titers: 1800, epoch: 1 | loss: 0.7572113\n", "\tspeed: 0.3430s/iter; left time: 4596.9132s\n", "\titers: 1900, epoch: 1 | loss: 0.7560040\n", "\tspeed: 0.3430s/iter; left time: 4562.8277s\n", "\titers: 2000, epoch: 1 | loss: 0.7085792\n", "\tspeed: 0.3433s/iter; left time: 4531.9632s\n", "\titers: 2100, epoch: 1 | loss: 0.7287388\n", "\tspeed: 0.3431s/iter; left time: 4495.5869s\n", "\titers: 2200, epoch: 1 | loss: 0.6930690\n", "\tspeed: 0.3435s/iter; left time: 4465.3578s\n", "\titers: 2300, epoch: 1 | loss: 0.7869411\n", "\tspeed: 0.3434s/iter; left time: 4430.2379s\n", "\titers: 2400, epoch: 1 | loss: 0.6414000\n", "\tspeed: 0.3433s/iter; left time: 4394.0726s\n", "\titers: 2500, epoch: 1 | loss: 0.6788224\n", "\tspeed: 0.3432s/iter; left time: 4358.7891s\n", "\titers: 2600, epoch: 1 | loss: 0.5642128\n", "\tspeed: 0.3432s/iter; left time: 4324.8183s\n", "\titers: 2700, epoch: 1 | loss: 0.8333097\n", "\tspeed: 0.3434s/iter; left time: 4293.3441s\n", "\titers: 2800, epoch: 1 | loss: 0.6527547\n", "\tspeed: 0.3437s/iter; left time: 4262.6340s\n", "\titers: 2900, epoch: 1 | loss: 0.7997938\n", "\tspeed: 0.3441s/iter; left time: 4232.5578s\n", "\titers: 3000, epoch: 1 | loss: 0.5456027\n", "\tspeed: 0.3440s/iter; left time: 4197.7515s\n", "Epoch: 1 cost time: 1092.175326347351\n", "Epoch: 1, Steps: 3040 | Train Loss: 0.7076277 Vali Loss: 0.6414957 Test Loss: 0.6355927\n", "Validation loss decreased (inf --> 0.641496). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.6992500\n", "\tspeed: 10.1408s/iter; left time: 122307.6460s\n", "\titers: 200, epoch: 2 | loss: 0.7742710\n", "\tspeed: 0.3394s/iter; left time: 4059.6762s\n", "\titers: 300, epoch: 2 | loss: 0.7515810\n", "\tspeed: 0.3402s/iter; left time: 4034.7057s\n", "\titers: 400, epoch: 2 | loss: 0.5345064\n", "\tspeed: 0.3408s/iter; left time: 4008.1316s\n", "\titers: 500, epoch: 2 | loss: 0.6825959\n", "\tspeed: 0.3415s/iter; left time: 3982.3927s\n", "\titers: 600, epoch: 2 | loss: 0.5134490\n", "\tspeed: 0.3421s/iter; left time: 3955.3536s\n", "\titers: 700, epoch: 2 | loss: 0.4707636\n", "\tspeed: 0.3422s/iter; left time: 3922.3440s\n", "\titers: 800, epoch: 2 | loss: 0.7712355\n", "\tspeed: 0.3424s/iter; left time: 3889.6865s\n", "\titers: 900, epoch: 2 | loss: 0.5709189\n", "\tspeed: 0.3429s/iter; left time: 3861.8199s\n", "\titers: 1000, epoch: 2 | loss: 0.7720557\n", "\tspeed: 0.3428s/iter; left time: 3826.4826s\n", "\titers: 1100, epoch: 2 | loss: 0.6867329\n", "\tspeed: 0.3429s/iter; left time: 3792.9764s\n", "\titers: 1200, epoch: 2 | loss: 0.5870683\n", "\tspeed: 0.3427s/iter; left time: 3756.7493s\n", "\titers: 1300, epoch: 2 | loss: 0.8347874\n", "\tspeed: 0.3431s/iter; left time: 3725.9904s\n", "\titers: 1400, epoch: 2 | loss: 0.7612745\n", "\tspeed: 0.3430s/iter; left time: 3690.5842s\n", "\titers: 1500, epoch: 2 | loss: 0.5558271\n", "\tspeed: 0.3432s/iter; left time: 3658.6178s\n", "\titers: 1600, epoch: 2 | loss: 0.6585294\n", "\tspeed: 0.3431s/iter; left time: 3623.8055s\n", "\titers: 1700, epoch: 2 | loss: 0.9225779\n", "\tspeed: 0.3431s/iter; left time: 3589.1414s\n", "\titers: 1800, epoch: 2 | loss: 0.5713859\n", "\tspeed: 0.3433s/iter; left time: 3557.0609s\n", "\titers: 1900, epoch: 2 | loss: 0.5017359\n", "\tspeed: 0.3433s/iter; left time: 3522.2527s\n", "\titers: 2000, epoch: 2 | loss: 0.8573118\n", "\tspeed: 0.3434s/iter; left time: 3489.2132s\n", "\titers: 2100, epoch: 2 | loss: 0.4587389\n", "\tspeed: 0.3439s/iter; left time: 3460.2618s\n", "\titers: 2200, epoch: 2 | loss: 0.7088269\n", "\tspeed: 0.3434s/iter; left time: 3421.0573s\n", "\titers: 2300, epoch: 2 | loss: 0.8105705\n", "\tspeed: 0.3436s/iter; left time: 3387.9583s\n", "\titers: 2400, epoch: 2 | loss: 0.7165201\n", "\tspeed: 0.3435s/iter; left time: 3352.9334s\n", "\titers: 2500, epoch: 2 | loss: 0.7126307\n", "\tspeed: 0.3437s/iter; left time: 3320.9407s\n", "\titers: 2600, epoch: 2 | loss: 0.6259704\n", "\tspeed: 0.3436s/iter; left time: 3285.3309s\n", "\titers: 2700, epoch: 2 | loss: 0.5505719\n", "\tspeed: 0.3439s/iter; left time: 3253.5148s\n", "\titers: 2800, epoch: 2 | loss: 0.5994757\n", "\tspeed: 0.3444s/iter; left time: 3224.1806s\n", "\titers: 2900, epoch: 2 | loss: 0.5347224\n", "\tspeed: 0.3436s/iter; left time: 3182.3860s\n", "\titers: 3000, epoch: 2 | loss: 0.4842962\n", "\tspeed: 0.3439s/iter; left time: 3150.4006s\n", "Epoch: 2 cost time: 1087.4813332557678\n", "Epoch: 2, Steps: 3040 | Train Loss: 0.6603346 Vali Loss: 0.6557665 Test Loss: 0.6360005\n", "EarlyStopping counter: 1 out of 2\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.4823048\n", "\tspeed: 10.1050s/iter; left time: 91157.5664s\n", "\titers: 200, epoch: 3 | loss: 0.5864404\n", "\tspeed: 0.3391s/iter; left time: 3025.1934s\n", "\titers: 300, epoch: 3 | loss: 0.8313769\n", "\tspeed: 0.3401s/iter; left time: 3000.1274s\n", "\titers: 400, epoch: 3 | loss: 0.5550111\n", "\tspeed: 0.3410s/iter; left time: 2973.4446s\n", "\titers: 500, epoch: 3 | loss: 0.5298777\n", "\tspeed: 0.3415s/iter; left time: 2944.1562s\n", "\titers: 600, epoch: 3 | loss: 0.5452624\n", "\tspeed: 0.3416s/iter; left time: 2910.5768s\n", "\titers: 700, epoch: 3 | loss: 0.6584248\n", "\tspeed: 0.3421s/iter; left time: 2880.5901s\n", "\titers: 800, epoch: 3 | loss: 0.6080624\n", "\tspeed: 0.3428s/iter; left time: 2852.2729s\n", "\titers: 900, epoch: 3 | loss: 0.5303823\n", "\tspeed: 0.3426s/iter; left time: 2816.6708s\n", "\titers: 1000, epoch: 3 | loss: 0.6928530\n", "\tspeed: 0.3429s/iter; left time: 2784.8975s\n", "\titers: 1100, epoch: 3 | loss: 0.5821518\n", "\tspeed: 0.3429s/iter; left time: 2750.3339s\n", "\titers: 1200, epoch: 3 | loss: 0.6861767\n", "\tspeed: 0.3429s/iter; left time: 2715.8302s\n", "\titers: 1300, epoch: 3 | loss: 0.5501401\n", "\tspeed: 0.3430s/iter; left time: 2682.4965s\n", "\titers: 1400, epoch: 3 | loss: 0.5270120\n", "\tspeed: 0.3432s/iter; left time: 2649.7191s\n", "\titers: 1500, epoch: 3 | loss: 0.5163071\n", "\tspeed: 0.3430s/iter; left time: 2613.6415s\n", "\titers: 1600, epoch: 3 | loss: 0.5919408\n", "\tspeed: 0.3432s/iter; left time: 2581.5086s\n", "\titers: 1700, epoch: 3 | loss: 0.7169980\n", "\tspeed: 0.3432s/iter; left time: 2547.1484s\n", "\titers: 1800, epoch: 3 | loss: 0.6343219\n", "\tspeed: 0.3436s/iter; left time: 2515.2141s\n", "\titers: 1900, epoch: 3 | loss: 0.8134232\n", "\tspeed: 0.3437s/iter; left time: 2481.4987s\n", "\titers: 2000, epoch: 3 | loss: 0.6951222\n", "\tspeed: 0.3443s/iter; left time: 2451.9620s\n", "\titers: 2100, epoch: 3 | loss: 0.6766552\n", "\tspeed: 0.3438s/iter; left time: 2414.1367s\n", "\titers: 2200, epoch: 3 | loss: 0.5743182\n", "\tspeed: 0.3437s/iter; left time: 2379.0082s\n", "\titers: 2300, epoch: 3 | loss: 0.5882503\n", "\tspeed: 0.3438s/iter; left time: 2344.9944s\n", "\titers: 2400, epoch: 3 | loss: 0.9680011\n", "\tspeed: 0.3436s/iter; left time: 2309.5231s\n", "\titers: 2500, epoch: 3 | loss: 0.7025395\n", "\tspeed: 0.3436s/iter; left time: 2274.9981s\n", "\titers: 2600, epoch: 3 | loss: 0.8608876\n", "\tspeed: 0.3436s/iter; left time: 2240.3695s\n", "\titers: 2700, epoch: 3 | loss: 0.6358204\n", "\tspeed: 0.3445s/iter; left time: 2212.3510s\n", "\titers: 2800, epoch: 3 | loss: 0.6699333\n", "\tspeed: 0.3441s/iter; left time: 2174.9651s\n", "\titers: 2900, epoch: 3 | loss: 0.4781308\n", "\tspeed: 0.3440s/iter; left time: 2139.7309s\n", "\titers: 3000, epoch: 3 | loss: 0.9126721\n", "\tspeed: 0.3437s/iter; left time: 2103.8730s\n", "Epoch: 3 cost time: 1087.6600723266602\n", "Epoch: 3, Steps: 3040 | Train Loss: 0.6233715 Vali Loss: 0.6607839 Test Loss: 0.6382161\n", "EarlyStopping counter: 2 out of 2\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_192_Autoformer_custom_ftM_sl96_ll48_pl192_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27687\n", "test shape: (27687, 1, 192, 15) (27687, 1, 192, 15)\n", "test shape: (27687, 192, 15) (27687, 192, 15)\n", "mse:0.6355927586555481, mae:0.48880577087402344\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_192',\n", " model='Autoformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=192,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=5,\n", " batch_size=32,\n", " patience=2,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_336 Model: Autoformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 336 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 5 Batch Size: 24 \n", " Patience: 2 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_336_Autoformer_custom_ftM_sl96_ll48_pl336_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 97142\n", "val 13605\n", "test 27543\n", "\titers: 100, epoch: 1 | loss: 0.9880684\n", "\tspeed: 0.9135s/iter; left time: 18393.3364s\n", "\titers: 200, epoch: 1 | loss: 0.8404264\n", "\tspeed: 0.3652s/iter; left time: 7317.4780s\n", "\titers: 300, epoch: 1 | loss: 0.7706644\n", "\tspeed: 0.3632s/iter; left time: 7240.8349s\n", "\titers: 400, epoch: 1 | loss: 0.7337714\n", "\tspeed: 0.3673s/iter; left time: 7285.6701s\n", "\titers: 500, epoch: 1 | loss: 0.6906027\n", "\tspeed: 0.3639s/iter; left time: 7181.5976s\n", "\titers: 600, epoch: 1 | loss: 0.7600725\n", "\tspeed: 0.3637s/iter; left time: 7141.8931s\n", "\titers: 700, epoch: 1 | loss: 0.8692102\n", "\tspeed: 0.3638s/iter; left time: 7107.5470s\n", "\titers: 800, epoch: 1 | loss: 0.8181397\n", "\tspeed: 0.3641s/iter; left time: 7076.7870s\n", "\titers: 900, epoch: 1 | loss: 0.7191927\n", "\tspeed: 0.3639s/iter; left time: 7035.5585s\n", "\titers: 1000, epoch: 1 | loss: 0.6759537\n", "\tspeed: 0.3647s/iter; left time: 7015.0041s\n", "\titers: 1100, epoch: 1 | loss: 0.7286678\n", "\tspeed: 0.3667s/iter; left time: 7016.4720s\n", "\titers: 1200, epoch: 1 | loss: 0.9390303\n", "\tspeed: 0.3649s/iter; left time: 6946.5821s\n", "\titers: 1300, epoch: 1 | loss: 0.7407528\n", "\tspeed: 0.3646s/iter; left time: 6904.7031s\n", "\titers: 1400, epoch: 1 | loss: 0.6314340\n", "\tspeed: 0.3649s/iter; left time: 6874.1372s\n", "\titers: 1500, epoch: 1 | loss: 0.5287334\n", "\tspeed: 0.3649s/iter; left time: 6837.0126s\n", "\titers: 1600, epoch: 1 | loss: 0.6374760\n", "\tspeed: 0.3652s/iter; left time: 6805.1083s\n", "\titers: 1700, epoch: 1 | loss: 0.8312553\n", "\tspeed: 0.3648s/iter; left time: 6762.1537s\n", "\titers: 1800, epoch: 1 | loss: 0.8006389\n", "\tspeed: 0.3652s/iter; left time: 6732.1551s\n", "\titers: 1900, epoch: 1 | loss: 0.7357878\n", "\tspeed: 0.3650s/iter; left time: 6692.5095s\n", "\titers: 2000, epoch: 1 | loss: 0.6158478\n", "\tspeed: 0.3655s/iter; left time: 6665.3169s\n", "\titers: 2100, epoch: 1 | loss: 0.7522695\n", "\tspeed: 0.3651s/iter; left time: 6621.1492s\n", "\titers: 2200, epoch: 1 | loss: 0.7121110\n", "\tspeed: 0.3649s/iter; left time: 6581.6534s\n", "\titers: 2300, epoch: 1 | loss: 0.7539850\n", "\tspeed: 0.3654s/iter; left time: 6554.5921s\n", "\titers: 2400, epoch: 1 | loss: 0.5645919\n", "\tspeed: 0.3655s/iter; left time: 6518.2687s\n", "\titers: 2500, epoch: 1 | loss: 0.6918775\n", "\tspeed: 0.3651s/iter; left time: 6475.2232s\n", "\titers: 2600, epoch: 1 | loss: 0.5531322\n", "\tspeed: 0.3654s/iter; left time: 6444.2286s\n", "\titers: 2700, epoch: 1 | loss: 0.7716080\n", "\tspeed: 0.3653s/iter; left time: 6405.3382s\n", "\titers: 2800, epoch: 1 | loss: 0.7074283\n", "\tspeed: 0.3651s/iter; left time: 6366.2461s\n", "\titers: 2900, epoch: 1 | loss: 0.8437936\n", "\tspeed: 0.3655s/iter; left time: 6337.1409s\n", "\titers: 3000, epoch: 1 | loss: 0.6570129\n", "\tspeed: 0.3659s/iter; left time: 6306.2238s\n", "\titers: 3100, epoch: 1 | loss: 0.8429404\n", "\tspeed: 0.3653s/iter; left time: 6259.7433s\n", "\titers: 3200, epoch: 1 | loss: 0.5499986\n", "\tspeed: 0.3651s/iter; left time: 6219.7776s\n", "\titers: 3300, epoch: 1 | loss: 0.8154365\n", "\tspeed: 0.3656s/iter; left time: 6192.3551s\n", "\titers: 3400, epoch: 1 | loss: 0.8554825\n", "\tspeed: 0.3655s/iter; left time: 6154.1215s\n", "\titers: 3500, epoch: 1 | loss: 0.5510955\n", "\tspeed: 0.3652s/iter; left time: 6111.9044s\n", "\titers: 3600, epoch: 1 | loss: 0.9184366\n", "\tspeed: 0.3662s/iter; left time: 6092.2388s\n", "\titers: 3700, epoch: 1 | loss: 0.7983560\n", "\tspeed: 0.3659s/iter; left time: 6051.0486s\n", "\titers: 3800, epoch: 1 | loss: 0.7316183\n", "\tspeed: 0.3651s/iter; left time: 6000.4189s\n", "\titers: 3900, epoch: 1 | loss: 0.8186337\n", "\tspeed: 0.3654s/iter; left time: 5969.1988s\n", "\titers: 4000, epoch: 1 | loss: 1.0068882\n", "\tspeed: 0.3655s/iter; left time: 5933.9268s\n", "Epoch: 1 cost time: 1533.1646897792816\n", "Epoch: 1, Steps: 4047 | Train Loss: 0.7403493 Vali Loss: 0.6590570 Test Loss: 0.6583017\n", "Validation loss decreased (inf --> 0.659057). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.7022683\n", "\tspeed: 11.3016s/iter; left time: 181831.0183s\n", "\titers: 200, epoch: 2 | loss: 0.9740535\n", "\tspeed: 0.3731s/iter; left time: 5964.9167s\n", "\titers: 300, epoch: 2 | loss: 0.6890996\n", "\tspeed: 0.3749s/iter; left time: 5956.2367s\n", "\titers: 400, epoch: 2 | loss: 0.9389350\n", "\tspeed: 0.3770s/iter; left time: 5951.8070s\n", "\titers: 500, epoch: 2 | loss: 0.6729711\n", "\tspeed: 0.3724s/iter; left time: 5842.3951s\n", "\titers: 600, epoch: 2 | loss: 0.7830299\n", "\tspeed: 0.3676s/iter; left time: 5730.0848s\n", "\titers: 700, epoch: 2 | loss: 0.7277954\n", "\tspeed: 0.3965s/iter; left time: 6140.9185s\n", "\titers: 800, epoch: 2 | loss: 0.7912410\n", "\tspeed: 0.3776s/iter; left time: 5811.4298s\n", "\titers: 900, epoch: 2 | loss: 0.4887069\n", "\tspeed: 0.3829s/iter; left time: 5854.7786s\n", "\titers: 1000, epoch: 2 | loss: 0.6884834\n", "\tspeed: 0.3785s/iter; left time: 5748.4550s\n", "\titers: 1100, epoch: 2 | loss: 0.8089579\n", "\tspeed: 0.3802s/iter; left time: 5737.5069s\n", "\titers: 1200, epoch: 2 | loss: 0.7049085\n", "\tspeed: 0.3675s/iter; left time: 5507.8583s\n", "\titers: 1300, epoch: 2 | loss: 0.6414325\n", "\tspeed: 0.3752s/iter; left time: 5585.7552s\n", "\titers: 1400, epoch: 2 | loss: 0.6514473\n", "\tspeed: 0.3793s/iter; left time: 5610.2023s\n", "\titers: 1500, epoch: 2 | loss: 0.6143963\n", "\tspeed: 0.3909s/iter; left time: 5741.6019s\n", "\titers: 1600, epoch: 2 | loss: 0.8066220\n", "\tspeed: 0.3764s/iter; left time: 5491.8551s\n", "\titers: 1700, epoch: 2 | loss: 0.6554816\n", "\tspeed: 0.3835s/iter; left time: 5556.6488s\n", "\titers: 1800, epoch: 2 | loss: 0.6771674\n", "\tspeed: 0.3814s/iter; left time: 5488.2723s\n", "\titers: 1900, epoch: 2 | loss: 0.6343329\n", "\tspeed: 0.3729s/iter; left time: 5328.7960s\n", "\titers: 2000, epoch: 2 | loss: 0.6431516\n", "\tspeed: 0.3929s/iter; left time: 5574.7634s\n", "\titers: 2100, epoch: 2 | loss: 0.7611627\n", "\tspeed: 0.3749s/iter; left time: 5281.3751s\n", "\titers: 2200, epoch: 2 | loss: 0.6797409\n", "\tspeed: 0.3831s/iter; left time: 5358.9864s\n", "\titers: 2300, epoch: 2 | loss: 0.7058476\n", "\tspeed: 0.3732s/iter; left time: 5183.2952s\n", "\titers: 2400, epoch: 2 | loss: 0.8141038\n", "\tspeed: 0.3952s/iter; left time: 5449.2618s\n", "\titers: 2500, epoch: 2 | loss: 0.7527688\n", "\tspeed: 0.3794s/iter; left time: 5193.5887s\n", "\titers: 2600, epoch: 2 | loss: 0.6934429\n", "\tspeed: 0.3698s/iter; left time: 5024.6414s\n", "\titers: 2700, epoch: 2 | loss: 0.7187514\n", "\tspeed: 0.3717s/iter; left time: 5014.3904s\n", "\titers: 2800, epoch: 2 | loss: 0.7011213\n", "\tspeed: 0.3853s/iter; left time: 5159.4199s\n", "\titers: 2900, epoch: 2 | loss: 0.8202876\n", "\tspeed: 0.3721s/iter; left time: 4944.3977s\n", "\titers: 3000, epoch: 2 | loss: 0.6800453\n", "\tspeed: 0.3686s/iter; left time: 4861.4658s\n", "\titers: 3100, epoch: 2 | loss: 0.5844565\n", "\tspeed: 0.3705s/iter; left time: 4849.3535s\n", "\titers: 3200, epoch: 2 | loss: 0.7243845\n", "\tspeed: 0.3831s/iter; left time: 4976.3103s\n", "\titers: 3300, epoch: 2 | loss: 0.7069282\n", "\tspeed: 0.3787s/iter; left time: 4881.0264s\n", "\titers: 3400, epoch: 2 | loss: 0.8794814\n", "\tspeed: 0.3684s/iter; left time: 4711.0939s\n", "\titers: 3500, epoch: 2 | loss: 0.7776490\n", "\tspeed: 0.3676s/iter; left time: 4663.9149s\n", "\titers: 3600, epoch: 2 | loss: 0.7343450\n", "\tspeed: 0.3737s/iter; left time: 4704.6107s\n", "\titers: 3700, epoch: 2 | loss: 0.8265253\n", "\tspeed: 0.3705s/iter; left time: 4627.7193s\n", "\titers: 3800, epoch: 2 | loss: 0.6352152\n", "\tspeed: 0.3732s/iter; left time: 4623.1153s\n", "\titers: 3900, epoch: 2 | loss: 0.6256530\n", "\tspeed: 0.3801s/iter; left time: 4671.6054s\n", "\titers: 4000, epoch: 2 | loss: 0.7043225\n", "\tspeed: 0.3851s/iter; left time: 4693.6295s\n", "Epoch: 2 cost time: 1580.0327162742615\n", "Epoch: 2, Steps: 4047 | Train Loss: 0.7091736 Vali Loss: 0.6688146 Test Loss: 0.6576338\n", "EarlyStopping counter: 1 out of 2\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.5226471\n", "\tspeed: 11.8437s/iter; left time: 142621.6211s\n", "\titers: 200, epoch: 3 | loss: 0.6867081\n", "\tspeed: 0.3852s/iter; left time: 4600.6275s\n", "\titers: 300, epoch: 3 | loss: 0.8440647\n", "\tspeed: 0.3861s/iter; left time: 4572.1686s\n", "\titers: 400, epoch: 3 | loss: 0.6405030\n", "\tspeed: 0.3875s/iter; left time: 4549.8239s\n", "\titers: 500, epoch: 3 | loss: 0.5559281\n", "\tspeed: 0.3876s/iter; left time: 4512.3382s\n", "\titers: 600, epoch: 3 | loss: 0.5952459\n", "\tspeed: 0.3879s/iter; left time: 4477.5077s\n", "\titers: 700, epoch: 3 | loss: 0.6088215\n", "\tspeed: 0.3875s/iter; left time: 4434.2106s\n", "\titers: 800, epoch: 3 | loss: 0.5898892\n", "\tspeed: 0.3884s/iter; left time: 4404.9525s\n", "\titers: 900, epoch: 3 | loss: 0.6149271\n", "\tspeed: 0.3884s/iter; left time: 4366.6550s\n", "\titers: 1000, epoch: 3 | loss: 0.5418059\n", "\tspeed: 0.3890s/iter; left time: 4334.2206s\n", "\titers: 1100, epoch: 3 | loss: 0.6649979\n", "\tspeed: 0.3886s/iter; left time: 4291.1479s\n", "\titers: 1200, epoch: 3 | loss: 0.5812250\n", "\tspeed: 0.3886s/iter; left time: 4252.2190s\n", "\titers: 1300, epoch: 3 | loss: 0.5225907\n", "\tspeed: 0.3887s/iter; left time: 4213.8788s\n", "\titers: 1400, epoch: 3 | loss: 0.9102845\n", "\tspeed: 0.3889s/iter; left time: 4178.0655s\n", "\titers: 1500, epoch: 3 | loss: 0.7994068\n", "\tspeed: 0.3885s/iter; left time: 4134.9006s\n", "\titers: 1600, epoch: 3 | loss: 0.7581812\n", "\tspeed: 0.3891s/iter; left time: 4101.4135s\n", "\titers: 1700, epoch: 3 | loss: 0.5427315\n", "\tspeed: 0.3894s/iter; left time: 4066.1177s\n", "\titers: 1800, epoch: 3 | loss: 0.7830305\n", "\tspeed: 0.3891s/iter; left time: 4024.4514s\n", "\titers: 1900, epoch: 3 | loss: 0.6597770\n", "\tspeed: 0.3892s/iter; left time: 3986.6961s\n", "\titers: 2000, epoch: 3 | loss: 0.6462795\n", "\tspeed: 0.3919s/iter; left time: 3975.0497s\n", "\titers: 2100, epoch: 3 | loss: 0.5859250\n", "\tspeed: 0.3894s/iter; left time: 3910.5066s\n", "\titers: 2200, epoch: 3 | loss: 0.7551333\n", "\tspeed: 0.3893s/iter; left time: 3870.0101s\n", "\titers: 2300, epoch: 3 | loss: 0.8439524\n", "\tspeed: 0.3896s/iter; left time: 3834.4576s\n", "\titers: 2400, epoch: 3 | loss: 0.6858028\n", "\tspeed: 0.3892s/iter; left time: 3791.5422s\n", "\titers: 2500, epoch: 3 | loss: 0.9280378\n", "\tspeed: 0.3891s/iter; left time: 3752.1090s\n", "\titers: 2600, epoch: 3 | loss: 0.7284593\n", "\tspeed: 0.3892s/iter; left time: 3713.9121s\n", "\titers: 2700, epoch: 3 | loss: 0.6601533\n", "\tspeed: 0.3893s/iter; left time: 3675.6295s\n", "\titers: 2800, epoch: 3 | loss: 0.6197154\n", "\tspeed: 0.3893s/iter; left time: 3636.6065s\n", "\titers: 2900, epoch: 3 | loss: 0.5689324\n", "\tspeed: 0.3897s/iter; left time: 3601.4967s\n", "\titers: 3000, epoch: 3 | loss: 0.6846474\n", "\tspeed: 0.3891s/iter; left time: 3557.3855s\n", "\titers: 3100, epoch: 3 | loss: 0.7466645\n", "\tspeed: 0.3892s/iter; left time: 3519.3702s\n", "\titers: 3200, epoch: 3 | loss: 0.7093028\n", "\tspeed: 0.3895s/iter; left time: 3483.0346s\n", "\titers: 3300, epoch: 3 | loss: 0.6642067\n", "\tspeed: 0.3895s/iter; left time: 3444.0419s\n", "\titers: 3400, epoch: 3 | loss: 0.7061267\n", "\tspeed: 0.3893s/iter; left time: 3403.2035s\n", "\titers: 3500, epoch: 3 | loss: 0.5829228\n", "\tspeed: 0.3897s/iter; left time: 3368.1838s\n", "\titers: 3600, epoch: 3 | loss: 0.8503827\n", "\tspeed: 0.3900s/iter; left time: 3331.7421s\n", "\titers: 3700, epoch: 3 | loss: 0.8098344\n", "\tspeed: 0.3895s/iter; left time: 3288.1963s\n", "\titers: 3800, epoch: 3 | loss: 0.6199698\n", "\tspeed: 0.3892s/iter; left time: 3246.2965s\n", "\titers: 3900, epoch: 3 | loss: 0.7025493\n", "\tspeed: 0.3894s/iter; left time: 3209.1884s\n", "\titers: 4000, epoch: 3 | loss: 0.5690978\n", "\tspeed: 0.3893s/iter; left time: 3169.6261s\n", "Epoch: 3 cost time: 1623.6088049411774\n", "Epoch: 3, Steps: 4047 | Train Loss: 0.6875451 Vali Loss: 0.6669970 Test Loss: 0.6563502\n", "EarlyStopping counter: 2 out of 2\n", "Early stopping\n", ">>>>>>>testing : long_term_forecast_weather_96_336_Autoformer_custom_ftM_sl96_ll48_pl336_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27543\n", "test shape: (27543, 1, 336, 15) (27543, 1, 336, 15)\n", "test shape: (27543, 336, 15) (27543, 336, 15)\n", "mse:0.6583003997802734, mae:0.4999295175075531\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_336',\n", " model='Autoformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=336,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=5,\n", " batch_size=24,\n", " patience=2,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Args in experiment:\n", "\u001b[1mBasic Config\u001b[0m\n", " Task Name: long_term_forecast Is Training: 1 \n", " Model ID: weather_96_720 Model: Autoformer \n", "\n", "\u001b[1mData Loader\u001b[0m\n", " Data: custom Root Path: ./dataset/ \n", " Data Path: UBB_weather_jan2008_may2023_cleaned.csvFeatures: M \n", " Target: T(degC) Freq: h \n", " Checkpoints: ./checkpoints/ \n", "\n", "\u001b[1mForecasting Task\u001b[0m\n", " Seq Len: 96 Label Len: 48 \n", " Pred Len: 720 Seasonal Patterns: Yearly \n", " Inverse: 0 \n", "\n", "\u001b[1mModel Parameters\u001b[0m\n", " Top k: 5 Num Kernels: 6 \n", " Enc In: 15 Dec In: 15 \n", " C Out: 15 d model: 512 \n", " n heads: 8 e layers: 3 \n", " d layers: 1 d FF: 512 \n", " Moving Avg: 25 Factor: 3 \n", " Distil: 1 Dropout: 0.1 \n", " Embed: timeF Activation: gelu \n", " Output Attention: 0 \n", "\n", "\u001b[1mRun Parameters\u001b[0m\n", " Num Workers: 10 Itr: 1 \n", " Train Epochs: 5 Batch Size: 16 \n", " Patience: 2 Learning Rate: 0.0001 \n", " Des: Exp Loss: MSE \n", " Lradj: type1 Use Amp: 0 \n", "\n", "\u001b[1mGPU\u001b[0m\n", " Use GPU: 1 GPU: 0 \n", " Use Multi GPU: 0 Devices: 0,1,2,3 \n", "\n", "\u001b[1mDe-stationary Projector Params\u001b[0m\n", " P Hidden Dims: 128, 128 P Hidden Layers: 2 \n", "\n", "Use GPU: cuda:0\n", ">>>>>>>start training : long_term_forecast_weather_96_720_Autoformer_custom_ftM_sl96_ll48_pl720_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>>\n", "train 96758\n", "val 13221\n", "test 27159\n", "\titers: 100, epoch: 1 | loss: 0.9603574\n", "\tspeed: 0.9555s/iter; left time: 28795.7470s\n", "\titers: 200, epoch: 1 | loss: 0.7239472\n", "\tspeed: 0.4501s/iter; left time: 13518.6511s\n", "\titers: 300, epoch: 1 | loss: 0.7322806\n", "\tspeed: 0.4508s/iter; left time: 13493.9899s\n", "\titers: 400, epoch: 1 | loss: 0.6686575\n", "\tspeed: 0.4513s/iter; left time: 13465.9501s\n", "\titers: 500, epoch: 1 | loss: 0.7217173\n", "\tspeed: 0.4425s/iter; left time: 13157.9215s\n", "\titers: 600, epoch: 1 | loss: 0.7687663\n", "\tspeed: 0.4565s/iter; left time: 13529.2246s\n", "\titers: 700, epoch: 1 | loss: 0.8749666\n", "\tspeed: 0.4431s/iter; left time: 13086.0838s\n", "\titers: 800, epoch: 1 | loss: 0.6909215\n", "\tspeed: 0.4438s/iter; left time: 13062.9196s\n", "\titers: 900, epoch: 1 | loss: 0.6541351\n", "\tspeed: 0.4686s/iter; left time: 13748.1732s\n", "\titers: 1000, epoch: 1 | loss: 0.6853172\n", "\tspeed: 0.4394s/iter; left time: 12845.5933s\n", "\titers: 1100, epoch: 1 | loss: 0.6631300\n", "\tspeed: 0.4509s/iter; left time: 13136.2609s\n", "\titers: 1200, epoch: 1 | loss: 0.9201595\n", "\tspeed: 0.4579s/iter; left time: 13296.5421s\n", "\titers: 1300, epoch: 1 | loss: 0.6946401\n", "\tspeed: 0.4555s/iter; left time: 13181.1450s\n", "\titers: 1400, epoch: 1 | loss: 0.8304020\n", "\tspeed: 0.4587s/iter; left time: 13226.1131s\n", "\titers: 1500, epoch: 1 | loss: 0.7758315\n", "\tspeed: 0.4584s/iter; left time: 13172.8894s\n", "\titers: 1600, epoch: 1 | loss: 0.7143643\n", "\tspeed: 0.4587s/iter; left time: 13134.9933s\n", "\titers: 1700, epoch: 1 | loss: 0.8073043\n", "\tspeed: 0.4582s/iter; left time: 13074.1070s\n", "\titers: 1800, epoch: 1 | loss: 0.8250932\n", "\tspeed: 0.4590s/iter; left time: 13052.9928s\n", "\titers: 1900, epoch: 1 | loss: 0.7570876\n", "\tspeed: 0.4606s/iter; left time: 13052.0512s\n", "\titers: 2000, epoch: 1 | loss: 0.7466280\n", "\tspeed: 0.4593s/iter; left time: 12968.0591s\n", "\titers: 2100, epoch: 1 | loss: 1.0208782\n", "\tspeed: 0.4590s/iter; left time: 12914.0404s\n", "\titers: 2200, epoch: 1 | loss: 0.8428599\n", "\tspeed: 0.4586s/iter; left time: 12856.5536s\n", "\titers: 2300, epoch: 1 | loss: 0.6175047\n", "\tspeed: 0.4589s/iter; left time: 12820.1779s\n", "\titers: 2400, epoch: 1 | loss: 0.6624880\n", "\tspeed: 0.4586s/iter; left time: 12766.3461s\n", "\titers: 2500, epoch: 1 | loss: 0.7401301\n", "\tspeed: 0.4588s/iter; left time: 12725.5138s\n", "\titers: 2600, epoch: 1 | loss: 1.0302334\n", "\tspeed: 0.4593s/iter; left time: 12692.0670s\n", "\titers: 2700, epoch: 1 | loss: 0.6006274\n", "\tspeed: 0.4582s/iter; left time: 12616.6535s\n", "\titers: 2800, epoch: 1 | loss: 0.7174718\n", "\tspeed: 0.4588s/iter; left time: 12587.4120s\n", "\titers: 2900, epoch: 1 | loss: 0.7868289\n", "\tspeed: 0.4587s/iter; left time: 12539.4349s\n", "\titers: 3000, epoch: 1 | loss: 0.7666978\n", "\tspeed: 0.4589s/iter; left time: 12499.1845s\n", "\titers: 3100, epoch: 1 | loss: 0.7119358\n", "\tspeed: 0.4623s/iter; left time: 12545.7585s\n", "\titers: 3200, epoch: 1 | loss: 0.6228486\n", "\tspeed: 0.4592s/iter; left time: 12414.8197s\n", "\titers: 3300, epoch: 1 | loss: 0.7141392\n", "\tspeed: 0.4536s/iter; left time: 12217.9038s\n", "\titers: 3400, epoch: 1 | loss: 0.9214259\n", "\tspeed: 0.4516s/iter; left time: 12118.0460s\n", "\titers: 3500, epoch: 1 | loss: 1.0003225\n", "\tspeed: 0.4522s/iter; left time: 12091.2662s\n", "\titers: 3600, epoch: 1 | loss: 0.8784891\n", "\tspeed: 0.4502s/iter; left time: 11991.9458s\n", "\titers: 3700, epoch: 1 | loss: 0.6330586\n", "\tspeed: 0.4533s/iter; left time: 12027.8128s\n", "\titers: 3800, epoch: 1 | loss: 0.5600875\n", "\tspeed: 0.4459s/iter; left time: 11787.0193s\n", "\titers: 3900, epoch: 1 | loss: 0.9674342\n", "\tspeed: 0.4615s/iter; left time: 12152.8462s\n", "\titers: 4000, epoch: 1 | loss: 0.7633109\n", "\tspeed: 0.4560s/iter; left time: 11963.6661s\n", "\titers: 4100, epoch: 1 | loss: 0.7559458\n", "\tspeed: 0.4590s/iter; left time: 11997.4139s\n", "\titers: 4200, epoch: 1 | loss: 0.7904814\n", "\tspeed: 0.4512s/iter; left time: 11747.9231s\n", "\titers: 4300, epoch: 1 | loss: 0.8458348\n", "\tspeed: 0.4515s/iter; left time: 11709.0167s\n", "\titers: 4400, epoch: 1 | loss: 0.6421102\n", "\tspeed: 0.4479s/iter; left time: 11572.4343s\n", "\titers: 4500, epoch: 1 | loss: 0.8389468\n", "\tspeed: 0.4449s/iter; left time: 11450.1041s\n", "\titers: 4600, epoch: 1 | loss: 0.8301011\n", "\tspeed: 0.4585s/iter; left time: 11753.2583s\n", "\titers: 4700, epoch: 1 | loss: 0.7046257\n", "\tspeed: 0.4423s/iter; left time: 11293.9490s\n", "\titers: 4800, epoch: 1 | loss: 0.6008570\n", "\tspeed: 0.4412s/iter; left time: 11223.5723s\n", "\titers: 4900, epoch: 1 | loss: 0.7205773\n", "\tspeed: 0.4434s/iter; left time: 11234.7667s\n", "\titers: 5000, epoch: 1 | loss: 0.5982564\n", "\tspeed: 0.4540s/iter; left time: 11456.2707s\n", "\titers: 5100, epoch: 1 | loss: 0.9625741\n", "\tspeed: 0.4360s/iter; left time: 10958.2162s\n", "\titers: 5200, epoch: 1 | loss: 0.7655671\n", "\tspeed: 0.4493s/iter; left time: 11249.3214s\n", "\titers: 5300, epoch: 1 | loss: 0.6220975\n", "\tspeed: 0.4585s/iter; left time: 11432.3555s\n", "\titers: 5400, epoch: 1 | loss: 0.7891681\n", "\tspeed: 0.4598s/iter; left time: 11419.5634s\n", "\titers: 5500, epoch: 1 | loss: 0.7770585\n", "\tspeed: 0.4595s/iter; left time: 11366.4488s\n", "\titers: 5600, epoch: 1 | loss: 0.6804238\n", "\tspeed: 0.4593s/iter; left time: 11316.3338s\n", "\titers: 5700, epoch: 1 | loss: 0.8709115\n", "\tspeed: 0.4596s/iter; left time: 11275.9706s\n", "\titers: 5800, epoch: 1 | loss: 0.7448921\n", "\tspeed: 0.4613s/iter; left time: 11272.4460s\n", "\titers: 5900, epoch: 1 | loss: 0.7820476\n", "\tspeed: 0.4592s/iter; left time: 11174.7847s\n", "\titers: 6000, epoch: 1 | loss: 0.6000647\n", "\tspeed: 0.4593s/iter; left time: 11130.6486s\n", "Epoch: 1 cost time: 2799.2763035297394\n", "Epoch: 1, Steps: 6047 | Train Loss: 0.7766487 Vali Loss: 0.7057465 Test Loss: 0.6894733\n", "Validation loss decreased (inf --> 0.705747). Saving model ...\n", "Updating learning rate to 0.0001\n", "\titers: 100, epoch: 2 | loss: 0.7532099\n", "\tspeed: 13.3548s/iter; left time: 321703.1171s\n", "\titers: 200, epoch: 2 | loss: 0.8019987\n", "\tspeed: 0.4398s/iter; left time: 10551.0888s\n", "\titers: 300, epoch: 2 | loss: 0.8271407\n", "\tspeed: 0.4459s/iter; left time: 10653.0767s\n", "\titers: 400, epoch: 2 | loss: 0.7467194\n", "\tspeed: 0.4352s/iter; left time: 10352.2873s\n", "\titers: 500, epoch: 2 | loss: 0.7556756\n", "\tspeed: 0.4440s/iter; left time: 10518.6698s\n", "\titers: 600, epoch: 2 | loss: 0.7573571\n", "\tspeed: 0.4450s/iter; left time: 10498.1535s\n", "\titers: 700, epoch: 2 | loss: 0.7155163\n", "\tspeed: 0.4551s/iter; left time: 10689.3955s\n", "\titers: 800, epoch: 2 | loss: 0.9697452\n", "\tspeed: 0.4368s/iter; left time: 10217.0801s\n", "\titers: 900, epoch: 2 | loss: 0.9269638\n", "\tspeed: 0.4584s/iter; left time: 10675.9065s\n", "\titers: 1000, epoch: 2 | loss: 0.7326458\n", "\tspeed: 0.4620s/iter; left time: 10712.1622s\n", "\titers: 1100, epoch: 2 | loss: 0.7186947\n", "\tspeed: 0.4593s/iter; left time: 10604.4516s\n", "\titers: 1200, epoch: 2 | loss: 0.6712628\n", "\tspeed: 0.4600s/iter; left time: 10574.0652s\n", "\titers: 1300, epoch: 2 | loss: 0.6584099\n", "\tspeed: 0.4594s/iter; left time: 10515.4738s\n", "\titers: 1400, epoch: 2 | loss: 0.7468253\n", "\tspeed: 0.4595s/iter; left time: 10472.0363s\n", "\titers: 1500, epoch: 2 | loss: 0.6793265\n", "\tspeed: 0.4599s/iter; left time: 10435.7688s\n", "\titers: 1600, epoch: 2 | loss: 0.6462870\n", "\tspeed: 0.4560s/iter; left time: 10299.9205s\n", "\titers: 1700, epoch: 2 | loss: 0.7196621\n", "\tspeed: 0.4425s/iter; left time: 9952.3929s\n", "\titers: 1800, epoch: 2 | loss: 0.7323074\n", "\tspeed: 0.4535s/iter; left time: 10154.0506s\n", "\titers: 1900, epoch: 2 | loss: 0.8430954\n", "\tspeed: 0.4596s/iter; left time: 10245.0483s\n", "\titers: 2000, epoch: 2 | loss: 0.8802910\n", "\tspeed: 0.4522s/iter; left time: 10032.9257s\n", "\titers: 2100, epoch: 2 | loss: 0.6665894\n", "\tspeed: 0.4524s/iter; left time: 9992.3549s\n", "\titers: 2200, epoch: 2 | loss: 0.8783111\n", "\tspeed: 0.4460s/iter; left time: 9807.4838s\n", "\titers: 2300, epoch: 2 | loss: 0.8385394\n", "\tspeed: 0.4519s/iter; left time: 9891.7577s\n", "\titers: 2400, epoch: 2 | loss: 0.6595713\n", "\tspeed: 0.4470s/iter; left time: 9740.7519s\n", "\titers: 2500, epoch: 2 | loss: 0.7473293\n", "\tspeed: 0.4488s/iter; left time: 9733.6714s\n", "\titers: 2600, epoch: 2 | loss: 0.6331670\n", "\tspeed: 0.4590s/iter; left time: 9910.0367s\n", "\titers: 2700, epoch: 2 | loss: 0.6662247\n", "\tspeed: 0.4339s/iter; left time: 9323.0648s\n", "\titers: 2800, epoch: 2 | loss: 0.7056764\n", "\tspeed: 0.4327s/iter; left time: 9255.1089s\n", "\titers: 2900, epoch: 2 | loss: 0.6300455\n", "\tspeed: 0.4310s/iter; left time: 9175.3680s\n", "\titers: 3000, epoch: 2 | loss: 0.9379979\n", "\tspeed: 0.4302s/iter; left time: 9116.1701s\n", "\titers: 3100, epoch: 2 | loss: 0.8031844\n", "\tspeed: 0.4295s/iter; left time: 9057.6004s\n", "\titers: 3200, epoch: 2 | loss: 0.5837590\n", "\tspeed: 0.4295s/iter; left time: 9014.6180s\n", "\titers: 3300, epoch: 2 | loss: 0.7023572\n", "\tspeed: 0.4295s/iter; left time: 8972.4452s\n", "\titers: 3400, epoch: 2 | loss: 0.8969774\n", "\tspeed: 0.4298s/iter; left time: 8934.1551s\n", "\titers: 3500, epoch: 2 | loss: 0.6130797\n", "\tspeed: 0.4301s/iter; left time: 8897.6800s\n", "\titers: 3600, epoch: 2 | loss: 0.7826036\n", "\tspeed: 0.4316s/iter; left time: 8886.4233s\n", "\titers: 3700, epoch: 2 | loss: 0.6421943\n", "\tspeed: 0.4299s/iter; left time: 8808.9846s\n", "\titers: 3800, epoch: 2 | loss: 0.7009106\n", "\tspeed: 0.4295s/iter; left time: 8757.8461s\n", "\titers: 3900, epoch: 2 | loss: 0.8425238\n", "\tspeed: 0.4293s/iter; left time: 8710.0280s\n", "\titers: 4000, epoch: 2 | loss: 0.6857973\n", "\tspeed: 0.4321s/iter; left time: 8723.5258s\n", "\titers: 4100, epoch: 2 | loss: 0.7124602\n", "\tspeed: 0.4326s/iter; left time: 8690.8675s\n", "\titers: 4200, epoch: 2 | loss: 0.7794118\n", "\tspeed: 0.4301s/iter; left time: 8597.4233s\n", "\titers: 4300, epoch: 2 | loss: 0.8366086\n", "\tspeed: 0.4339s/iter; left time: 8630.6446s\n", "\titers: 4400, epoch: 2 | loss: 0.6617534\n", "\tspeed: 0.4363s/iter; left time: 8633.1436s\n", "\titers: 4500, epoch: 2 | loss: 0.6948798\n", "\tspeed: 0.4391s/iter; left time: 8645.3875s\n", "\titers: 4600, epoch: 2 | loss: 1.0512407\n", "\tspeed: 0.4437s/iter; left time: 8691.9839s\n", "\titers: 4700, epoch: 2 | loss: 0.8630439\n", "\tspeed: 0.4457s/iter; left time: 8687.0924s\n", "\titers: 4800, epoch: 2 | loss: 0.7595628\n", "\tspeed: 0.4652s/iter; left time: 9020.5246s\n", "\titers: 4900, epoch: 2 | loss: 0.9543853\n", "\tspeed: 0.4600s/iter; left time: 8873.7230s\n", "\titers: 5000, epoch: 2 | loss: 0.5918649\n", "\tspeed: 0.4439s/iter; left time: 8517.3192s\n", "\titers: 5100, epoch: 2 | loss: 0.7126524\n", "\tspeed: 0.4593s/iter; left time: 8767.3785s\n", "\titers: 5200, epoch: 2 | loss: 0.9577637\n", "\tspeed: 0.4492s/iter; left time: 8530.7793s\n", "\titers: 5300, epoch: 2 | loss: 0.7966100\n", "\tspeed: 0.4640s/iter; left time: 8763.7341s\n", "\titers: 5400, epoch: 2 | loss: 0.7233946\n", "\tspeed: 0.4469s/iter; left time: 8396.9809s\n", "\titers: 5500, epoch: 2 | loss: 0.6371168\n", "\tspeed: 0.4448s/iter; left time: 8313.0104s\n", "\titers: 5600, epoch: 2 | loss: 0.5235192\n", "\tspeed: 0.4485s/iter; left time: 8336.3323s\n", "\titers: 5700, epoch: 2 | loss: 0.6933604\n", "\tspeed: 0.4659s/iter; left time: 8613.7011s\n", "\titers: 5800, epoch: 2 | loss: 0.6909462\n", "\tspeed: 0.4417s/iter; left time: 8122.3986s\n", "\titers: 5900, epoch: 2 | loss: 0.7535721\n", "\tspeed: 0.4525s/iter; left time: 8275.7755s\n", "\titers: 6000, epoch: 2 | loss: 0.9199245\n", "\tspeed: 0.4481s/iter; left time: 8149.5853s\n", "Epoch: 2 cost time: 2742.611443042755\n", "Epoch: 2, Steps: 6047 | Train Loss: 0.7549241 Vali Loss: 0.7068630 Test Loss: 0.6860026\n", "EarlyStopping counter: 1 out of 2\n", "Updating learning rate to 5e-05\n", "\titers: 100, epoch: 3 | loss: 0.7342012\n", "\tspeed: 13.5630s/iter; left time: 244703.7962s\n", "\titers: 200, epoch: 3 | loss: 0.8351719\n", "\tspeed: 0.4497s/iter; left time: 8067.6505s\n", "\titers: 300, epoch: 3 | loss: 0.6503428\n", "\tspeed: 0.4516s/iter; left time: 8056.8956s\n", "\titers: 400, epoch: 3 | loss: 0.8123377\n", "\tspeed: 0.4306s/iter; left time: 7639.7205s\n", "\titers: 500, epoch: 3 | loss: 0.8017388\n", "\tspeed: 0.4418s/iter; left time: 7794.1005s\n", "\titers: 600, epoch: 3 | loss: 0.5547916\n", "\tspeed: 0.4494s/iter; left time: 7883.1194s\n", "\titers: 700, epoch: 3 | loss: 0.7729843\n", "\tspeed: 0.4493s/iter; left time: 7836.6886s\n", "\titers: 800, epoch: 3 | loss: 0.7011228\n", "\tspeed: 0.4488s/iter; left time: 7783.3905s\n", "\titers: 900, epoch: 3 | loss: 0.5704036\n", "\tspeed: 0.4601s/iter; left time: 7933.7306s\n", "\titers: 1000, epoch: 3 | loss: 0.6628679\n", "\tspeed: 0.4597s/iter; left time: 7880.0870s\n", "\titers: 1100, epoch: 3 | loss: 0.6710958\n", "\tspeed: 0.4592s/iter; left time: 7825.9725s\n", "\titers: 1200, epoch: 3 | loss: 0.7772375\n", "\tspeed: 0.4594s/iter; left time: 7783.0100s\n", "\titers: 1300, epoch: 3 | loss: 0.9008939\n", "\tspeed: 0.4591s/iter; left time: 7733.0006s\n", "\titers: 1400, epoch: 3 | loss: 0.7370104\n", "\tspeed: 0.4593s/iter; left time: 7689.4562s\n", "\titers: 1500, epoch: 3 | loss: 0.7635467\n", "\tspeed: 0.4602s/iter; left time: 7658.9449s\n", "\titers: 1600, epoch: 3 | loss: 0.6638049\n", "\tspeed: 0.4613s/iter; left time: 7631.3346s\n", "\titers: 1700, epoch: 3 | loss: 0.6592820\n", "\tspeed: 0.4602s/iter; left time: 7566.0964s\n", "\titers: 1800, epoch: 3 | loss: 0.8229528\n", "\tspeed: 0.4592s/iter; left time: 7504.2003s\n", "\titers: 1900, epoch: 3 | loss: 0.6086338\n", "\tspeed: 0.4589s/iter; left time: 7454.1781s\n", "\titers: 2000, epoch: 3 | loss: 0.7786828\n", "\tspeed: 0.4594s/iter; left time: 7415.7936s\n", "\titers: 2100, epoch: 3 | loss: 0.7509465\n", "\tspeed: 0.4594s/iter; left time: 7369.2241s\n", "\titers: 2200, epoch: 3 | loss: 0.7214549\n", "\tspeed: 0.4596s/iter; left time: 7327.6959s\n", "\titers: 2300, epoch: 3 | loss: 0.5923977\n", "\tspeed: 0.4593s/iter; left time: 7276.1773s\n", "\titers: 2400, epoch: 3 | loss: 0.8323786\n", "\tspeed: 0.4588s/iter; left time: 7223.1569s\n", "\titers: 2500, epoch: 3 | loss: 0.7950462\n", "\tspeed: 0.4596s/iter; left time: 7189.7087s\n", "\titers: 2600, epoch: 3 | loss: 0.8599079\n", "\tspeed: 0.4495s/iter; left time: 6986.3935s\n", "\titers: 2700, epoch: 3 | loss: 0.8405094\n", "\tspeed: 0.4484s/iter; left time: 6924.4968s\n", "\titers: 2800, epoch: 3 | loss: 0.6957900\n", "\tspeed: 0.4583s/iter; left time: 7030.5134s\n", "\titers: 2900, epoch: 3 | loss: 0.6310622\n", "\tspeed: 0.4574s/iter; left time: 6971.3429s\n", "\titers: 3000, epoch: 3 | loss: 0.7998003\n", "\tspeed: 0.4364s/iter; left time: 6607.9305s\n", "\titers: 3100, epoch: 3 | loss: 0.7274541\n", "\tspeed: 0.4319s/iter; left time: 6497.1222s\n", "\titers: 3200, epoch: 3 | loss: 0.7466780\n", "\tspeed: 0.4303s/iter; left time: 6430.0421s\n", "\titers: 3300, epoch: 3 | loss: 0.9553334\n", "\tspeed: 0.4300s/iter; left time: 6381.9745s\n", "\titers: 3400, epoch: 3 | loss: 0.6766061\n", "\tspeed: 0.4300s/iter; left time: 6339.5773s\n", "\titers: 3500, epoch: 3 | loss: 0.7906755\n", "\tspeed: 0.4303s/iter; left time: 6300.4868s\n", "\titers: 3600, epoch: 3 | loss: 0.7500862\n", "\tspeed: 0.4298s/iter; left time: 6250.3893s\n", "\titers: 3700, epoch: 3 | loss: 0.6910461\n", "\tspeed: 0.4301s/iter; left time: 6210.8952s\n", "\titers: 3800, epoch: 3 | loss: 0.6598073\n", "\tspeed: 0.4298s/iter; left time: 6164.8671s\n", "\titers: 3900, epoch: 3 | loss: 0.8743470\n", "\tspeed: 0.4310s/iter; left time: 6138.4390s\n", "\titers: 4000, epoch: 3 | loss: 0.5323969\n", "\tspeed: 0.4299s/iter; left time: 6079.5293s\n", "\titers: 4100, epoch: 3 | loss: 0.7580138\n", "\tspeed: 0.4297s/iter; left time: 6033.8365s\n", "\titers: 4200, epoch: 3 | loss: 0.5997500\n", "\tspeed: 0.4297s/iter; left time: 5991.3225s\n", "\titers: 4300, epoch: 3 | loss: 0.8090878\n", "\tspeed: 0.4297s/iter; left time: 5947.3277s\n", "\titers: 4400, epoch: 3 | loss: 0.8762347\n", "\tspeed: 0.4305s/iter; left time: 5915.8187s\n", "\titers: 4500, epoch: 3 | loss: 0.7804387\n", "\tspeed: 0.4302s/iter; left time: 5868.1518s\n", "\titers: 4600, epoch: 3 | loss: 0.7935297\n", "\tspeed: 0.4301s/iter; left time: 5824.5206s\n", "\titers: 4700, epoch: 3 | loss: 0.6878918\n", "\tspeed: 0.4294s/iter; left time: 5772.4300s\n", "\titers: 4800, epoch: 3 | loss: 0.7715616\n", "\tspeed: 0.4326s/iter; left time: 5771.7026s\n", "\titers: 4900, epoch: 3 | loss: 0.6926800\n", "\tspeed: 0.4301s/iter; left time: 5695.9298s\n", "\titers: 5000, epoch: 3 | loss: 0.8709217\n", "\tspeed: 0.4297s/iter; left time: 5647.2396s\n", "\titers: 5100, epoch: 3 | loss: 0.7834487\n", "\tspeed: 0.4301s/iter; left time: 5609.2908s\n", "\titers: 5200, epoch: 3 | loss: 0.8809763\n", "\tspeed: 0.4295s/iter; left time: 5558.1488s\n", "\titers: 5300, epoch: 3 | loss: 0.7954376\n", "\tspeed: 0.4297s/iter; left time: 5518.7337s\n", "\titers: 5400, epoch: 3 | loss: 0.6373386\n", "\tspeed: 0.4297s/iter; left time: 5474.7064s\n", "\titers: 5500, epoch: 3 | loss: 0.7209170\n", "\tspeed: 0.4297s/iter; left time: 5432.4957s\n", "\titers: 5600, epoch: 3 | loss: 0.7291998\n", "\tspeed: 0.4310s/iter; left time: 5405.2811s\n", "\titers: 5700, epoch: 3 | loss: 0.7159649\n", "\tspeed: 0.4295s/iter; left time: 5343.3986s\n", "\titers: 5800, epoch: 3 | loss: 0.5798818\n", "\tspeed: 0.4295s/iter; left time: 5300.7332s\n", "\titers: 5900, epoch: 3 | loss: 0.6896509\n", "\tspeed: 0.4296s/iter; left time: 5258.9075s\n", "\titers: 6000, epoch: 3 | loss: 0.7372069\n", "\tspeed: 0.4312s/iter; left time: 5235.4232s\n", "Epoch: 3 cost time: 2725.88139295578\n", "Epoch: 3, Steps: 6047 | Train Loss: 0.7435989 Vali Loss: 0.7022479 Test Loss: 0.6843458\n", "Validation loss decreased (0.705747 --> 0.702248). Saving model ...\n", "Updating learning rate to 2.5e-05\n", "\titers: 100, epoch: 4 | loss: 0.7065171\n", "\tspeed: 12.4752s/iter; left time: 149639.8662s\n", "\titers: 200, epoch: 4 | loss: 0.5705279\n", "\tspeed: 0.4264s/iter; left time: 5072.0280s\n", "\titers: 300, epoch: 4 | loss: 0.6989686\n", "\tspeed: 0.4277s/iter; left time: 5045.2146s\n", "\titers: 400, epoch: 4 | loss: 0.7848147\n", "\tspeed: 0.4283s/iter; left time: 5009.3582s\n", "\titers: 500, epoch: 4 | loss: 0.7471876\n", "\tspeed: 0.4290s/iter; left time: 4974.0489s\n", "\titers: 600, epoch: 4 | loss: 0.7752126\n", "\tspeed: 0.4290s/iter; left time: 4931.7570s\n", "\titers: 700, epoch: 4 | loss: 0.6908473\n", "\tspeed: 0.4287s/iter; left time: 4885.1463s\n", "\titers: 800, epoch: 4 | loss: 0.7518895\n", "\tspeed: 0.4289s/iter; left time: 4844.3302s\n", "\titers: 900, epoch: 4 | loss: 0.8174099\n", "\tspeed: 0.4290s/iter; left time: 4802.3067s\n", "\titers: 1000, epoch: 4 | loss: 0.8079087\n", "\tspeed: 0.4292s/iter; left time: 4761.8489s\n", "\titers: 1100, epoch: 4 | loss: 0.6462372\n", "\tspeed: 0.4292s/iter; left time: 4719.1722s\n", "\titers: 1200, epoch: 4 | loss: 0.7332570\n", "\tspeed: 0.4290s/iter; left time: 4674.1227s\n", "\titers: 1300, epoch: 4 | loss: 0.7177713\n", "\tspeed: 0.4292s/iter; left time: 4633.6036s\n", "\titers: 1400, epoch: 4 | loss: 0.7758083\n", "\tspeed: 0.4298s/iter; left time: 4597.1281s\n", "\titers: 1500, epoch: 4 | loss: 0.6090555\n", "\tspeed: 0.4300s/iter; left time: 4555.9343s\n", "\titers: 1600, epoch: 4 | loss: 0.8874197\n", "\tspeed: 0.4294s/iter; left time: 4506.0876s\n", "\titers: 1700, epoch: 4 | loss: 0.7954507\n", "\tspeed: 0.4294s/iter; left time: 4463.6073s\n", "\titers: 1800, epoch: 4 | loss: 0.5900148\n", "\tspeed: 0.4292s/iter; left time: 4419.0528s\n", "\titers: 1900, epoch: 4 | loss: 0.7397209\n", "\tspeed: 0.4293s/iter; left time: 4377.0400s\n", "\titers: 2000, epoch: 4 | loss: 0.6715164\n", "\tspeed: 0.4293s/iter; left time: 4333.9571s\n", "\titers: 2100, epoch: 4 | loss: 0.7238402\n", "\tspeed: 0.4293s/iter; left time: 4291.0025s\n", "\titers: 2200, epoch: 4 | loss: 0.8411543\n", "\tspeed: 0.4293s/iter; left time: 4247.5070s\n", "\titers: 2300, epoch: 4 | loss: 0.7549417\n", "\tspeed: 0.4293s/iter; left time: 4205.2778s\n", "\titers: 2400, epoch: 4 | loss: 0.8012406\n", "\tspeed: 0.4293s/iter; left time: 4162.5473s\n", "\titers: 2500, epoch: 4 | loss: 0.8655900\n", "\tspeed: 0.4292s/iter; left time: 4117.9063s\n", "\titers: 2600, epoch: 4 | loss: 0.7771162\n", "\tspeed: 0.4294s/iter; left time: 4077.2519s\n", "\titers: 2700, epoch: 4 | loss: 0.6840048\n", "\tspeed: 0.4292s/iter; left time: 4032.0339s\n", "\titers: 2800, epoch: 4 | loss: 0.8155959\n", "\tspeed: 0.4292s/iter; left time: 3989.4443s\n", "\titers: 2900, epoch: 4 | loss: 0.7739085\n", "\tspeed: 0.4296s/iter; left time: 3950.4420s\n", "\titers: 3000, epoch: 4 | loss: 0.7221929\n", "\tspeed: 0.4292s/iter; left time: 3904.0250s\n", "\titers: 3100, epoch: 4 | loss: 0.7903285\n", "\tspeed: 0.4292s/iter; left time: 3860.5904s\n", "\titers: 3200, epoch: 4 | loss: 0.6881965\n", "\tspeed: 0.4293s/iter; left time: 3818.9221s\n", "\titers: 3300, epoch: 4 | loss: 0.8531624\n", "\tspeed: 0.4294s/iter; left time: 3776.4881s\n", "\titers: 3400, epoch: 4 | loss: 0.5989477\n", "\tspeed: 0.4301s/iter; left time: 3739.7819s\n", "\titers: 3500, epoch: 4 | loss: 0.6276078\n", "\tspeed: 0.4295s/iter; left time: 3691.3171s\n", "\titers: 3600, epoch: 4 | loss: 0.5275269\n", "\tspeed: 0.4300s/iter; left time: 3653.1425s\n", "\titers: 3700, epoch: 4 | loss: 0.7738121\n", "\tspeed: 0.4293s/iter; left time: 3604.0144s\n", "\titers: 3800, epoch: 4 | loss: 0.6206883\n", "\tspeed: 0.4293s/iter; left time: 3560.7182s\n", "\titers: 3900, epoch: 4 | loss: 0.8676984\n", "\tspeed: 0.4291s/iter; left time: 3516.3746s\n", "\titers: 4000, epoch: 4 | loss: 0.6717831\n", "\tspeed: 0.4295s/iter; left time: 3477.1540s\n", "\titers: 4100, epoch: 4 | loss: 0.7502180\n", "\tspeed: 0.4291s/iter; left time: 3430.9809s\n", "\titers: 4200, epoch: 4 | loss: 0.8553344\n", "\tspeed: 0.4295s/iter; left time: 3390.7250s\n", "\titers: 4300, epoch: 4 | loss: 0.5384691\n", "\tspeed: 0.4293s/iter; left time: 3346.1177s\n", "\titers: 4400, epoch: 4 | loss: 0.6827880\n", "\tspeed: 0.4293s/iter; left time: 3303.1331s\n", "\titers: 4500, epoch: 4 | loss: 0.7101130\n", "\tspeed: 0.4292s/iter; left time: 3259.5795s\n", "\titers: 4600, epoch: 4 | loss: 0.7779287\n", "\tspeed: 0.4296s/iter; left time: 3219.4927s\n", "\titers: 4700, epoch: 4 | loss: 0.7405453\n", "\tspeed: 0.4292s/iter; left time: 3174.0632s\n", "\titers: 4800, epoch: 4 | loss: 0.9232990\n", "\tspeed: 0.4293s/iter; left time: 3131.3884s\n", "\titers: 4900, epoch: 4 | loss: 0.7479228\n", "\tspeed: 0.4295s/iter; left time: 3090.2711s\n", "\titers: 5000, epoch: 4 | loss: 0.8009843\n", "\tspeed: 0.4295s/iter; left time: 3047.4343s\n", "\titers: 5100, epoch: 4 | loss: 0.8730049\n", "\tspeed: 0.4294s/iter; left time: 3003.3102s\n", "\titers: 5200, epoch: 4 | loss: 0.6912510\n", "\tspeed: 0.4295s/iter; left time: 2961.3733s\n", "\titers: 5300, epoch: 4 | loss: 0.7960681\n", "\tspeed: 0.4293s/iter; left time: 2917.1187s\n", "\titers: 5400, epoch: 4 | loss: 0.7624743\n", "\tspeed: 0.4294s/iter; left time: 2874.7379s\n", "\titers: 5500, epoch: 4 | loss: 0.8327021\n", "\tspeed: 0.4296s/iter; left time: 2833.1050s\n", "\titers: 5600, epoch: 4 | loss: 0.7181480\n", "\tspeed: 0.4297s/iter; left time: 2790.6988s\n", "\titers: 5700, epoch: 4 | loss: 0.7366198\n", "\tspeed: 0.4309s/iter; left time: 2755.6484s\n", "\titers: 5800, epoch: 4 | loss: 0.7489323\n", "\tspeed: 0.4297s/iter; left time: 2705.1680s\n", "\titers: 5900, epoch: 4 | loss: 0.7941884\n", "\tspeed: 0.4293s/iter; left time: 2659.6782s\n", "\titers: 6000, epoch: 4 | loss: 0.7131475\n", "\tspeed: 0.4292s/iter; left time: 2615.8742s\n", "Epoch: 4 cost time: 2643.05841255188\n", "Epoch: 4, Steps: 6047 | Train Loss: 0.7366459 Vali Loss: 0.7018580 Test Loss: 0.6836772\n", "Validation loss decreased (0.702248 --> 0.701858). Saving model ...\n", "Updating learning rate to 1.25e-05\n", "\titers: 100, epoch: 5 | loss: 0.7352363\n", "\tspeed: 12.4857s/iter; left time: 74264.9133s\n", "\titers: 200, epoch: 5 | loss: 0.7377717\n", "\tspeed: 0.4265s/iter; left time: 2494.2303s\n", "\titers: 300, epoch: 5 | loss: 0.9615692\n", "\tspeed: 0.4270s/iter; left time: 2454.5697s\n", "\titers: 400, epoch: 5 | loss: 0.5982816\n", "\tspeed: 0.4287s/iter; left time: 2421.0559s\n", "\titers: 500, epoch: 5 | loss: 0.6705336\n", "\tspeed: 0.4286s/iter; left time: 2377.6095s\n", "\titers: 600, epoch: 5 | loss: 0.8813511\n", "\tspeed: 0.4286s/iter; left time: 2334.9506s\n", "\titers: 700, epoch: 5 | loss: 0.7509829\n", "\tspeed: 0.4286s/iter; left time: 2292.0589s\n", "\titers: 800, epoch: 5 | loss: 0.7049814\n", "\tspeed: 0.4287s/iter; left time: 2249.6784s\n", "\titers: 900, epoch: 5 | loss: 0.6206592\n", "\tspeed: 0.4288s/iter; left time: 2207.3744s\n", "\titers: 1000, epoch: 5 | loss: 0.6774248\n", "\tspeed: 0.4291s/iter; left time: 2166.2474s\n", "\titers: 1100, epoch: 5 | loss: 0.7320400\n", "\tspeed: 0.4309s/iter; left time: 2132.0558s\n", "\titers: 1200, epoch: 5 | loss: 0.6946516\n", "\tspeed: 0.4328s/iter; left time: 2098.0126s\n", "\titers: 1300, epoch: 5 | loss: 0.8305169\n", "\tspeed: 0.4291s/iter; left time: 2037.3046s\n", "\titers: 1400, epoch: 5 | loss: 0.6649070\n", "\tspeed: 0.4293s/iter; left time: 1995.3154s\n", "\titers: 1500, epoch: 5 | loss: 0.6948108\n", "\tspeed: 0.4292s/iter; left time: 1952.1184s\n", "\titers: 1600, epoch: 5 | loss: 0.7577039\n", "\tspeed: 0.4294s/iter; left time: 1910.0909s\n", "\titers: 1700, epoch: 5 | loss: 0.7762374\n", "\tspeed: 0.4291s/iter; left time: 1865.6743s\n", "\titers: 1800, epoch: 5 | loss: 0.8698243\n", "\tspeed: 0.4293s/iter; left time: 1823.7363s\n", "\titers: 1900, epoch: 5 | loss: 0.8455759\n", "\tspeed: 0.4291s/iter; left time: 1779.8084s\n", "\titers: 2000, epoch: 5 | loss: 0.7913331\n", "\tspeed: 0.4294s/iter; left time: 1738.1457s\n", "\titers: 2100, epoch: 5 | loss: 0.7185851\n", "\tspeed: 0.4291s/iter; left time: 1693.9520s\n", "\titers: 2200, epoch: 5 | loss: 0.8393289\n", "\tspeed: 0.4293s/iter; left time: 1651.9028s\n", "\titers: 2300, epoch: 5 | loss: 0.7780721\n", "\tspeed: 0.4291s/iter; left time: 1608.1020s\n", "\titers: 2400, epoch: 5 | loss: 0.6690277\n", "\tspeed: 0.4293s/iter; left time: 1566.1444s\n", "\titers: 2500, epoch: 5 | loss: 0.6604879\n", "\tspeed: 0.4300s/iter; left time: 1525.5289s\n", "\titers: 2600, epoch: 5 | loss: 0.7575426\n", "\tspeed: 0.4295s/iter; left time: 1480.8826s\n", "\titers: 2700, epoch: 5 | loss: 0.7422808\n", "\tspeed: 0.4294s/iter; left time: 1437.4734s\n", "\titers: 2800, epoch: 5 | loss: 0.5996737\n", "\tspeed: 0.4293s/iter; left time: 1394.3863s\n", "\titers: 2900, epoch: 5 | loss: 0.7312164\n", "\tspeed: 0.4296s/iter; left time: 1352.3466s\n", "\titers: 3000, epoch: 5 | loss: 0.8099957\n", "\tspeed: 0.4291s/iter; left time: 1307.8198s\n", "\titers: 3100, epoch: 5 | loss: 0.6976437\n", "\tspeed: 0.4292s/iter; left time: 1265.3904s\n", "\titers: 3200, epoch: 5 | loss: 0.7571465\n", "\tspeed: 0.4294s/iter; left time: 1222.8958s\n", "\titers: 3300, epoch: 5 | loss: 0.5847676\n", "\tspeed: 0.4295s/iter; left time: 1180.2910s\n", "\titers: 3400, epoch: 5 | loss: 0.7553720\n", "\tspeed: 0.4307s/iter; left time: 1140.4018s\n", "\titers: 3500, epoch: 5 | loss: 0.7341670\n", "\tspeed: 0.4296s/iter; left time: 1094.4962s\n", "\titers: 3600, epoch: 5 | loss: 0.7768233\n", "\tspeed: 0.4291s/iter; left time: 1050.5184s\n", "\titers: 3700, epoch: 5 | loss: 0.7674068\n", "\tspeed: 0.4293s/iter; left time: 1008.1113s\n", "\titers: 3800, epoch: 5 | loss: 0.5441766\n", "\tspeed: 0.4295s/iter; left time: 965.4800s\n", "\titers: 3900, epoch: 5 | loss: 0.7359335\n", "\tspeed: 0.4297s/iter; left time: 922.8980s\n", "\titers: 4000, epoch: 5 | loss: 0.7594821\n", "\tspeed: 0.4295s/iter; left time: 879.5603s\n", "\titers: 4100, epoch: 5 | loss: 0.6794572\n", "\tspeed: 0.4296s/iter; left time: 836.9500s\n", "\titers: 4200, epoch: 5 | loss: 0.7166788\n", "\tspeed: 0.4298s/iter; left time: 794.2770s\n", "\titers: 4300, epoch: 5 | loss: 0.8769036\n", "\tspeed: 0.4294s/iter; left time: 750.5077s\n", "\titers: 4400, epoch: 5 | loss: 0.9126329\n", "\tspeed: 0.4296s/iter; left time: 707.9335s\n", "\titers: 4500, epoch: 5 | loss: 0.7238624\n", "\tspeed: 0.4296s/iter; left time: 665.0659s\n", "\titers: 4600, epoch: 5 | loss: 0.7744576\n", "\tspeed: 0.4296s/iter; left time: 622.0214s\n", "\titers: 4700, epoch: 5 | loss: 0.7180898\n", "\tspeed: 0.4298s/iter; left time: 579.3144s\n", "\titers: 4800, epoch: 5 | loss: 0.9450315\n", "\tspeed: 0.4295s/iter; left time: 535.9900s\n", "\titers: 4900, epoch: 5 | loss: 0.6091806\n", "\tspeed: 0.4295s/iter; left time: 493.0983s\n", "\titers: 5000, epoch: 5 | loss: 0.7721329\n", "\tspeed: 0.4293s/iter; left time: 449.9377s\n", "\titers: 5100, epoch: 5 | loss: 0.9383146\n", "\tspeed: 0.4296s/iter; left time: 407.2774s\n", "\titers: 5200, epoch: 5 | loss: 0.7085751\n", "\tspeed: 0.4298s/iter; left time: 364.4624s\n", "\titers: 5300, epoch: 5 | loss: 0.7751033\n", "\tspeed: 0.4305s/iter; left time: 321.9838s\n", "\titers: 5400, epoch: 5 | loss: 0.7805765\n", "\tspeed: 0.4305s/iter; left time: 278.9425s\n", "\titers: 5500, epoch: 5 | loss: 0.6966825\n", "\tspeed: 0.4296s/iter; left time: 235.4009s\n", "\titers: 5600, epoch: 5 | loss: 0.7149451\n", "\tspeed: 0.4294s/iter; left time: 192.3862s\n", "\titers: 5700, epoch: 5 | loss: 0.6835595\n", "\tspeed: 0.4295s/iter; left time: 149.4490s\n", "\titers: 5800, epoch: 5 | loss: 0.7239579\n", "\tspeed: 0.4295s/iter; left time: 106.5264s\n", "\titers: 5900, epoch: 5 | loss: 0.5544484\n", "\tspeed: 0.4295s/iter; left time: 63.5644s\n", "\titers: 6000, epoch: 5 | loss: 0.7319254\n", "\tspeed: 0.4296s/iter; left time: 20.6229s\n", "Epoch: 5 cost time: 2643.91592669487\n", "Epoch: 5, Steps: 6047 | Train Loss: 0.7328586 Vali Loss: 0.7076379 Test Loss: 0.6880387\n", "EarlyStopping counter: 1 out of 2\n", "Updating learning rate to 6.25e-06\n", ">>>>>>>testing : long_term_forecast_weather_96_720_Autoformer_custom_ftM_sl96_ll48_pl720_dm512_nh8_el3_dl1_df512_fc3_ebtimeF_dtTrue_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n", "test 27159\n", "test shape: (27159, 1, 720, 15) (27159, 1, 720, 15)\n", "test shape: (27159, 720, 15) (27159, 720, 15)\n", "mse:0.6836745738983154, mae:0.5159773826599121\n" ] } ], "source": [ "run_experiment(\n", " task_name='long_term_forecast',\n", " is_training=1,\n", " model_id='weather_96_720',\n", " model='Autoformer',\n", " data='custom',\n", " root_path='./dataset/',\n", " data_path='UBB_weather_jan2008_may2023_cleaned.csv',\n", " features='M',\n", " target='T(degC)',\n", " freq='h',\n", " checkpoints='./checkpoints/',\n", " seq_len=96,\n", " label_len=48,\n", " pred_len=720,\n", " seasonal_patterns='Yearly',\n", " inverse=False,\n", " mask_rate=0.25,\n", " anomaly_ratio=0.25,\n", " top_k=5,\n", " num_kernels=6,\n", " enc_in=15,\n", " dec_in=15,\n", " c_out=15,\n", " d_model=512,\n", " n_heads=8,\n", " e_layers=3,\n", " d_layers=1,\n", " d_ff=512,\n", " moving_avg=25,\n", " factor=3,\n", " distil=True,\n", " dropout=0.1,\n", " embed='timeF',\n", " activation='gelu',\n", " output_attention=False,\n", " channel_independence=0,\n", " num_workers=10,\n", " itr=1,\n", " train_epochs=5,\n", " batch_size=16,\n", " patience=2,\n", " learning_rate=0.0001,\n", " des='Exp',\n", " loss='MSE',\n", " lradj='type1',\n", " use_amp=False,\n", " use_gpu=True,\n", " gpu=0,\n", " use_multi_gpu=False,\n", " devices='0,1,2,3',\n", " p_hidden_dims=[128, 128],\n", " p_hidden_layers=2\n", ")" ] } ], "metadata": { "kernelspec": { "display_name": "base", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.18" } }, "nbformat": 4, "nbformat_minor": 2 }