# -*- coding: utf-8 -*-
from gwlsa.model_utils import ProcessingChainBuilder, Net_constructor
from gwlsa.model_utils import Net_trainer, Net_loader
from gwlsa.model_utils import ProcessingContext
from gwlsa.model_utils import split_csv_train_val_test
from post_process.plot_logs import plot_logfile
import gwlsa_settings as GS
# from memory_profiler import profile
# from torch.profiler import profile, record_function, ProfilerActivity
from utils.general_utils import timer

import warnings
warnings.filterwarnings('ignore')

def data_spliter():
    """Split the raw dataset into train/validation/test CSV files.

    All split parameters (paths, column names, ratios, sizes) come from
    ``gwlsa_settings``; the resulting CSVs are written under the subfolder
    configured in ``split_params['split_csv_saved_dir']``.
    """
    params = GS.net_params
    split_csv_train_val_test(
        params['raw_data_path'],
        params['x_column_names'],
        params['y_column_name'],
        params['spatial_column_names'],
        params['test_ratio'],
        params['valid_ratio'],
        params['batch_size'],
        params['max_val_size'],
        params['max_test_size'],
        csv_encoding=params['csv_encoding'],
        subfolder_name=GS.split_params['split_csv_saved_dir'],
    )

def loadmodel_test(var_name, var_params):
    """Build a network and load a previously trained model for testing.

    Parameters
    ----------
    var_name : str
        Key in ``GS.net_params`` to override for this run.
    var_params : Any
        Value to assign to ``net_params[var_name]``.

    Returns
    -------
    The processed context returned by the chain (previously discarded).
    """
    # Work on a shallow copy so the shared module-level settings dict in
    # gwlsa_settings is not mutated as a side effect of this call.
    net_params = dict(GS.net_params)
    net_params[var_name] = var_params  # override the parameter under study

    # step 1: build the processing chain (construct net, then load weights)
    chain = (
        ProcessingChainBuilder()
        .add_strategy(Net_constructor())
        .add_strategy(Net_loader())
        .build()
    )
    # step 2: set up the context with the (overridden) parameters
    context = ProcessingContext(None, **net_params)
    # step 3: execute the chain and expose the result to callers
    return chain.process(context)

@timer
def run_gnnwr(var_name, var_params):
    """Construct and train a GNNWR model, then plot its loss curves.

    Parameters
    ----------
    var_name : str
        Key in ``GS.net_params`` to override for this run.
    var_params : Any
        Value to assign to ``net_params[var_name]``.

    Returns
    -------
    The processed context returned by the chain (previously discarded).
    """
    # Work on a shallow copy so the shared module-level settings dict in
    # gwlsa_settings is not mutated as a side effect of this call (important
    # when sweeping several parameters in one process).
    net_params = dict(GS.net_params)
    net_params[var_name] = var_params  # override the parameter under study

    # step 1: build the processing chain (construct net, then train it)
    chain = (
        ProcessingChainBuilder()
        .add_strategy(Net_constructor())
        .add_strategy(Net_trainer())
        # .add_strategy(Net_loader())
        .build()
    )
    # step 2: set up the context with the (overridden) parameters
    context = ProcessingContext(None, **net_params)
    # step 3: execute the chain
    result = chain.process(context)
    # step 4: plot the train/valid loss curves from the training log
    file_path = result.params['log_save_path']
    loss_pic_path = result.params['loss_pic_path']
    plot_logfile(file_path, metric1='Train Loss', metric2='Valid Loss',
                 show_fig=False,
                 save_pic=True,
                 save_pic_path=loss_pic_path)
    # step 5: record the swept parameter value next to the run outputs.
    # encoding='utf-8' is explicit because the default codec is
    # platform-dependent and the filename/content may contain non-ASCII text.
    with open(result.params['out_path'] / '当前的var_params值.txt', 'w',
              encoding='utf-8') as file:
        file.write(str(var_name))
        file.write('\n')
        file.write(str(var_params))
    return result



if __name__ == "__main__":
    # Hidden-layer configurations to sweep. Earlier experiments covered
    # 1-7 layers of 256 or 512 units; only the active config is kept here.
    layer_configs = [
        [512] * 4,  # four hidden layers of 512 units
    ]
    for layers in layer_configs:
        run_gnnwr('dense_layers', layers)

    # loadmodel_test('dense_layers', layer_configs[0])