import math
import warnings
from inspect import currentframe, getframeinfo
from typing import Any

import torch
import torch.nn as nn
import pytorch_lightning as pl
from bayes_opt import BayesianOptimization, UtilityFunction
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.loggers import CSVLogger, TensorBoardLogger
from pytorch_lightning.utilities.types import OptimizerLRScheduler
from torch.utils.data import DataLoader, random_split, Dataset
import scipy.io as scio
import numpy as np
import torch.optim as optim
from torch.utils.tensorboard import SummaryWriter
import os

from BO_WDBP_exp import WDBP

os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
import CR_DSPPytorch as risdsp
import util

BASE_DIR = os.path.dirname(__file__)


class NPDataset(torch.utils.data.Dataset):
    """Sliding-window dataset over a complex signal and its symbol labels.

    ``array`` holds the received signal (2 samples per symbol), ``label``
    the transmitted symbols.  Integer indexing yields a centred window of
    ``block_size`` samples (circularly wrapped at the edges) together with
    the matching label symbol; ndarray indexing yields only the selected
    raw samples (bulk path used by ``eval_ber``).  Complex values are
    returned with real/imag split into a trailing axis of size 2.
    """

    def __init__(self, array, label, block_size):
        # Store everything as single-precision complex (matches torch.cfloat).
        self.array = np.csingle(array)
        self.label = np.csingle(label)
        self.block_size = block_size
        # Number of parallel cases: second-to-last axis of the signal.
        self.case_num = self.array.shape[-2]

    def __len__(self):
        # One dataset item per label symbol.
        return self.label.shape[-1]

    def __getitem__(self, indx):
        def split_complex(z):
            # Complex array -> stacked (real, imag) along a new last axis.
            return np.stack((z.real, z.imag), axis=-1)

        half = int(np.floor(self.block_size / 2))
        if isinstance(indx, np.ndarray):
            # Bulk access path: return the selected samples, no labels.
            selected = self.array[np.newaxis, ..., indx]
            return split_complex(selected)
        # Window of block_size samples centred at 2*indx (signal is
        # oversampled by 2 relative to the labels); 'wrap' makes the
        # window circular at both signal edges.
        window = np.arange(2 * indx - half, 2 * indx + half)
        samples = np.take(self.array, indices=window, axis=-1, mode='wrap')
        return split_complex(samples), split_complex(self.label[..., indx])


def gen_dataloader(data_path, block_size, batch_size, symbol_num=0, constellations=util.CONST_16QAM, shuffle=False,
                   count_from_end=False, mod_order=4):
    """Load a .mat capture and wrap it into an NPDataset / DataLoader pair.

    Parameters
    ----------
    data_path : str
        Path to a .mat file containing 'sig' (received signal, 2 samples
        per symbol) and 'prbs' (transmitted bit sequence).
    block_size : int
        Window length (in samples) handed to NPDataset.
    batch_size : int
        Mini-batch size for the DataLoader.
    symbol_num : int
        Number of symbols to keep; 0 keeps the whole capture.
    constellations : array-like
        Constellation used to map the PRBS bits onto symbols.
    shuffle : bool
        Whether the DataLoader shuffles item order.
    count_from_end : bool
        When True, keep the trailing ``symbol_num`` symbols instead of
        the leading ones (used to carve a validation set out of the end
        of the training capture).
    mod_order : int
        Bits per symbol (default 4, i.e. 16QAM).  Previously hard-coded.

    Returns
    -------
    (DataLoader, NPDataset, ndarray)
        The loader, the underlying dataset, and the raw PRBS bits.
    """
    data = scio.loadmat(data_path)
    prbs = data['prbs']
    sig = data['sig']

    if symbol_num != 0:
        # Signal carries 2 samples/symbol, PRBS mod_order bits/symbol.
        if count_from_end:
            sig = sig[..., -symbol_num * 2:]
            prbs = prbs[..., -symbol_num * mod_order:]
        else:
            sig = sig[..., 0:symbol_num * 2]
            prbs = prbs[..., 0:symbol_num * mod_order]
    # The experiment is single-polarisation, so the PRBS maps directly to
    # a single symbol stream (no X/Y polarisation split needed).
    label = util.map(prbs, constellations, mod_order)
    dataset = NPDataset(sig, label, block_size)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size, shuffle=shuffle)
    return dataloader, dataset, prbs


def eval_ber(model, dataset, prbs, constellations=util.CONST_16QAM, use_cuda=True, power_factor=1):
    """Run *model* over the whole *dataset* and measure the bit error rate.

    The full signal is pulled from the dataset in a single ndarray-indexed
    access, optionally scaled by sqrt(power_factor) (launch-power sweep),
    propagated through the model, then post-processed with phase recovery
    and a decision-directed LMS equaliser before BER counting against the
    transmitted PRBS.  Returns ``(equalised_signal, ber)``.
    """
    if not isinstance(dataset, NPDataset):
        raise ValueError('dataset is supposed to be an instance of NPDataset.')
    model = model.eval()
    # Post-processing DSP stages: a 32-tap decision-directed FIR equaliser
    # and phase recovery with a 1024-sample window.
    lms = risdsp.FIRLayer(tap=32, case_num=1, power_norm=True, centor_one=True)
    pr = risdsp.PhaseRecLayer(1024)

    if use_cuda and torch.cuda.is_available():
        model = model.cuda()

    chosen_device = model.device
    # The signal holds 2 samples per label symbol, hence the factor of 2;
    # ndarray indexing takes the NPDataset bulk path (no labels returned).
    block_size = len(dataset) * 2
    indxes = np.arange(block_size)
    sig = dataset[indxes]
    sig = torch.from_numpy(sig).to(chosen_device)
    # Amplitude scales with sqrt of the power ratio.
    sig = np.sqrt(power_factor) * sig

    sig = model(sig)

    # Free the (possibly GPU-resident) model before equalisation.
    del model

    '''CMA，原本应有均衡PMD的，但由于已经移除，所以这里不做考虑'''
    # (Translation of the note above: a CMA stage for PMD equalisation used
    # to sit here, but it has been removed, so it is not considered.)
    lms = lms.to(chosen_device)
    pr = pr.to(chosen_device)
    # Downsample to 1 sample/symbol (odd-indexed samples), then recover phase.
    sig = pr(sig[..., 1::2, :])

    # Decision-directed LMS fit on the phase-recovered signal.
    lms.fit(sig,
            err_mode='DDM',
            constellations=constellations,
            iter_num=10,
            block_size=4028,
            remain=2048,
            lr=1e-4)
    with torch.no_grad():
        sig = lms(sig)
        sig = torch.view_as_complex(sig)
        sig = sig.cpu().data.numpy().squeeze()

        # Count bit errors against the transmitted PRBS.
        sig, ber, _ = util.pr_ber(sig, prbs, constellations)
    return sig, ber


class LMWDBP(pl.LightningModule):
    """PyTorch-Lightning wrapper around the WDBP equaliser model.

    During training, ``forward`` additionally downsamples the model output
    to one sample per symbol and applies phase recovery, so the loss is
    computed on symbol-rate, phase-aligned samples.

    Parameters
    ----------
    model_kwargs : dict
        Keyword arguments forwarded to the underlying ``WDBP`` model.
    lr : tuple | float
        For 'Adam', a ``(edc_lr, nl_lr)`` pair giving separate learning
        rates to the linear (EDC) and nonlinear parameter groups; for
        'SGD', a single learning rate.
    optimizer : str
        Either 'Adam' or 'SGD'.
    weight_decay : float
        L2 regularisation passed to the optimizer.
    pr_win : int
        Phase-recovery averaging window length.
    """

    def __init__(self, model_kwargs, lr=1e-3, optimizer='Adam', weight_decay=0.02, pr_win=1024):
        super().__init__()
        self.model = WDBP(**model_kwargs)
        self.optimizer = optimizer
        self.lr = lr
        # Fail fast on an unsupported optimizer name; the string itself is
        # consumed later in configure_optimizers.  (A previously computed
        # numeric hp_opt flag was dead code and has been removed.)
        if self.optimizer not in ('Adam', 'SGD'):
            raise ValueError('Invalid optimizer')

        # NOTE(review): lr and pr_win are not recorded here — confirm
        # whether they should be part of the saved hyper-parameters.
        self.hyper_parameters = {
            'model_kwargs': model_kwargs,
            'optimizer': optimizer,
            'weight_decay': weight_decay,
        }
        self.weight_decay = weight_decay
        self.pr = risdsp.PhaseRecLayer(pr_win)

    def forward(self, x):
        x = self.model(x)
        if self.training:
            # Downsample 2 samples/symbol -> 1 (odd-indexed samples), then
            # phase recovery, so the training loss sees symbol-rate samples.
            x = x[..., 1::2, :]
            x = self.pr(x)
        return x

    def configure_optimizers(self) -> OptimizerLRScheduler:
        if self.optimizer == 'Adam':
            # Separate learning rates for the linear (EDC) and nonlinear
            # parameter groups; self.lr is expected to be a 2-tuple here.
            return optim.Adam(
                [
                    {'params': self.model.edc_parameters(), 'lr': self.lr[0]},
                    {'params': self.model.nl_parameters(), 'lr': self.lr[1]}
                ], weight_decay=self.weight_decay
            )
        elif self.optimizer == 'SGD':
            return optim.SGD(self.parameters(), lr=self.lr, weight_decay=self.weight_decay)
        else:
            raise ValueError('Invalid optimizer.')

    def err_function(self, x, y):
        """Rotation-invariant squared error between ``x`` and ``y``.

        The carrier phase of a square QAM constellation is only known up
        to a multiple of pi/2, so the error is taken as the minimum over
        the four 90-degree rotations of ``x``.
        """
        if x.ndim == 4:
            # Keep only the centre symbol of each window.
            x = x[..., round(x.shape[-2] // 2), :]

        # The four pi/2 rotation factors, broadcast-shaped and converted
        # to the (real, imag) layout used by risdsp.cmul.
        rotate_fac = np.exp(1j * np.array([0, np.pi / 2, np.pi, np.pi / 2 * 3]))
        rotate_fac = rotate_fac.reshape(*[1] * (x.ndim - 1), -1)
        rotate_fac = torch.from_numpy(rotate_fac).to(x.device)
        rotate_fac.requires_grad_(False)
        rotate_fac = torch.view_as_real(rotate_fac)

        # Insert a rotation axis into both operands for broadcasting.
        x = x[..., np.newaxis, :]
        y = y[..., np.newaxis, :]

        x = risdsp.cmul(x, rotate_fac)
        res = y - x
        err = torch.sum(res ** 2, dim=-1)  # squared modulus per rotation
        err = torch.min(err, dim=-1)[0]    # best rotation per symbol
        err = torch.mean(err, dim=[0, 1])  # average over batch and case
        return err

    def training_step(self, batch, batch_idx):
        x, y = batch
        output = self(x)
        err = self.err_function(output, y)
        # Scale for readable magnitudes in the logged loss.
        err = err * 1000
        self.log('train_loss', err, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        return {'loss': err}

    def validation_step(self, batch, batch_idx):
        x, y = batch
        output = self(x)
        # In eval mode forward() skips downsampling/phase recovery, so both
        # are applied here explicitly to mirror the training path.
        with torch.no_grad():
            output = output[..., 1::2, :]
            output = self.pr(output)
        err = self.err_function(output, y)
        err = err * 1000
        self.log('val_loss', err, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        return {'val_loss': err}

    def test_step(self, batch, batch_idx):
        x, y = batch
        output = self(x)
        # NOTE(review): unlike validation_step, the raw forward output is
        # scored directly (no downsample / phase recovery) — confirm this
        # asymmetry is intended.
        err = self.err_function(output, y)
        return {'tst_loss': err}


def vary_lp(lp_range=[1]):
    """Train one LMWDBP model per launch power in *lp_range* and report Q.

    For each launch power ``lp`` this loads the matching experimental
    capture, builds train/val/test dataloaders, seeds the nonlinear-stage
    hyper-parameters from a previous WDBP grid-search result when one
    exists, trains with early stopping + checkpointing, and prints the
    Q factor measured on the test set.
    """
    # Copy so the caller's list (and the mutable default) is never aliased.
    lp_range = [*lp_range]
    # Name the result directory after this function.
    frame = currentframe()
    experiment_name = getframeinfo(frame).function
    resultDir = os.path.join(BASE_DIR, 'result_exp/LMWDBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, '{}'.format(experiment_name))
    if not os.path.exists(resultPath):
        os.mkdir(resultPath)
        print(f'Create result path at \"{resultPath}\"')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    WDBPsearchPath = os.path.join(BASE_DIR, f'result_exp/WDBP/{experiment_name}')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    # ---------------- Behavior control ----------------
    max_epochs = 10
    batch_size = 64
    splitAmount = 65536
    tr_val_ratio = 7
    # 7:3 split of splitAmount symbols between training and validation.
    trsetLen = round(splitAmount * tr_val_ratio / (tr_val_ratio + 3))
    valsetLen = round(splitAmount * 3 / (tr_val_ratio + 3))
    tstsetLen = 110000
    lr = (1e-5, 1e-5)  # (EDC lr, nonlinear lr); see LMWDBP.configure_optimizers
    patience = 6
    log_interval = 20

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')

    for lp in lp_range:
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        # Validation reads the same capture as training but takes the
        # trailing symbols (count_from_end=True), so the sets don't overlap.
        valSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        # ---------------- Form dataloaders ----------------
        spanLen = 100e3  # span length [m]
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum  # total transmission length [m]
        symbolRate = 20e9

        train_dataloader, _, _ = gen_dataloader(trSetPath,
                                                block_size=block_size,
                                                symbol_num=trsetLen,
                                                batch_size=batch_size,
                                                constellations=constellations,
                                                shuffle=True)
        val_dataloader, valSet, prbsVal = gen_dataloader(valSetPath,
                                                         block_size=block_size,
                                                         symbol_num=valsetLen,
                                                         batch_size=batch_size,
                                                         constellations=constellations,
                                                         shuffle=False,
                                                         count_from_end=True)
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        # ---------------- Form dataloader end ----------------
        step = 9
        least_taps_per_layer = 256

        # Seed nonlinear-stage hyper-parameters from a previous WDBP grid
        # search when one exists; otherwise fall back to defaults.
        if os.path.exists(WDBPsearchPath):
            dat = scio.loadmat(os.path.join(WDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            std_dev = dat['std_dev'][0, 0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            std_dev = 2.6
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')

        model_kwargs = {
            'D': -17e-6,
            'tap_per_edc': least_taps_per_layer,
            'sample_factor': 2,
            'symbol_rate': symbolRate,
            'power_norm': False,
            'case_num': 1,
            'gm': -1.3e-3,
            'step': step,
            'trans_length': L,
            'lp': lp,
            'alpha': 0.2e-3,
            'nltap': nltap,
            'std_dev': std_dev,
            'init_comp_fac': init_comp_fac,
            'pre_cd': 0,
            'edc_init_method': 'FSM',
            'lmbd_each_layer': 5.12589879e-05,
        }

        pl_module_kwargs = {
            'model_kwargs': model_kwargs,
            'lr': lr,
            'optimizer': 'Adam',
            'weight_decay': 0,
            'pr_win': 301
        }

        mdl = LMWDBP(**pl_module_kwargs)

        # ---------------- Specify callbacks ----------------
        early_stop_callback = EarlyStopping(monitor='val_loss', patience=patience, strict=False,
                                            verbose=True, mode='min', min_delta=1e-2)
        check_point_callback = ModelCheckpoint(
            monitor='val_loss',
            verbose=True,
            save_last=None,
            save_top_k=1,
            save_weights_only=True,
            mode='min',
            dirpath=checkpointSavePath,
            filename=f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}',
        )
        # ---------------- Specify callbacks end ----------------
        logger = TensorBoardLogger(save_dir=resultPath, name='log', version=f'step_{step}_lp_{lp}')
        trainer_kwargs = {
            'accelerator': 'gpu',
            'callbacks': [early_stop_callback, check_point_callback],
            'max_epochs': max_epochs,
            'check_val_every_n_epoch': 1,
            'num_sanity_val_steps': 0,
            'log_every_n_steps': log_interval,
            'val_check_interval': 200,
            'logger': logger,
        }

        trainer = pl.Trainer(**trainer_kwargs)
        if resume_from_checkpoints:
            # Resume from the checkpoint written by ModelCheckpoint above.
            # (The previous hard-coded 'model.ckpt' could never match the
            # filename pattern the checkpoint callback actually writes.)
            ckpt_path = os.path.join(
                checkpointSavePath,
                f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}.ckpt')
        else:
            ckpt_path = None
        trainer.fit(mdl, train_dataloaders=train_dataloader, val_dataloaders=val_dataloader, ckpt_path=ckpt_path)

        sig, ber = eval_ber(mdl, tstSet, prbsTst, constellations=util.CONST_16QAM, use_cuda=False)
        Q = util.ber2q(np.mean(ber))
        print(f'lp {lp}, Q factor {Q}')


def eval_vary_lp(lp_range=[1]):
    """Evaluate previously-trained checkpoints across launch powers.

    For each launch power ``lp`` the matching checkpoint is loaded and its
    BER / Q factor measured on the test set.  When ``use_BO`` is set, a
    Bayesian optimisation over a received-power scaling factor is run
    first and the best scaling is used for the final measurement.  The
    per-lp results (Q, Q2, BER) are saved to a .mat file.
    """
    lp_range = [*lp_range]
    step = 9
    resultDir = os.path.join(BASE_DIR, 'result_exp/LADBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, 'vary_lp')
    if not os.path.exists(resultPath):
        # Checkpoints are produced by the training run; nothing to evaluate.
        raise ValueError('Here is lack of vary_lp path')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    AVRDBPsearchPath = os.path.join(BASE_DIR, 'result_exp/AVR-DBP/vary_lp')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    # ---------------- Behavior control ----------------
    batch_size = 32
    tstsetLen = 110000
    lr = (1e-5, 1e-8)

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')
    # Save the experiment results (translated from the original Chinese note).
    save_result_flag = True
    save_dict = {}
    Qcache = np.zeros([len(lp_range)])
    bercache = np.zeros_like(Qcache)
    Q2cache = np.zeros_like(Qcache)
    for lpIndx, lp in enumerate(lp_range):
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        # ---------------- Form dataloaders ----------------
        spanLen = 100e3  # span length [m]
        # The training capture is still read: it carries spanNum, needed
        # to reconstruct the model's transmission length.
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum
        symbolRate = 20e9

        # Only the test set is needed for evaluation; the train/val
        # loaders that used to be built here were never used.
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        # ---------------- Form dataloader end ----------------
        least_taps_per_layer = 256

        # Seed nltap / init_comp_fac from a previous AVR-DBP search result
        # when one exists; otherwise use defaults.
        if os.path.exists(AVRDBPsearchPath):
            dat = scio.loadmat(os.path.join(AVRDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')

        model_kwargs = {
            'D': -17e-6,
            'tap_per_edc': least_taps_per_layer,
            'sample_factor': 2,
            'symbol_rate': symbolRate,
            'power_norm': False,
            'case_num': 1,
            'gm': -1.3e-3,
            'step': step,
            'trans_length': L,
            'lp': lp,
            'alpha': 0.2e-3,
            'init_comp_fac': init_comp_fac,
            'nltap': nltap,
            'pre_cd': 0,
            'edc_init_method': 'FSM',
            'lmbd_each_layer': 5.12589879e-05,
        }
        pl_module_kwargs = {
            'model_kwargs': model_kwargs,
            'lr': lr,
            'optimizer': 'Adam',
            'weight_decay': 0,
            'pr_win': 301
        }

        # Load the trained Lightning module.  The class defined in this
        # file is LMWDBP — the previous reference to an undefined LADBP
        # raised a NameError at runtime.
        mdl = LMWDBP.load_from_checkpoint(os.path.join(
            checkpointSavePath, f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}.ckpt'
        ), **pl_module_kwargs)

        use_BO = True
        if not use_BO:
            sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True)
        else:
            def obj_function(lp):
                # NOTE: the parameter name must match the 'lp' key of
                # pbounds — bayes_opt passes it by keyword.
                p = util.dBm2w(lp)
                p0 = util.dBm2w(1)
                sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p / p0)
                ber = np.mean(ber)
                Q = util.ber2q(ber)
                return Q

            # Maximise Q over a +/-5 dB power-scaling window.
            pbounds = {'lp': (-5, 5)}
            optimizer = BayesianOptimization(
                f=obj_function,
                pbounds=pbounds,
                verbose=2
            )
            utility = UtilityFunction(kind='ei', xi=0.05)
            optimizer.maximize(init_points=5, n_iter=10, acquisition_function=utility)
            print(optimizer.max)
            lp_opt = optimizer.max['params']['lp']
            print(f'The best lp is {lp_opt}')
            # Final measurement at the best power scaling found by BO.
            p_best = util.dBm2w(lp_opt)
            p_ref = util.dBm2w(1)
            sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p_best / p_ref)
        Q = util.ber2q(np.mean(ber))
        Qcache[lpIndx] = Q
        Q2 = util.ber2Q2(np.mean(ber))
        Q2cache[lpIndx] = Q2
        bercache[lpIndx] = ber.item()
        print(f'lp {lp},ber {ber.item()} Q factor {Q} Q2 factor {Q2}')
    save_dict['lp'] = np.array(lp_range)
    save_dict['Q'] = Qcache
    save_dict['Q2'] = Q2cache
    save_dict['ber'] = bercache
    if save_result_flag:
        scio.savemat(os.path.join(
            resultPath, f'eval_results_vary_lp_in_step_{step}.mat'
        ), save_dict)
        print(f'Result has been preserved at \"{resultPath}\"')


def vary_step(step_range=[9]):
    """Train one LMWDBP model per DBP step count in *step_range*.

    Launch power is fixed at 1 dBm.  For each ``step`` this builds the
    dataloaders, seeds nonlinear-stage hyper-parameters from a previous
    AVR-DBP search when available, trains with early stopping and
    checkpointing, and prints the test-set Q factor.
    """
    # Copy so the caller's list (and the mutable default) is never aliased.
    step_range = [*step_range]
    lp = 1
    # Name the result directory after this function.
    frame = currentframe()
    experiment_name = getframeinfo(frame).function
    resultDir = os.path.join(BASE_DIR, 'result_exp/LADBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, '{}'.format(experiment_name))
    if not os.path.exists(resultPath):
        os.mkdir(resultPath)
        print(f'Create result path at \"{resultPath}\"')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    AVRDBPsearchPath = os.path.join(BASE_DIR, f'result_exp/AVR-DBP/{experiment_name}')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    # ---------------- Behavior control ----------------
    max_epochs = 10
    batch_size = 64
    splitAmount = 65536
    tr_val_ratio = 7
    # 7:3 split of splitAmount symbols between training and validation.
    trsetLen = round(splitAmount * tr_val_ratio / (tr_val_ratio + 3))
    valsetLen = round(splitAmount * 3 / (tr_val_ratio + 3))
    tstsetLen = 110000
    lr = (5e-6, 1e-7)  # (EDC lr, nonlinear lr); see LMWDBP.configure_optimizers
    patience = 6
    log_interval = 20

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')

    for step in step_range:
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        # Validation reads the same capture as training but takes the
        # trailing symbols (count_from_end=True), so the sets don't overlap.
        valSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        # ---------------- Form dataloaders ----------------
        spanLen = 100e3  # span length [m]
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum  # total transmission length [m]
        symbolRate = 20e9

        train_dataloader, _, _ = gen_dataloader(trSetPath,
                                                block_size=block_size,
                                                symbol_num=trsetLen,
                                                batch_size=batch_size,
                                                constellations=constellations,
                                                shuffle=True)
        val_dataloader, valSet, prbsVal = gen_dataloader(valSetPath,
                                                         block_size=block_size,
                                                         symbol_num=valsetLen,
                                                         batch_size=batch_size,
                                                         constellations=constellations,
                                                         shuffle=False,
                                                         count_from_end=True)
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        # ---------------- Form dataloader end ----------------
        least_taps_per_layer = 256

        # Seed nltap / init_comp_fac from a previous AVR-DBP search result
        # when one exists; otherwise use defaults.
        if os.path.exists(AVRDBPsearchPath):
            dat = scio.loadmat(os.path.join(AVRDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')

        model_kwargs = {
            'D': -17e-6,
            'tap_per_edc': least_taps_per_layer,
            'sample_factor': 2,
            'symbol_rate': symbolRate,
            'power_norm': False,
            'case_num': 1,
            'gm': -1.3e-3,
            'step': step,
            'trans_length': L,
            'lp': lp,
            'alpha': 0.2e-3,
            'init_comp_fac': init_comp_fac,
            'nltap': nltap,
            'pre_cd': 0,
            'edc_init_method': 'FSM',
            'lmbd_each_layer': 5.12589879e-05,
        }
        pl_module_kwargs = {
            'model_kwargs': model_kwargs,
            'lr': lr,
            'optimizer': 'Adam',
            'weight_decay': 0,
            'pr_win': 301
        }

        # The Lightning module defined in this file is LMWDBP — the
        # previous reference to an undefined LADBP raised a NameError.
        mdl = LMWDBP(**pl_module_kwargs)

        # ---------------- Specify callbacks ----------------
        early_stop_callback = EarlyStopping(monitor='val_loss', patience=patience, strict=False,
                                            verbose=True, mode='min', min_delta=1e-2)
        check_point_callback = ModelCheckpoint(
            monitor='val_loss',
            verbose=True,
            save_last=None,
            save_top_k=1,
            save_weights_only=True,
            mode='min',
            dirpath=checkpointSavePath,
            filename=f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}',
        )
        # ---------------- Specify callbacks end ----------------
        logger = TensorBoardLogger(save_dir=resultPath, name='log', version=f'lp_{lp}_step_{step}')
        trainer_kwargs = {
            'accelerator': 'gpu',
            'callbacks': [early_stop_callback, check_point_callback],
            'max_epochs': max_epochs,
            'check_val_every_n_epoch': 1,
            'num_sanity_val_steps': 0,
            'log_every_n_steps': log_interval,
            'val_check_interval': 200,
            'logger': logger,
        }

        trainer = pl.Trainer(**trainer_kwargs)
        if resume_from_checkpoints:
            # Matches the ModelCheckpoint filename pattern above.
            ckpt_path = os.path.join(checkpointSavePath,
                                     f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}.ckpt')
        else:
            ckpt_path = None
        trainer.fit(mdl, train_dataloaders=train_dataloader, val_dataloaders=val_dataloader, ckpt_path=ckpt_path)

        sig, ber = eval_ber(mdl, tstSet, prbsTst, constellations=util.CONST_16QAM, use_cuda=False)
        Q = util.ber2q(np.mean(ber))
        print(f'lp {lp}, step {step}, Q factor {Q}')
        # Free the trained model before the next step count.
        del mdl


def eval_vary_step(step_range=[9]):
    """Evaluate previously-trained checkpoints across DBP step counts.

    Launch power is fixed at 1 dBm.  For each ``step`` the matching
    checkpoint is loaded and its BER / Q factor measured on the test set,
    optionally after a Bayesian optimisation of a received-power scaling
    factor.  The per-step results (Q, Q2, BER) are saved to a .mat file.
    """
    step_range = [*step_range]
    lp = 1
    resultDir = os.path.join(BASE_DIR, 'result_exp/LADBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, 'vary_step')
    if not os.path.exists(resultPath):
        os.mkdir(resultPath)
        print(f'Create result path at \"{resultPath}\"')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    AVRDBPsearchPath = os.path.join(BASE_DIR, 'result_exp/AVR-DBP/vary_step')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    # ---------------- Behavior control ----------------
    batch_size = 32
    tstsetLen = 110000
    lr = (5e-6, 5e-7)

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')
    # Save the experiment results (translated from the original Chinese note).
    save_result_flag = True
    save_dict = {}
    Qcache = np.zeros([len(step_range)])
    bercache = np.zeros_like(Qcache)
    Q2cache = np.zeros_like(Qcache)
    for stIndx, step in enumerate(step_range):
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        # ---------------- Form dataloaders ----------------
        spanLen = 100e3  # span length [m]
        # The training capture is still read: it carries spanNum, needed
        # to reconstruct the model's transmission length.
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum
        symbolRate = 20e9

        # Only the test set is needed for evaluation; the train/val
        # loaders that used to be built here were never used.
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        # ---------------- Form dataloader end ----------------
        least_taps_per_layer = 256

        # Seed nltap / init_comp_fac from a previous AVR-DBP search result
        # when one exists; otherwise use defaults.
        if os.path.exists(AVRDBPsearchPath):
            dat = scio.loadmat(os.path.join(AVRDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')

        model_kwargs = {
            'D': -17e-6,
            'tap_per_edc': least_taps_per_layer,
            'sample_factor': 2,
            'symbol_rate': symbolRate,
            'power_norm': False,
            'case_num': 1,
            'gm': -1.3e-3,
            'step': step,
            'trans_length': L,
            'lp': lp,
            'alpha': 0.2e-3,
            'init_comp_fac': init_comp_fac,
            'nltap': nltap,
            'pre_cd': 0,
            'edc_init_method': 'FSM',
            'lmbd_each_layer': 5.12589879e-05,
        }
        pl_module_kwargs = {
            'model_kwargs': model_kwargs,
            'lr': lr,
            'optimizer': 'Adam',
            'weight_decay': 0,
            'pr_win': 1024
        }

        # Load the trained Lightning module.  The class defined in this
        # file is LMWDBP — the previous reference to an undefined LADBP
        # raised a NameError at runtime.
        mdl = LMWDBP.load_from_checkpoint(os.path.join(
            checkpointSavePath, f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{least_taps_per_layer}.ckpt'
        ), **pl_module_kwargs)

        use_BO = True
        if not use_BO:
            sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True)
        else:
            def obj_function(lp):
                # NOTE: the parameter name must match the 'lp' key of
                # pbounds — bayes_opt passes it by keyword.
                p = util.dBm2w(lp)
                p0 = util.dBm2w(1)
                sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p / p0)
                ber = np.mean(ber)
                Q = util.ber2q(ber)
                return Q

            # Maximise Q over a +/-5 dB power-scaling window.
            pbounds = {'lp': (-5, 5)}
            optimizer = BayesianOptimization(
                f=obj_function,
                pbounds=pbounds,
                verbose=2
            )
            utility = UtilityFunction(kind='ei', xi=0.05)
            optimizer.maximize(init_points=5, n_iter=10, acquisition_function=utility)
            print(optimizer.max)
            lp_opt = optimizer.max['params']['lp']
            print(f'The best lp is {lp_opt}')
            # Final measurement at the best power scaling found by BO.
            p_best = util.dBm2w(lp_opt)
            p_ref = util.dBm2w(1)
            sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p_best / p_ref)
        Q = util.ber2q(np.mean(ber))
        Qcache[stIndx] = Q
        Q2 = util.ber2Q2(np.mean(ber))
        Q2cache[stIndx] = Q2
        bercache[stIndx] = ber.item()
        print(f'lp {lp}, step {step} ber {ber.item()} Q factor {Q} Q2 factor {Q2}')
        # Free the loaded model before the next step count.
        del mdl
    save_dict['step'] = np.array(step_range)
    save_dict['Q'] = Qcache
    save_dict['ber'] = bercache
    save_dict['Q2'] = Q2cache
    if save_result_flag:
        scio.savemat(os.path.join(
            resultPath, f'eval_results_vary_step_in_lp_{lp}.mat'
        ), save_dict)
        print(f'Result has been saved at \"{resultPath}\"')


def vary_tap(step_range=[9]):
    """Train one LADBP model per taps-per-linear-layer setting.

    For each DBP step count in ``step_range``, dataloaders are built from
    the 16QAM 20GBaud 1800km experiment data at launch power ``lp = 1`` dBm,
    and an LADBP model is trained for every entry of the taps-per-layer
    sweep. The best checkpoint (by validation loss) is saved under
    ``result_exp/LADBP/vary_tap/chkpts`` and the resulting BER / Q factor
    is printed.

    Args:
        step_range: iterable of DBP step counts to train for.
    """
    step_range = [*step_range]  # defensive copy; also accepts any iterable
    lp = 1  # launch power in dBm
    # Name the result directory after this function.
    frame = currentframe()
    experiment_name = getframeinfo(frame).function
    resultDir = os.path.join(BASE_DIR, 'result_exp/LADBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, '{}'.format(experiment_name))
    if not os.path.exists(resultPath):
        os.mkdir(resultPath)
        print(f'Create result path at \"{resultPath}\"')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    # Location of previous AVR-DBP parameter-search results (nltap / comp factor).
    AVRDBPsearchPath = os.path.join(BASE_DIR, 'result_exp/AVR-DBP/vary_step')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    '''Behavior control'''
    max_epochs = 10
    batch_size = 64
    splitAmount = 65536  # symbols shared between the train and validation splits
    tr_val_ratio = 7  # train : val = 7 : 3
    trsetLen = round(splitAmount * tr_val_ratio / (tr_val_ratio + 3))
    valsetLen = round(splitAmount * 3 / (tr_val_ratio + 3))
    tstsetLen = 110000
    lr = (1e-5, 1e-7)  # learning-rate pair forwarded to the LightningModule
    patience = 6  # early-stopping patience (validation checks)
    log_interval = 20

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')

    for step in step_range:
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        # NOTE(review): validation uses the same file as training; the val
        # split is taken from the end of it (count_from_end=True below),
        # so the two splits presumably do not overlap — confirm lengths.
        valSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        '''Form dataloaders'''
        spanLen = 100e3  # span length [m]
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum  # total transmission distance [m]
        symbolRate = 20e9

        train_dataloader, _, _ = gen_dataloader(trSetPath,
                                                block_size=block_size,
                                                symbol_num=trsetLen,
                                                batch_size=batch_size,
                                                constellations=constellations,
                                                shuffle=True)
        val_dataloader, valSet, prbsVal = gen_dataloader(valSetPath,
                                                         block_size=block_size,
                                                         symbol_num=valsetLen,
                                                         batch_size=batch_size,
                                                         constellations=constellations,
                                                         shuffle=False,
                                                         count_from_end=True)
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        '''Form dataloader end'''
        # total_taps_limit = util.cal_tap_num(DL=17e-6 * L,
        #                                     ref_freq=193.1e12,
        #                                     sample_freq=2 * symbolRate)
        # least_taps_per_layer = int(math.ceil(total_taps_limit / step))
        # print(f'At least {least_taps_per_layer} taps per linear layer')
        max_taps_per_layer = 256
        print(f'At most {max_taps_per_layer} taps per linear layer')
        # Sweep downwards from the maximum in steps of 32 taps.
        taps_per_layer_range = max_taps_per_layer - np.arange(0, 2 * 32, 32)
        # taps_per_layer_range = least_taps_per_layer + np.arange(0, 3 * (round(80 / step)), round(80 / step))

        # Seed the nonlinear-stage parameters from a previous AVR-DBP search
        # when available; otherwise fall back to hand-picked defaults.
        if os.path.exists(AVRDBPsearchPath):
            dat = scio.loadmat(os.path.join(AVRDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')
        for taps_per_layer in taps_per_layer_range:
            print(f'There are {taps_per_layer} taps per linear layer!')
            model_kwargs = {
                'D': -17e-6,
                'tap_per_edc': taps_per_layer,
                'sample_factor': 2,
                'symbol_rate': symbolRate,
                'power_norm': False,
                'case_num': 1,
                'gm': -1.3e-3,
                'step': step,
                'trans_length': L,
                'lp': lp,
                'alpha': 0.2e-3,
                'init_comp_fac': init_comp_fac,
                'nltap': nltap,
                'pre_cd': 0,
                'edc_init_method': 'FSM',
                'lmbd_each_layer': 5.12589879e-05,
            }
            pl_module_kwargs = {
                # BUG FIX: this key was misspelled 'nodel_kwargs', so the
                # LADBP module never received its model configuration.
                'model_kwargs': model_kwargs,
                'lr': lr,
                'optimizer': 'Adam',
                'weight_decay': 0,
                'pr_win': 301
            }

            mdl = LADBP(**pl_module_kwargs)

            '''Specify callbacks'''
            early_stop_callback = EarlyStopping(monitor='val_loss', patience=patience, strict=False,
                                                verbose=True, mode='min', min_delta=1e-2)
            check_point_callback = ModelCheckpoint(
                monitor='val_loss',
                verbose=True,
                save_last=None,
                save_top_k=1,
                save_weights_only=True,
                mode='min',
                dirpath=checkpointSavePath,
                filename=f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{taps_per_layer}',
            )
            '''Specify callbacks end'''
            logger = TensorBoardLogger(save_dir=resultPath, name='log',
                                       version=f'lp_{lp}_step_{step}_taps_{taps_per_layer}')
            trainer_kwargs = {
                'accelerator': 'gpu',
                'callbacks': [early_stop_callback, check_point_callback],
                'max_epochs': max_epochs,
                'check_val_every_n_epoch': 1,
                'num_sanity_val_steps': 0,
                'log_every_n_steps': log_interval,
                'val_check_interval': 200,
                'logger': logger,
            }

            trainer = pl.Trainer(**trainer_kwargs)
            if resume_from_checkpoints:
                ckpt_path = os.path.join(checkpointSavePath, 'model.ckpt')
            else:
                ckpt_path = None
            trainer.fit(mdl, train_dataloaders=train_dataloader, val_dataloaders=val_dataloader, ckpt_path=ckpt_path)

            sig, ber = eval_ber(mdl, tstSet, prbsTst, constellations=util.CONST_16QAM, use_cuda=False)
            Q = util.ber2q(np.mean(ber))
            print(f'lp {lp}, step {step}, taps {taps_per_layer}, Q factor {Q}')


def eval_vary_tap(step_range=[18]):
    """Evaluate the LADBP checkpoints produced by ``vary_tap``.

    For each DBP step count in ``step_range`` and each taps-per-layer
    setting, the matching checkpoint is loaded and evaluated on the test
    set. With ``use_BO`` enabled, a Bayesian optimization over the
    received launch power (``lp`` in [-5, 5] dBm) maximizes the Q factor
    before the final BER evaluation. Results are printed only (not saved).

    Args:
        step_range: iterable of DBP step counts to evaluate.
    """
    step_range = [*step_range]  # defensive copy; also accepts any iterable
    lp = 1  # nominal launch power in dBm (checkpoints were trained at this)
    resultDir = os.path.join(BASE_DIR, 'result_exp/LADBP')
    if not os.path.exists(resultDir):
        os.makedirs(resultDir)
        print(f'Create result dir at \"{resultDir}\"')
    resultPath = os.path.join(resultDir, 'vary_tap')
    if not os.path.exists(resultPath):
        os.mkdir(resultPath)
        print(f'Create result path at \"{resultPath}\"')
    checkpointSavePath = os.path.join(resultPath, 'chkpts')
    # Location of previous AVR-DBP parameter-search results (nltap / comp factor).
    AVRDBPsearchPath = os.path.join(BASE_DIR, 'result_exp/AVR-DBP/vary_step')

    if not os.path.exists(checkpointSavePath):
        os.mkdir(checkpointSavePath)
        print(f'Create result path at \"{checkpointSavePath}\"')

    '''Behavior control'''

    batch_size = 32
    trsetLen = 8192
    valsetLen = 4096
    tstsetLen = 110000
    lr = (1e-5, 1e-7)  # forwarded to the LightningModule (unused at eval time)

    block_size = 2048
    assert (block_size % 2 == 0)
    constellations = util.CONST_16QAM
    resume_from_checkpoints = False
    print(f'Resume from checkpoint: {resume_from_checkpoints}')

    for step in step_range:
        trSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        # NOTE(review): validation uses the same file as training; the val
        # split is taken from the end of it (count_from_end=True below).
        valSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/trSet_lp_{lp}.mat'
        )
        tstSetPath = os.path.join(
            BASE_DIR, f'data/experiment/16QAM20GBaud1800kmHe/tstSet_lp_{lp}.mat'
        )
        '''Form dataloaders'''
        spanLen = 100e3  # span length [m]
        data = scio.loadmat(trSetPath)
        spanNum = data['spanNum'][0, 0]
        L = spanLen * spanNum  # total transmission distance [m]
        symbolRate = 20e9

        train_dataloader, _, _ = gen_dataloader(trSetPath,
                                                block_size=block_size,
                                                symbol_num=trsetLen,
                                                batch_size=batch_size,
                                                constellations=constellations,
                                                shuffle=True)
        val_dataloader, valSet, prbsVal = gen_dataloader(valSetPath,
                                                         block_size=block_size,
                                                         symbol_num=valsetLen,
                                                         batch_size=batch_size,
                                                         constellations=constellations,
                                                         shuffle=False,
                                                         count_from_end=True)
        test_dataloader, tstSet, prbsTst = gen_dataloader(tstSetPath,
                                                          block_size=block_size,
                                                          symbol_num=tstsetLen,
                                                          batch_size=batch_size,
                                                          constellations=constellations,
                                                          shuffle=False)
        '''Form dataloader end'''
        # total_taps_limit = util.cal_tap_num(DL=17e-6 * L,
        #                                     ref_freq=193.1e12,
        #                                     sample_freq=2 * symbolRate)
        # least_taps_per_layer = int(math.ceil(total_taps_limit / step))
        # print(f'At least {least_taps_per_layer} taps per linear layer')
        # taps_per_layer_range = least_taps_per_layer + np.arange(0, 3 * (round(80 / step)), round(80 / step))
        max_taps_per_layer = 256
        print(f'At most {max_taps_per_layer} taps per linear layer')
        taps_per_layer_range = max_taps_per_layer - np.arange(0, 1 * 32, 32)

        # Seed the nonlinear-stage parameters from a previous AVR-DBP search
        # when available; otherwise fall back to hand-picked defaults.
        if os.path.exists(AVRDBPsearchPath):
            dat = scio.loadmat(os.path.join(AVRDBPsearchPath, f'search_result_lp_{lp}_step_{step}.mat'))
            nltap = dat['nltap'][0][0].astype(int)
            init_comp_fac = dat['init_comp_fac'][0][0]
            print('Previous search result read.')
        else:
            nltap = 7
            init_comp_fac = 1.5
            print(f'Init nltap to {nltap} and cr {init_comp_fac}.')

        for taps_per_layer in taps_per_layer_range:
            # BUG FIX: this log line previously read '... per linear later!'
            # (missing 'taps', 'later' for 'layer'); aligned with vary_tap.
            print(f'There are {taps_per_layer} taps per linear layer!')
            model_kwargs = {
                'D': -17e-6,
                'tap_per_edc': taps_per_layer,
                'sample_factor': 2,
                'symbol_rate': symbolRate,
                'power_norm': False,
                'case_num': 1,
                'gm': -1.3e-3,
                'step': step,
                'trans_length': L,
                'lp': lp,
                'alpha': 0.2e-3,
                'init_comp_fac': init_comp_fac,
                'nltap': nltap,
                'pre_cd': 0,
                'edc_init_method': 'FSM',
                'lmbd_each_layer': 5.12589879e-05,
            }
            pl_module_kwargs = {
                'model_kwargs': model_kwargs,
                'lr': lr,
                'optimizer': 'Adam',
                'weight_decay': 0,
                'pr_win': 301
            }

            mdl = LADBP.load_from_checkpoint(os.path.join(
                checkpointSavePath, f'LADBP_lp_{lp}_step_{step}_tapsPerLinearLayer_{taps_per_layer}.ckpt'
            ), **pl_module_kwargs)

            use_BO = True
            if not use_BO:
                sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True)
            else:
                # The parameter MUST be named 'lp' to match the pbounds key
                # (BayesianOptimization calls f(**params)); it intentionally
                # shadows the outer launch-power variable.
                def obj_function(lp):
                    p = util.dBm2w(lp)
                    p0 = util.dBm2w(1)
                    sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p / p0)
                    ber = np.mean(ber)
                    Q = util.ber2q(ber)
                    return Q

                pbounds = {'lp': (-5, 5)}
                optimizer = BayesianOptimization(
                    f=obj_function,
                    pbounds=pbounds,
                    verbose=2
                )
                utility = UtilityFunction(kind='ei', xi=0.05)
                optimizer.maximize(init_points=5, n_iter=10, acquisition_function=utility)
                print(optimizer.max)
                lp_opt = optimizer.max['params']['lp']
                print(f'The best lp is {lp_opt}')
                # Re-evaluate once at the optimum to obtain the final BER.
                p_best = util.dBm2w(lp_opt)
                p_ref = util.dBm2w(1)
                sig, ber = eval_ber(mdl, tstSet, prbsTst, use_cuda=True, power_factor=p_best / p_ref)
            Q = util.ber2q(np.mean(ber))
            Q2 = util.ber2Q2(np.mean(ber))
            # NOTE(review): ber.item() assumes eval_ber returns a single-element
            # array here — confirm, otherwise this raises.
            print(f'lp {lp},step {step} taps {taps_per_layer} ber {ber.item()} Q factor {Q} Q2 factor {Q2}')


def form_csv_experiment_result():
    """Convert a saved sweep result (.mat) into a CSV table.

    Reads ``<result_path>/<result_name>.mat`` (as written by the eval_*
    sweeps) and writes its lp / BER / Q / Q2 columns to a CSV file with
    the same stem, next to the source file.
    """
    import csv
    result_path = os.path.join(BASE_DIR, 'result_exp/LADBP/vary_lp')  # result directory
    result_name = 'eval_results_vary_lp_in_step_18'  # result file stem

    result_dict = scio.loadmat(
        os.path.join(result_path, f'{result_name}.mat')
    )
    Q = result_dict['Q'].squeeze()
    ber = result_dict['ber'].squeeze()
    lp = result_dict['lp'].squeeze()
    Q2 = result_dict['Q2'].squeeze()
    # newline='' is required for csv.writer; without it the csv module emits
    # an extra blank row between records on Windows.
    with open(os.path.join(result_path, f'{result_name}.csv'), 'w', newline='') as f:
        csv_writer = csv.writer(f)
        csv_writer.writerow(['lp', 'ber', 'Q factor', 'Q2 factor'])
        for result_indx in range(Q.shape[0]):
            csv_writer.writerow([lp[result_indx], ber[result_indx], Q[result_indx], Q2[result_indx]])

        print('transform has been done!')


if __name__ == '__main__':
    # Silence library warnings (Lightning / scipy deprecation noise) so the
    # sweep progress printed by the experiment functions stays readable.
    warnings.filterwarnings('ignore')
    # One experiment is run per invocation; uncomment the sweep you need.
    vary_lp(lp_range=[1])
    # eval_vary_lp(lp_range=[-3, -2, -1, 0, 1, 2, 3])
    # vary_step(step_range=[3, 9, 14, 18])
    # eval_vary_step(step_range=[3, 9, 14, 18])
    # vary_tap(step_range=[18])
    # eval_vary_tap(step_range=[18])
    # form_csv_experiment_result()
