import sys
sys.path.append("../../")

import os
import pickle
import random
import shutil

import fitlog
import torch

from metrics import precision_recall_fscore_support
from sentiDataReader import SentiDatasetV2, Senti_domain_map
from model import accuracy_score, VanillaBert
from InstanceReweighting import InstanceReweightingV3
from Senti_Utils import reconfig_args, obtain_domain_set, obtain_model

def pred_Logits(model:VanillaBert, data, idxs=None, batch_size=20):
    """Score `data` with `model` in mini-batches and return the stacked logits.

    Args:
        model: classifier exposing a ``predict(batch)`` method.
        data: dataset supporting ``__len__``, indexing and ``collate_raw_batch``.
        idxs: optional subset of example indices; defaults to the whole dataset.
        batch_size: number of examples per forward pass.

    Returns:
        Tensor with one prediction row per requested example, in `idxs` order.
    """
    if idxs is None:
        idxs = list(range(len(data)))
    batch_outputs = []
    with torch.no_grad():  # inference only — no autograd bookkeeping
        for start in range(0, len(idxs), batch_size):
            chunk = idxs[start:start + batch_size]  # slicing clamps at the end
            collated = data.collate_raw_batch([data[j] for j in chunk])
            batch_outputs.append(model.predict(collated))
    return torch.cat(batch_outputs)

def prediction(model:VanillaBert, data, idxs=None, batch_size=20):
    """Predict class labels for `data`.

    Returns:
        (labels, scores): per-example argmax class index and the corresponding
        maximum logit.

    Uses ``max(dim=1)`` instead of sorting every row and taking the last
    column — O(C) per row instead of O(C log C), same result. (On ties the
    original unstable sort gave an unspecified winner; ``max`` returns a
    deterministic index.)
    """
    pred_tensor = pred_Logits(model, data, idxs, batch_size)
    vals, labels = pred_tensor.max(dim=1)
    return labels, vals

def acc_P_R_F1(y_true, y_pred):
    """Return accuracy plus per-class (precision, recall, F1, support) arrays."""
    y_pred_cpu = y_pred.cpu()  # move off-device once, reuse for both metrics
    acc = accuracy_score(y_true, y_pred_cpu)
    prf = precision_recall_fscore_support(y_true, y_pred_cpu)
    return acc, prf

def Perf(model:VanillaBert, data, label, idxs=None, batch_size=20):
    """Evaluate `model` on `data` (optionally restricted to `idxs`).

    Returns ``(accuracy, (precision, recall, f1, support))`` against `label`.
    """
    predicted, _ = prediction(model, data, idxs=idxs, batch_size=batch_size)
    if idxs is None:
        gold = label
    else:
        gold = label[idxs]  # align the gold labels with the evaluated subset
    return acc_P_R_F1(gold, predicted)

class WindTrainer(InstanceReweightingV3):
    """Meta-learning trainer that learns per-instance weights for OOD data.

    Out-of-domain training instances start at weight logit 0.0 (sigmoid 0.5)
    and are refined by meta-gradient steps against a few-shot target-domain
    validation set; in-domain instances are pinned at logit 10.0 (sigmoid ~1)
    and masked out of the weight update. Metrics are logged with fitlog.
    """

    def __init__(self, class_num, log_dir, suffix, weight_eta=0.1, lr4model=2e-2,
                 coeff4expandset=1.0, max_few_shot_size=20, Inner_BatchSize=5):
        """Set up logging directory and hyper-parameters.

        Args:
            class_num: number of target classes.
            log_dir: fitlog directory; recreated from scratch on every run.
            suffix: tag appended to logged metric names.
            weight_eta: step size for the instance-weight (logit) updates.
            lr4model: base model learning rate (see obtainOptim for the decay).
            coeff4expandset / max_few_shot_size / Inner_BatchSize: forwarded
                to InstanceReweightingV3.
        """
        super(WindTrainer, self).__init__(class_num, lr4model, coeff4expandset,
                                          max_few_shot_size, Inner_BatchSize)
        self.log_dir = log_dir
        # BUGFIX: replaced os.system("rm -r ...")/os.system("mkdir ...") —
        # that shelled out with an unquoted, attacker-controllable path
        # (shell injection), was non-portable, and swallowed errors.
        if os.path.exists(self.log_dir):
            shutil.rmtree(self.log_dir)
        os.makedirs(self.log_dir)
        fitlog.set_log_dir(self.log_dir)
        self.suffix = suffix
        self.weight_eta = weight_eta
        self.best_valid_acc = 0.0

    def obtainOptim(self, model, learning_rate=-1):
        """Build an Adam optimizer with layer-wise learning-rate decay.

        Transformer layer k receives lr * 0.8**(12 - k) (deeper layers get a
        larger rate), embeddings get the smallest rate lr * 0.8**13, and every
        other parameter uses the base rate. ``learning_rate=-1`` is a sentinel
        meaning "use self.lr4model".
        """
        learning_rate = self.lr4model if learning_rate == -1 else learning_rate
        optimizerGroupedParameters = [
            {'params': p,
             # layer-wise fine-tuning: decay by depth parsed from "layer.<k>."
             'lr': learning_rate * pow(0.8, 12 - int(
                 n.split("layer.")[1].split(".", 1)[0])) if "layer." in n
                 else (learning_rate * pow(0.8, 13) if "embedding" in n else learning_rate)
             } for n, p in model.named_parameters()
        ]
        return torch.optim.Adam(optimizerGroupedParameters)

    def MetaStep(self, model:VanillaBert, optim:torch.optim.Optimizer, batch,
                    weight:torch.Tensor, weight_mask, meta_step=5):
        """Refine instance-weight logits via trial steps + rollback.

        Each round: (1) take a trial optimizer step with the current sigmoid
        weights, (2) measure gradients/loss/accuracy on the few-shot
        validation set, (3) roll the model parameters back, (4) move the
        weight logits against the normalized validation gradient. Entries
        where `weight_mask` is 0 (in-domain instances) are never updated.

        Returns the updated weight logits; model parameters are restored to
        their entry values. Note the shared `optim` keeps its (Adam) state
        across trial steps.
        """
        assert hasattr(self, "few_shot_data")
        assert hasattr(self, "few_shot_data_list")
        # Snapshot parameters so every trial step starts from the same point.
        initStateDicts = model.state_dict()
        initStateDicts = {key: initStateDicts[key].clone() for key in initStateDicts}
        for step in range(meta_step):
            u = weight.sigmoid()
            model.zero_grad()
            loss = self.LossList(model, batch)
            sumLoss = (u * loss).sum()
            sumLoss.backward()
            optim.step()  # trial update with the current instance weights
            self.val_grad_dicts, fewLoss, fewAcc = self.meanGradOnValSet(model,
                                                                         few_shot_data=self.few_shot_data,
                                                                         few_shot_data_list=self.few_shot_data_list)
            print(f"##Perf on Meta Val Set## {step} | {meta_step} :  loss/acc = {fewLoss}/{fewAcc}")
            model.load_state_dict(initStateDicts)  # roll back the trial step
            u_grads = self.ComputeGrads4Weights(model, batch, self.few_shot_data, self.few_shot_data_list)
            # Chain rule through the sigmoid: du/dlogit = u * (1 - u).
            w_grads = u_grads * u * (1 - u)
            # Descend along the L2-normalized gradient direction.
            weightGrads = -1 * (w_grads / w_grads.norm(2))
            update = self.weight_eta * weightGrads
            weight = weight - update * (weight_mask.to(update.device))
        return weight

    def OptimStep(self, model, model_optim, batch, weight):
        """Take one real optimizer step using the (fixed) instance weights.

        NOTE(review): there is no model.zero_grad() here — this relies on
        ComputeGrads4Weights leaving parameter grads clean; verify upstream.
        """
        loss = self.LossList(model, batch)
        sumLoss = ((weight.sigmoid()) * loss).sum()
        sumLoss.backward()
        model_optim.step()

    def dataIter(self, OOD_Set, InD_Set=None, batch_size=32):
        """Yield shuffled batches mixing the OOD set with an optional in-domain set.

        Yields ``(collated_batch, batch_indices, ood_mask)`` where `ood_mask`
        is 1.0 for out-of-domain items and 0.0 for in-domain ones. Combined
        indices < len(OOD_Set) address OOD_Set; the remainder address InD_Set.
        The shuffled index list is doubled so the final slice wraps around and
        every batch is full-sized (some items repeat in the last batch).
        """
        p_idxs = list(range(len(OOD_Set)))
        p_len = len(p_idxs)
        if InD_Set is None:
            l_len = 0
            l_idxs = []
        else:
            l_idxs = list(range(len(InD_Set)))
            l_len = len(l_idxs)
        data_size = p_len + l_len
        idxs = random.sample(range(data_size), data_size) * 2
        for start_i in range(0, data_size, batch_size):
            batch_idxs = idxs[start_i:start_i + batch_size]
            items = [OOD_Set[p_idxs[idx]] if idx < p_len else
                     InD_Set[l_idxs[idx - p_len]] for idx in batch_idxs]
            yield OOD_Set.collate_raw_batch(items), batch_idxs, \
                torch.tensor([1. if idx < p_len else 0. for idx in batch_idxs])

    def Training(self, model:VanillaBert, train_set:SentiDatasetV2, valid_set:SentiDatasetV2,
                 test_set:SentiDatasetV2, indomain_set:SentiDatasetV2=None, max_epoch=100, max_valid_every=100,
                 model_file="./tmp.pkl"):
        """Main loop: alternate weight refinement (MetaStep) and model updates.

        OOD instances start at logit 0.0; in-domain instances are pinned at
        10.0 and excluded from the meta update via the mask from dataIter.
        Evaluates on `test_set` every `max_valid_every` steps and saves the
        final model to `model_file`.
        """
        self.few_shot_data, self.few_shot_data_list = self.FewShotDataList(valid_set)
        weights = [0.0] * len(train_set) + \
            ([] if indomain_set is None else [10.0] * len(indomain_set))
        self.train_set_weights = torch.tensor(weights, device=self.device)
        meta_optim = self.obtainOptim(model)
        model_optim = self.obtainOptim(model)
        test_label = test_set.labelTensor().clone()
        step = 0
        for epoch in range(max_epoch):
            for batch, indices, weight_mask in self.dataIter(train_set, indomain_set, self.batch_size):
                weights = self.train_set_weights[indices]
                new_weights = self.MetaStep(model, meta_optim, batch, weights, weight_mask)
                self.train_set_weights[indices] = new_weights
                self.OptimStep(model, model_optim, batch, new_weights)
                if (step + 1) % max_valid_every == 0:
                    self.valid(model, test_set, test_label, self.suffix, step)
                step += 1
        model.save_model(model_file)

    def valid(self, model, test_set, test_label, test_suffix, step=0):
        """Evaluate on the test set and log per-class metrics to fitlog."""
        rst_model = Perf(model, test_set, test_label)
        acc_v, (p_v, r_v, f1_v, _) = rst_model
        # BUGFIX: the original format string had no placeholder, so `step`
        # was silently dropped from the printed line.
        print("BestPerf {} : ".format(step), rst_model)
        output_items = [("valid_acc", acc_v)] + \
                       [('valid_prec_{}'.format(i), p_v[i]) for i in range(self.class_num)] + \
                       [('valid_recall_{}'.format(i), r_v[i]) for i in range(self.class_num)] + \
                       [('valid_f1_{}'.format(i), f1_v[i]) for i in range(self.class_num)]
        fitlog.add_metric({f"{test_suffix}": dict(output_items)}, step=step)
        fitlog.add_best_metric({f"FinalPerf_{self.suffix}": dict(output_items)})


if __name__ == "__main__":
    # NOTE(review): pickle.load can execute arbitrary code — acceptable only
    # because args.pkl is a trusted, project-generated file.
    with open("../../args.pkl", 'rb') as fr:
        argConfig = pickle.load(fr)
    # BUGFIX: str.rstrip(".py") strips ANY trailing '.', 'p', 'y' characters
    # (e.g. "happy.py" -> "ha"); splitext removes exactly the extension.
    argConfig.model_dir = os.path.splitext(str(__file__))[0]
    # Set all the seeds
    seed = argConfig.seed
    reconfig_args(argConfig)

    # See if CUDA available
    device = torch.device("cuda:0") if torch.cuda.is_available() else torch.device("cpu")

    domainID = 2
    fewShotCnt = 100
    SentiDomainList = list(Senti_domain_map.keys())
    newDomainName = SentiDomainList[domainID - 1]

    model1, tokenizer = obtain_model(args=argConfig, model_device=device)
    labeledSource, validTarget, testTarget, labeledTarget, unlabeledTarget = obtain_domain_set(newDomainName,
                                                                                               tokenizer_M=tokenizer,
                                                                                               lt_count=0)
    trainer = WindTrainer(class_num=2, log_dir=argConfig.model_dir, suffix="{}_FS{}".format(newDomainName, fewShotCnt),
                          weight_eta=0.1, lr4model=2e-5, coeff4expandset=1.0, max_few_shot_size=20, Inner_BatchSize=20)
    trainer.Training(model=model1, train_set=labeledSource, valid_set=validTarget,
                     test_set=testTarget, indomain_set=labeledTarget, max_epoch=100, max_valid_every=30,
                     model_file="./Wind.pkl")
