from typing import List, Dict
import os
from mindspore import context, Tensor, nn, Model
from mindnlp.transformers import BertForSequenceClassification, BertTokenizer, AutoModel, AutoTokenizer, RobertaForSequenceClassification, RobertaTokenizer, AutoModelForSequenceClassification
from train import *
from dataset import get_processed_dataset
from args import args_parser
import wandb
from mindspore.dataset import GeneratorDataset
import mindspore.dataset as ds
import numpy
import random
import mindspore
from model import *

def set_seed(seed: int = 0):
    """Seed all RNG sources (MindSpore, NumPy, stdlib random) for reproducibility.

    Also exports PYTHONHASHSEED so any child interpreters inherit deterministic
    string hashing; note it cannot affect the already-running interpreter.
    """
    mindspore.set_seed(seed)
    numpy.random.seed(seed)
    random.seed(seed)
    # Bug fix: the variable CPython recognizes is PYTHONHASHSEED, not "PYTHONSEED".
    os.environ["PYTHONHASHSEED"] = str(seed)

def finetune(args):
    """Fine-tune the compressed (small) model on the GLUE/MRPC task.

    Builds the base model + tokenizer, derives the small model from the
    configured depth splits, then trains it via Runner.
    """
    modelFactory = ModelFactory(args=args)
    base_model, tokenizer = modelFactory.create_base_model(args.model)
    model = modelFactory.create_small_model(base_model, args.divided_depths, args.meta_divided_depths)
    # Bug fix: Dict[GeneratorDataset] raises TypeError at runtime (typing.Dict
    # needs both key and value types). Presumably keyed by split name — confirm
    # against get_processed_dataset in dataset.py.
    dataset: Dict[str, GeneratorDataset] = get_processed_dataset(args, ("glue", "mrpc"), tokenizer=tokenizer, batch_size=args.batch_size)

    trainUtils = Runner(args=args, model=model, dataset=dataset)
    trainUtils.do_train()

def finetune_origin_model(args):
    """Fine-tune the unmodified (full) base model on GLUE/MRPC as a baseline."""
    modelFactory = ModelFactory(args=args)
    model, tokenizer = modelFactory.create_base_model(args.model)
    # Bug fix: Dict[GeneratorDataset] raises TypeError at runtime (typing.Dict
    # needs both key and value types).
    dataset: Dict[str, GeneratorDataset] = get_processed_dataset(args, ("glue", "mrpc"), tokenizer=tokenizer, batch_size=args.batch_size)
    trainUtils = Runner(args=args, model=model, dataset=dataset)
    trainUtils.do_train()

def distill(args):
    """Distill the base model into the small model on the bookcorpus dataset.

    The base model is passed both to dataset preprocessing and to the Runner
    (as the teacher for distillation).
    """
    modelFactory = ModelFactory(args=args)
    base_model, tokenizer = modelFactory.create_base_model(args.model)
    model = modelFactory.create_small_model(base_model, args.divided_depths, args.meta_divided_depths)
    # Bug fix: Dict[GeneratorDataset] raises TypeError at runtime (typing.Dict
    # needs both key and value types).
    dataset: Dict[str, GeneratorDataset] = get_processed_dataset(args, "bookcorpus", tokenizer=tokenizer, batch_size=args.batch_size, model=base_model)
    runner = Runner(args=args, model=model, dataset=dataset, base_model=base_model)
    runner.do_train()

def offsite_tuning(args):
    """Run offsite-tuning on GLUE/MRPC: train the small model with the base
    model available, then evaluate on the test split."""
    modelFactory = ModelFactory(args=args)
    base_model, tokenizer = modelFactory.create_base_model(args.model)
    model = modelFactory.create_small_model(base_model, args.divided_depths, args.meta_divided_depths)
    # Bug fix: Dict[GeneratorDataset] raises TypeError at runtime (typing.Dict
    # needs both key and value types).
    dataset: Dict[str, GeneratorDataset] = get_processed_dataset(args, ("glue", "mrpc"), tokenizer=tokenizer, batch_size=args.batch_size)
    trainUtils = Runner(args=args, model=model, base_model=base_model, dataset=dataset)
    trainUtils.do_train()
    trainUtils.do_test()

def incub(args):
    """Incubation phase: train each integrated model produced by the factory,
    then test the incubated base model assembled from them."""
    modelFactory = ModelFactory(args=args)
    integrated_model_generator, tokenizer = modelFactory.create_integrated_model()
    # Bug fix: Dict[GeneratorDataset] raises TypeError at runtime (typing.Dict
    # needs both key and value types).
    dataset: Dict[str, GeneratorDataset] = get_processed_dataset(args, ("glue", "mrpc"), tokenizer=tokenizer, batch_size=args.batch_size)
    # idx distinguishes the per-segment training runs inside Runner.
    for i, integrated_model in enumerate(integrated_model_generator):
        trainUtils = Runner(args=args, model=integrated_model, dataset=dataset, idx=i)
        trainUtils.do_train()
    incubated_base_model = modelFactory.create_incubated_base_model(args.model)
    trainUtils = Runner(args=args, model=incubated_base_model, dataset=dataset)
    trainUtils.do_test()

def infer(args):
    """Inference phase: build the client/server split model and run inference."""
    print("------------------phase: INFER------------------")
    factory = ModelFactory(args)
    client_model, server_model, tokenizer = factory.create_divided_model()
    runner = Runner(
        args=args,
        model=client_model,
        server_model=server_model,
        tokenizer=tokenizer,
        dataset=None,
    )
    runner.do_infer()

def main():
    """Entry point: parse args, set up seeding / wandb / MindSpore context,
    then dispatch to the phase selected by ``args.phase``."""
    args = args_parser()

    set_seed(args.seed)
    model_path = args.model
    wandb.init(
        project="MS_FL_LLM",
        name=f"{args.phase}_{model_path}_mrpc",
        config=args,
    )
    # PYNATIVE_MODE is required here: graph mode caused an error about an
    # unexpected extra "input_ids" input. (Translated from the original
    # Chinese note.)
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU", device_id=args.device)

    # Phase name -> handler dispatch table.
    phase_handlers = {
        'DT': distill,               # distillation
        'OT': offsite_tuning,
        'FT': finetune,
        'PFT': finetune_origin_model,
        'ICB': incub,
        'INFER': infer,
    }
    if args.phase not in phase_handlers:
        raise NotImplementedError
    phase_handlers[args.phase](args)

# Script entry point: only runs when executed directly, not on import.
if __name__ == '__main__':
    main()