import logging
import argparse
import warnings

import numpy as np
import mindspore.ops as ops
from os.path import isdir
from pathlib import Path
from nequip.model import model_from_config
from nequip.utils import Config
from nequip.data import dataset_from_config
from nequip.utils import load_file
from nequip.utils.test import assert_AtomicData_equivariant
from nequip.utils.versions import check_code_version
from nequip.utils._global_options import _set_global_options
from nequip.scripts._logger import set_up_script_logger
from nequip.train.trainer_mindspore import Trainer
from nequip.data import DataLoader, AtomicData
import pickle
import mindspore
def main(default_config):
    """Build a NequIP model from a config file, load weights converted from a
    PyTorch checkpoint, and run forward passes for cross-framework verification.

    NOTE(review): this is a porting/debugging harness — the config path and all
    data/checkpoint paths are hard-coded below; parameterize before real use.
    """
    parser = argparse.ArgumentParser(description="Train a NequIP model.")
    parser.add_argument("config", help="configuration file")
    # TODO(review): real CLI input is ignored — the config path is hard-coded.
    args = parser.parse_args(args=['./E_field_magm.yaml'])
    config = Config.from_file(args.config, defaults=default_config)
    set_up_script_logger(config.get("log", None), config.verbose)

    dataset = dataset_from_config(config, prefix="dataset")

    # Smoke-test dataloader: shuffled, small batches.
    dl_kwargs = dict()
    dataloader = DataLoader(dataset,
        shuffle=True,  # training should shuffle
        batch_size=5,
        **dl_kwargs,
    )

    # Use the first 400 frames for model-initialization statistics.
    dataset_train = dataset.index_select(np.arange(400))

    final_model = model_from_config(
        config=config, initialize=True, dataset=dataset_train
    )

    # Load reference inputs exported from the PyTorch implementation and
    # convert each tensor to MindSpore (ints -> int32, everything else ->
    # float32).  The previous second load of `input_transformed.pkl` was dead
    # code (its result was immediately overwritten) and has been removed; the
    # open() calls now use context managers so the handles are closed.
    import torch
    with open('/mnt/c/Users/dell/Desktop/modifyNequip/input.pkl', 'rb') as f:
        input_data = pickle.load(f)
    input_for_ms = {}
    for key, value in input_data.items():
        if value.dtype in (torch.int64, torch.int32):
            dtype = mindspore.int32
        else:
            dtype = mindspore.float32
        input_for_ms[key] = mindspore.Tensor(value.cpu().numpy(), dtype=dtype)

    # Load the converted weights and report parameters that failed to load.
    paras = mindspore.load_checkpoint('/mnt/c/Users/dell/Desktop/modifyNequip/transformed_ckpt.ckpt')
    para_load = mindspore.load_param_into_net(final_model, paras)
    print(para_load)
    out = final_model(input_for_ms)
    print(out)

    # Forward pass over every batch from the dataloader.
    for ibatch, batch in enumerate(dataloader):
        input_data = dict(AtomicData.to_AtomicDataDict(batch))
        out = final_model(input_data)
        print('#' * 50)
        print(out)

    # Save once after the loop — the original saved the identical checkpoint
    # on every iteration.
    mindspore.save_checkpoint(final_model, '/mnt/c/Users/dell/Desktop/modifyNequip/ms_model_example.ckpt')

    print('f')


def _check_old_keys(config) -> None:
    """check ``config`` for old/depricated keys and emit corresponding errors/warnings"""
    # compile_model
    k = "compile_model"
    if k in config:
        if config[k]:
            raise ValueError("the `compile_model` option has been removed")
        else:
            warnings.warn("the `compile_model` option has been removed")

if __name__ == "__main__":
    default_config = dict(
        root="./",
        run_name="NequIP",
        wandb=False,
        wandb_project="NequIP",
        model_builders=[
            "SimpleIrrepsConfig",
            "EnergyModel",
            "PerSpeciesRescale",
            "ForceOutput",
            "RescaleEnergyEtc",
        ],
        dataset_statistics_stride=1,
        default_dtype="float32",
        allow_tf32=False,
        verbose="INFO",
        model_debug_mode=False,
        equivariance_test=False,
        grad_anomaly_mode=False,
        append=False,
        _jit_bailout_depth=2,
        _jit_fusion_strategy=[("DYNAMIC", 3)],
    )

    main(default_config)