import collections
import collections.abc
import gc
import json
import os
import pickle
import sys
import warnings
from copy import deepcopy
from os.path import abspath, dirname, join
from typing import Any, Dict, List

import numpy as np
import torch
import torch as th
import yaml
from sacred import Experiment, SETTINGS
from sacred.observers import FileStorageObserver
from sacred.utils import apply_backspaces_and_linefeeds
from tqdm import tqdm

from run import run
from utils.logging import get_logger

# Suppress noisy library warnings
warnings.filterwarnings("ignore", category=UserWarning)



# Sacred setting: capture stdout/stderr via sys-level redirection
SETTINGS['CAPTURE_MODE'] = "sys"
logger = get_logger()

# Experiment initialisation (git info disabled so runs work outside a git checkout)
ex = Experiment("pymarl", save_git_info=False)
ex.logger = logger
ex.captured_out_filter = apply_backspaces_and_linefeeds

# Results path: "<two directory levels above this file>/results"
results_path = join(dirname(dirname(abspath(__file__))), "results")




def convert_and_save_to_json(pkl_dir, json_dir, _log, batch_size=100):
    """Convert every ``.pkl`` trajectory file in *pkl_dir* to JSON in *json_dir*.

    Each pickle is expected to hold a list of per-agent trajectories, where a
    step is an indexable record of the form (state, obs, action, reward, done,
    avail_actions[, extra0, extra1]) — assumed from the indexing below;
    TODO confirm against the code that writes these files.

    Args:
        pkl_dir: directory containing the input ``.pkl`` files.
        json_dir: output directory; created if it does not exist.
        _log: logger used for progress and error reporting.
        batch_size: kept for backward compatibility; currently unused.
    """

    def _ensure_serializable(data):
        # Reduce arbitrary values to JSON-compatible primitives.
        if data is None:
            return None
        if isinstance(data, (np.ndarray, torch.Tensor)):
            return data.tolist()
        if isinstance(data, np.generic):
            # numpy scalars (np.float32, np.int64, ...) are not JSON-serializable
            return data.item()
        if isinstance(data, dict):
            return {str(k): _ensure_serializable(v) for k, v in data.items()}
        if isinstance(data, (list, tuple)):
            return [_ensure_serializable(x) for x in data]
        if isinstance(data, (int, float, str, bool)):
            return data
        # Last resort: stringify unknown objects so json.dump never fails.
        return str(data)

    if not os.path.exists(pkl_dir):
        _log.error(f"PKL目录不存在: {pkl_dir}")
        return

    os.makedirs(json_dir, exist_ok=True)
    pkl_files = [f for f in os.listdir(pkl_dir) if f.endswith(".pkl")]

    if not pkl_files:
        _log.error(f" PKL目录为空: {pkl_dir}")
        return

    _log.info(f"发现 {len(pkl_files)} 个 .pkl 文件，开始转换...")
    success = 0

    for filename in tqdm(pkl_files, desc="转换中"):
        try:
            with open(join(pkl_dir, filename), "rb") as f:
                data = pickle.load(f)

            episode = []
            for agent_traj in data:
                traj = []
                for step in agent_traj:
                    traj.append([
                        _ensure_serializable(step[0]),  # state
                        _ensure_serializable(step[1]),  # obs
                        # action / reward may be wrapped in a one-element list
                        [int(step[2][0])] if isinstance(step[2], list) else [int(step[2])],
                        [float(step[3][0])] if isinstance(step[3], list) else [float(step[3])],
                        bool(step[4]),                  # done flag
                        _ensure_serializable(step[5]),  # available actions
                        # optional trailing fields with fixed fallbacks
                        [step[6][0]] if len(step) > 6 else [0],
                        [step[7][0]] if len(step) > 7 else [1]
                    ])
                episode.append(traj)

            json_path = join(json_dir, filename.replace(".pkl", ".json"))
            with open(json_path, "w") as f:
                json.dump(episode, f, indent=2)
            success += 1
        except Exception as e:
            # Bug fix: report WHICH file failed instead of the literal "(unknown)".
            _log.error(f"❌ 转换失败 ({filename}): {e}")

    _log.info(f"转换完成: {success}/{len(pkl_files)} 文件")
    _log.info(f"JSON文件保存目录: {json_dir}")



@ex.main
def my_main(_run, _config: Dict, _log):
    """Main Sacred entry point: seed RNGs, run training, then convert logged data.

    Args:
        _run: the Sacred Run object for this experiment.
        _config: the merged experiment configuration injected by Sacred.
        _log: the logger injected by Sacred.
    """
    # Deep-copy the config so Sacred's read-only dict is never mutated.
    config = config_copy(_config)
    np.random.seed(config["seed"])
    th.manual_seed(config["seed"])
    config['env_args']['seed'] = config["seed"]

    # Run the main training loop
    run(_run, config, _log)

    # === Fixed-path data conversion ===
    # NOTE(review): hard-coded absolute Windows paths — these only work on the
    # original author's machine; consider moving them into the config. TODO confirm.
    pkl_dir = "C:/Users/lenovo/Desktop/multi-agent-avoidance/pymarl/src/offline_data"
    json_dir = "C:/Users/lenovo/Desktop/multi-agent-avoidance/offline_data/2s3z/good"

    _log.info(f"转换路径：\n PKL目录 = {pkl_dir}\n JSON输出 = {json_dir}")
    convert_and_save_to_json(pkl_dir, json_dir, _log)


def _get_config(params, arg_name, subfolder):
    config_name = None
    for _i, _v in enumerate(params):
        if _v.split("=")[0] == arg_name:
            config_name = _v.split("=")[1]
            del params[_i]
            break

    if config_name is not None:
        with open(os.path.join(os.path.dirname(__file__), "config", subfolder, "{}.yaml".format(config_name)), "r", encoding="utf-8") as f:
            try:
                config_dict = yaml.safe_load(f)  # 安全加载方式
            except yaml.YAMLError as exc:
                assert False, "{}.yaml error: {}".format(config_name, exc)
        return config_dict


def recursive_dict_update(d, u):
    """Recursively merge mapping *u* into dict *d* (in place) and return *d*.

    Nested mappings are merged key-by-key; any other value in *u* overwrites
    the corresponding entry in *d*.
    """
    for k, v in u.items():
        # Bug fix: collections.Mapping was removed in Python 3.10;
        # the ABC lives in collections.abc.
        if isinstance(v, collections.abc.Mapping):
            d[k] = recursive_dict_update(d.get(k, {}), v)
        else:
            d[k] = v
    return d


def config_copy(config):
    """Return a deep copy of *config*, rebuilding dicts and lists explicitly."""
    if isinstance(config, list):
        return [config_copy(item) for item in config]
    if isinstance(config, dict):
        return {key: config_copy(value) for key, value in config.items()}
    # Scalars and anything else fall through to a generic deep copy.
    return deepcopy(config)


if __name__ == '__main__':
    # Copy argv so sacred can consume tokens without mutating sys.argv itself.
    params = deepcopy(sys.argv)

    # Load the default configuration.
    # NOTE(review): opened without an explicit encoding (unlike _get_config,
    # which uses utf-8) — relies on the platform default; confirm intended.
    with open(join(os.path.dirname(__file__), "config", "default.yaml"), "r") as f:
        try:
            config_dict = yaml.safe_load(f)
        except yaml.YAMLError as exc:
            raise ValueError(f"default.yaml error: {exc}")

    # Merge environment and algorithm configs on top of the defaults
    # (later updates win key-by-key).
    env_config = _get_config(params, "--env-config", "envs")
    alg_config = _get_config(params, "--config", "algs")
    config_dict = recursive_dict_update(config_dict, env_config)
    config_dict = recursive_dict_update(config_dict, alg_config)

    # Register the merged config with Sacred
    ex.add_config(config_dict)

    # Attach a FileStorageObserver so run artefacts land under results/sacred
    file_obs_path = join(results_path, "sacred")
    ex.observers.append(FileStorageObserver.create(file_obs_path))

    ex.run_commandline(params)