import asyncio
import json
import os
import subprocess
import traceback
import uuid
from copy import deepcopy
from typing import Any, Dict

import aiohttp
import requests
import yaml
from lucommon.logger import lu_logger

import conf
from app.settings import BASE_DIR

DELIMITER = " \\\n    "
SYSTEM_DIR = "/etc/systemd/system/"


# TODO: unify/merge the read_* and write_* file helper functions below

def parse_kwargs(setting_path) -> dict:
    """Load a JSON settings file and return its contents as a dict.

    :param setting_path: path to a JSON file containing keyword arguments
    :return: the parsed settings dict
    :raises json.JSONDecodeError: if the file is not valid JSON
    """
    with open(setting_path, 'r') as file:
        # json.load parses straight from the file object instead of
        # reading the whole text into memory first.
        return json.load(file)


def download_file(url, path):
    """Stream the resource at *url* into a local file at *path*.

    Downloads in 8 KiB chunks so large files never have to fit in memory.
    Raises requests.HTTPError on a non-2xx response.

    :return: the destination *path*
    """
    with requests.get(url, stream=True) as res:
        res.raise_for_status()
        with open(path, "wb") as out:
            for block in res.iter_content(chunk_size=8192):
                # Skip keep-alive chunks, which arrive as empty bytes.
                if block:
                    out.write(block)
    lu_logger.debug("download file from 「{}」 to 「{}」".format(url, path))
    return path


def write_file(path, content, mode="w"):
    """Write *content* to *path*; *mode* defaults to truncate-write ("w")."""
    with open(path, mode=mode) as out:
        out.write(content)


def read_from_json(path):
    """Read *path* and return the deserialized JSON value.

    :param path: path to a JSON document
    :return: the parsed object (dict, list, str, ...)
    :raises json.JSONDecodeError: if the file is not valid JSON
    """
    with open(path, "r") as f:
        # json.load parses directly from the file handle; no need to
        # materialize the whole text with f.read() first.
        return json.load(f)


def read_from_file(path, encoding=None):
    """Return the full text contents of *path*.

    :param encoding: text encoding forwarded to open(); None means the
        platform default.
    """
    with open(path, "r", encoding=encoding) as handle:
        return handle.read()


def read_from_yaml(path):
    """Parse a single-document YAML file and return the resulting object.

    TODO: yaml.load returns only the first document; switch to
    yaml.load_all if multi-document files ever need to be supported.

    NOTE(review): FullLoader is safer than the legacy Loader but should
    still not be fed untrusted input; prefer yaml.safe_load where the
    documents don't need arbitrary Python tags.
    """
    with open(path, 'r') as f:
        # yaml.load accepts the stream directly; no need for f.read().
        return yaml.load(f, Loader=yaml.FullLoader)


def write_to_json(path, object):
    """Serialize *object* as JSON and write it to *path* (overwriting).

    The parameter keeps its historical name ``object`` (which shadows the
    builtin) so keyword callers stay compatible.
    """
    with open(path, "w") as f:
        # json.dump streams to the file handle instead of building the
        # whole JSON string in memory first.
        json.dump(object, f)


def write_to_yaml(path, object):
    """Dump *object* to *path* as YAML, keeping non-ASCII text readable
    (allow_unicode avoids escaped \\uXXXX sequences)."""
    with open(path, 'w') as stream:
        yaml.dump(object, stream, allow_unicode=True)


def _execute_by_sync(cmd: str, input: str):
    """Run *cmd* in a shell and wait for it to finish.

    :param cmd: shell command line (executed with shell=True — never pass
        untrusted input here)
    :param input: text piped to the command's stdin; empty string means
        no payload (a stdin PIPE is opened but nothing is written)
    :return: (return_code, stdout, stderr) as (int, str, str)
    """
    kwargs = {
        "args": cmd,
        "shell": True,
        "stdin": subprocess.PIPE,
        "stdout": subprocess.PIPE,
        # BUG FIX: stderr was never captured, so proc.stderr was always
        # None and the failure log printed "None" for stderr.
        "stderr": subprocess.PIPE,
        # text=True already implies universal_newlines; the duplicate
        # "universal_newlines" key was redundant and has been dropped.
        "text": True,
    }
    if input:
        # subprocess.run forbids combining stdin= with input=.
        kwargs.pop("stdin")
        kwargs["input"] = input

    proc = subprocess.run(**kwargs)  # todo switch to Popen

    return_code, stdout, stderr = proc.returncode, proc.stdout, proc.stderr
    if return_code == 0:
        lu_logger.debug("「{}」exit with code「{}」".format(cmd, return_code))
    else:
        lu_logger.warn("「{}」exit with code「{}」stderr「{}」stdout「{}」".format(cmd, return_code, stderr, stdout))

    return return_code, stdout, stderr


def execute_cmd(cmd):
    """Run a single shell command with no stdin payload; see _execute_by_sync."""
    return _execute_by_sync(cmd, input="")


def execute_cmd_chains(chains, type="pipe"):
    """Execute a sequence of shell commands, stopping at the first failure.

    :param chains: iterable of shell command strings
    :param type:
        "pipe": each command's stdout is fed to the next one's stdin,
                like ``cmd1 | cmd2 | ...``
        anything else (e.g. "sync"): commands run one after another with
                no data passed between them, like ``cmd1 && cmd2 && ...``
    :return: (return_code, stdout, stderr) of the first failing command,
        or (0, last_stdout, last_stderr) when every command succeeds
    """
    code, out, err = 0, "", ""

    for command in chains:
        feed = out if type == "pipe" else ""
        code, out, err = _execute_by_sync(command, input=feed)
        if code != 0:
            return code, out, err

    return 0, out, err


def gen_static_dir() -> str:
    """Create and return a fresh uniquely-named directory under BASE_DIR/static.

    :return: path of the newly created directory
    """
    static_root = os.path.join(BASE_DIR, "static")
    # exist_ok avoids the check-then-create race the previous
    # `if not exists: makedirs` pattern had.
    os.makedirs(static_root, exist_ok=True)
    new_dir = os.path.join(static_root, str(uuid.uuid4()))
    os.mkdir(new_dir)
    return new_dir


def gen_tmp_file_path() -> str:
    """Return a unique file path under BASE_DIR/tmp (the file is NOT created).

    :return: path string suitable for writing a temporary file
    """
    tmp_dir = os.path.join(BASE_DIR, "tmp")
    # exist_ok avoids the check-then-create race; os.path.join replaces
    # the hand-built "{}/{}" path strings used before.
    os.makedirs(tmp_dir, exist_ok=True)
    return os.path.join(tmp_dir, str(uuid.uuid4()))


def get_static_url(path: Any):
    """Convert a local static-file path into its download URL.

    Strings become ``conf.STATIC_URL_PREFIX + <part after "/static/">``;
    lists and dicts are rewritten recursively. A deep copy is returned in
    every case, so the input is never mutated. Other types pass through
    unchanged (as a copy).

    FIX: the annotation previously used the *builtin function* ``any``
    instead of ``typing.Any``, which misleads type checkers.
    """
    prefix = conf.STATIC_URL_PREFIX

    path = deepcopy(path)
    # todo normalize paths via os.path
    if isinstance(path, str):
        # Keep only the portion after the last "/static/" marker; strings
        # without the marker are used verbatim.
        path_list = path.split("/static/")
        relative_path = path_list[-1]
        return prefix + relative_path

    if isinstance(path, list):
        for i in range(len(path)):
            path[i] = get_static_url(path[i])
        return path

    if isinstance(path, dict):
        for k in path.keys():
            path[k] = get_static_url(path[k])
        return path

    return path


async def _async_request(request_id, result, **request_info):
    """Perform one HTTP request and record its outcome under result[request_id].

    On success the entry carries status / headers / body; on any exception
    the "error" field holds str(e.args) and the other fields keep their
    zero values. Never raises — failures are captured in the entry.
    """
    outcome = {
        "status": 0,
        "headers": {},
        "body": "",
        "error": "",
    }
    async with aiohttp.ClientSession() as session:
        try:
            async with session.request(**request_info) as resp:
                lu_logger.debug("request: {}, status: {}".format(request_id, resp.status))
                outcome["status"] = resp.status
                outcome["headers"] = dict(resp.headers)
                outcome["body"] = await resp.text()
        except Exception as exc:
            lu_logger.error(traceback.format_exc())
            outcome["error"] = str(exc.args)

        result[request_id] = outcome


def concurrence_requests(req_dict: Dict[str, Dict]) -> Dict[str, Dict]:
    """Fire multiple HTTP requests concurrently and collect every outcome.

    *req_dict* maps a unique request id to a request_info dict whose keys
    mirror the parameters of aiohttp.ClientSession().request() (url,
    method, params, ...). Returns a dict with the same keys; each value
    describes either the response or the failure for that request.
    """
    result = {}

    async def _fan_out():
        tasks = []
        for req_id, req_info in req_dict.items():
            tasks.append(_async_request(req_id, result, **req_info))
        await asyncio.gather(*tasks)

    asyncio.run(_fan_out())

    return result


def clean_path(path) -> str:
    """Collapse redundant separators and up-level references in *path*."""
    normalized = os.path.normpath(path)
    return normalized
