"""本工具集用于各类文件读写操作
"""

from typing import Any, Callable, List, Literal, Tuple, Union
from pydantic_core import to_jsonable_python
from pathlib import Path
import aiofiles
import chardet
import json
import csv


class JOSNTools:
    # NOTE(review): class name looks like a typo for "JSONTools"; kept as-is
    # because renaming would break existing callers.

    @staticmethod
    def read_json_file(json_file: str, encoding: str = "utf-8") -> list[Any]:
        """Read and parse a JSON file.

        Args:
            json_file: path of the JSON file to read.
            encoding: text encoding used to open the file (default utf-8).

        Returns:
            The parsed JSON content (typically a list, per write_json_file).

        Raises:
            FileNotFoundError: if the file does not exist.
            ValueError: if the file content is not valid JSON.
        """
        if not Path(json_file).exists():
            raise FileNotFoundError(f"json_file: {json_file} does not exist")

        with open(json_file, "r", encoding=encoding) as fin:
            try:
                data = json.load(fin)
            except (json.JSONDecodeError, UnicodeDecodeError) as e:
                # Chain the original error so the real parse failure is visible.
                raise ValueError(f"read json file: {json_file} failed") from e
        return data

    @staticmethod
    def write_json_file(json_file: str, data: list, encoding: str = "utf-8", indent: int = 4):
        """Serialize `data` to a JSON file, creating parent directories as needed.

        Args:
            json_file: destination path.
            data: JSON-serializable payload (non-JSON types are converted
                via pydantic's to_jsonable_python).
            encoding: text encoding for the output file. Defaults to utf-8
                because ensure_ascii=False emits raw non-ASCII characters,
                which the platform's locale encoding may not support.
            indent: indentation width for pretty printing.
        """
        # mkdir with exist_ok=True is race-free; no need to pre-check exists().
        Path(json_file).parent.mkdir(parents=True, exist_ok=True)

        with open(json_file, "w", encoding=encoding) as fout:
            json.dump(data, fout, ensure_ascii=False, indent=indent, default=to_jsonable_python)


class CSVTools:

    @staticmethod
    def read_csv_to_list(curr_file: str, header=False, strip_trail=True, encoding: str = None):
        """Read a CSV file into a list of rows (each row a list of strings).

        Args:
            curr_file: path to the csv file.
            header: when True, return a (header_row, data_rows) tuple
                instead of the flat list of all rows.
            strip_trail: strip surrounding whitespace from every cell.
            encoding: text encoding passed to open(); None keeps the
                previous behavior (platform default encoding).

        Returns:
            list of rows, or (header, rows) when header is True.

        Raises:
            FileNotFoundError: if curr_file does not exist.
            IndexError: if header is True and the file is empty.
        """
        print(f"start read csv: {curr_file}")
        rows = []
        # newline="" is required by the csv module so quoted fields with
        # embedded newlines are parsed correctly.
        with open(curr_file, newline="", encoding=encoding) as fin:
            for row in csv.reader(fin, delimiter=","):
                if strip_trail:
                    row = [cell.strip() for cell in row]
                rows.append(row)
        if header:
            return rows[0], rows[1:]
        return rows


class FILETools:
    @staticmethod
    async def aread(filename: str | Path, encoding="utf-8") -> str:
        """Read a text file asynchronously.

        Tries `encoding` first; on a UnicodeDecodeError the file is re-read
        as bytes and the actual encoding is detected with chardet.

        Args:
            filename: path of the file to read.
            encoding: encoding to attempt first (default utf-8).

        Returns:
            The decoded file content.
        """
        try:
            async with aiofiles.open(str(filename), mode="r", encoding=encoding) as reader:
                content = await reader.read()
        except UnicodeDecodeError:
            # Fallback: detect the charset from the raw bytes.
            async with aiofiles.open(str(filename), mode="rb") as reader:
                raw = await reader.read()
            result = chardet.detect(raw)
            # chardet may fail to detect (encoding=None, e.g. empty input);
            # fall back to utf-8 rather than raw.decode(None) -> TypeError.
            detected_encoding = result["encoding"] or "utf-8"
            content = raw.decode(detected_encoding)
        return content

    @staticmethod
    async def awrite(filename: str | Path, data: str, encoding="utf-8"):
        """Write a string to a file asynchronously, creating parent dirs.

        Args:
            filename: destination path.
            data: text content to write.
            encoding: text encoding for the output file.
        """
        pathname = Path(filename)
        pathname.parent.mkdir(parents=True, exist_ok=True)
        async with aiofiles.open(str(pathname), mode="w", encoding=encoding) as writer:
            await writer.write(data)

    @staticmethod
    async def read_file_block(filename: str | Path, lineno: int, end_lineno: int) -> List:
        """Asynchronously read a block of lines from a file.

        Args:
            filename: path of the file to read.
            lineno: first line to include (1-based, inclusive).
            end_lineno: last line to include (1-based, inclusive).

        Returns:
            The selected lines (with trailing newlines). Empty list when
            the file does not exist or the range lies past EOF.
        """
        if not Path(filename).exists():
            # Original returned "" here, contradicting the declared List
            # return type and the docstring; return [] instead.
            return []
        lines: List = []
        async with aiofiles.open(str(filename), mode="r") as reader:
            ix = 0
            while ix < end_lineno:
                ix += 1
                line = await reader.readline()
                if not line:
                    # EOF before end_lineno: stop instead of padding the
                    # result with empty strings.
                    break
                if ix < lineno:
                    continue
                lines.append(line)
        return lines