"""
@Author    : ghenyar
@Time      : 2025/8/28 13:16
@File      : utils
@Desc      : 
"""
import base64
import hashlib
import logging
import os
from datetime import datetime
from pathlib import Path
from typing import AsyncIterable, Iterable

from starlette.responses import StreamingResponse


def determine_part_size_internal(total_size: int, preferred_size: int) -> int:
    """
    Determine the part size for a multipart upload.

    :param total_size: total number of bytes to upload (the file size)
    :param preferred_size: the caller's preferred part size in bytes
    :return: a part size that keeps the part count within ``max_count``
             and is at least ``min_part_size`` (unless the whole file is
             smaller than one part, in which case ``total_size`` is returned)
    """
    max_count = 1000            # maximum number of parts allowed
    min_part_size = 100 * 1024  # lower bound for a single part (100 KiB)

    # Guard: a non-positive preferred size would make the doubling loop
    # below spin forever (0 * 2 == 0 can never reach min_part_size).
    if preferred_size <= 0:
        preferred_size = min_part_size

    # A file smaller than one part is uploaded as a single part.
    if total_size < preferred_size:
        return total_size

    # Double the part size until the part count fits under max_count
    # and the size reaches the minimum allowed part size.
    while preferred_size * max_count < total_size or preferred_size < min_part_size:
        preferred_size *= 2

    return preferred_size


def content_md5(file_path: str):
    """
    Compute the Base64-encoded MD5 digest of a file for integrity checks.

    :param file_path: path to the file on disk
    :return: Base64-encoded MD5 digest as ``bytes``
    """
    chunk_size = 8192  # read in small chunks so large files stay memory-friendly
    digest = hashlib.md5()
    with open(file_path, 'rb') as handle:
        while chunk := handle.read(chunk_size):
            digest.update(chunk)
    return base64.standard_b64encode(digest.digest())


def get_file_start_end(range_header: str, file_size: int):
    """
    Translate an HTTP Range header into an inclusive (start, end) byte pair.

    :param range_header: raw header value, e.g. ``"bytes=0-1023"`` (may be empty)
    :param file_size: total size of the file in bytes
    :return: ``(start, end)`` byte offsets, capped at one 2 MB chunk
    """
    chunk_size = 1024 * 1024 * 2  # serve at most 2 MB per request
    # Without a header, serve the first chunk.
    start = 0
    end = chunk_size - 1

    if range_header:
        # Expected form "bytes=start-end"; either side may be omitted.
        lo, _, hi = range_header.replace("bytes=", "").partition("-")
        if lo:
            start = int(lo)
        else:
            start = 0
        if hi:
            end = int(hi)
        else:
            end = start + chunk_size - 1

    # Never read past the last byte of the file.
    end = min(end, file_size - 1)

    # Never hand back more than one chunk at a time.
    if end - start + 1 > chunk_size:
        end = start + chunk_size - 1

    return start, end


def stream_file(fileName: str, stream: AsyncIterable[str | bytes | memoryview] | Iterable[str | bytes | memoryview], header: dict = None):
    """
    Wrap a chunk stream in a partial-content (206) StreamingResponse.

    :param fileName: file name used for the Content-Disposition header
    :param stream: sync or async iterable of str/bytes/memoryview chunks
    :param header: extra headers merged over the defaults (optional)
    :return: starlette ``StreamingResponse`` with status code 206
    """
    media_type = "application/octet-stream"
    if Path(fileName).suffix == ".svg":
        media_type = "image/svg+xml"

    headers = {"Content-Disposition": f"attachment; filename={fileName}", "Accept-Ranges": "bytes"}
    if header is not None:
        headers.update(header)

    def _to_bytes(chunk):
        # Normalize each chunk to bytes. Previously memoryview chunks were
        # silently dropped (only str/bytes were yielded) even though the
        # signature accepts them.
        if isinstance(chunk, str):
            return chunk.encode("utf-8")  # assume UTF-8 text chunks
        if isinstance(chunk, memoryview):
            return bytes(chunk)
        return chunk

    # The signature accepts AsyncIterable, but the old sync `for` loop would
    # raise TypeError on one — dispatch to the matching generator kind.
    if isinstance(stream, AsyncIterable):
        async def byte_stream():
            async for chunk in stream:
                yield _to_bytes(chunk)
    else:
        def byte_stream():
            for chunk in stream:
                yield _to_bytes(chunk)

    return StreamingResponse(
        byte_stream(),
        media_type=media_type,
        headers=headers,
        status_code=206,
    )


def logger(name: str, message: str):
    """
    Log an ERROR-level message to a per-day file under ``logs/``.

    :param name: logger name (namespaces the log records)
    :param message: message text to record
    """
    log = logging.getLogger(name)
    log.setLevel(logging.ERROR)

    # Per-day log file inside the "logs" folder; create the folder if missing.
    folder_name = "logs"
    os.makedirs(folder_name, exist_ok=True)
    date = datetime.now().strftime("%Y_%m_%d")
    log_path = os.path.join(folder_name, f"log_{date}.log")

    # Bug fix: the old code attached a brand-new FileHandler on EVERY call,
    # leaking file descriptors and writing each message once per prior call.
    # Only attach a handler if none already targets today's file.
    target = os.path.abspath(log_path)
    has_handler = any(
        isinstance(h, logging.FileHandler) and h.baseFilename == target
        for h in log.handlers
    )
    if not has_handler:
        handler = logging.FileHandler(log_path, encoding="utf-8")
        fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        handler.setFormatter(logging.Formatter(fmt))
        log.addHandler(handler)

    log.error(message)