# serialization.py
"""
安全的序列化 + 压缩工具，支持单对象与批量流式 I/O

功能：
  - xdumps / xloads: 序列化 + 压缩 → bytes / 从 bytes 反序列化（内存操作）
  - xdump / xload: 写入文件 / 从文件读取（单对象）
  - xdump_iter / xload_bult: 批量流式写入/读取多个对象
  - compress / decompress: 纯 zstd 压缩（带 header）

特性：
  - 线程安全：压缩器/解压器线程本地存储
  - 内存安全：限制最大解压大小，防 zstd bomb
  - 安全警告：使用 pickle 时提醒反序列化风险
  - 流式处理：xload_bult 支持生成器，内存友好
  - 自动压缩判断、原子写入、异常处理完善

⚠️ 安全警告：
  - 所有反序列化操作（xloads, xload, xload_bult）使用 pickle.loads()，存在代码执行风险！
  - 仅用于可信数据源，禁止用于网络输入、用户上传等不可信场景。
  - 推荐用于内部服务通信、Redis 缓存、日志存储等可信环境。

依赖：
    pip install zstandard

导出函数：
    - xdumps: 序列化 + 压缩 → bytes
    - xloads: 解压 + 反序列化 ← bytes
    - xdump: 序列化 + 压缩 → 文件
    - xload: 解压 + 反序列化 ← 文件
    - xdump_iter: 批量写入多个对象到文件
    - xload_bult: 从文件逐个读取对象（生成器）
    - compress: 仅压缩（带 header）
    - decompress: 仅解压（带 header）

命名说明：
    xdumps / xloads 类比 pickle.dumps / pickle.loads
    xdump / xload 类比 pickle.dump / pickle.load
    xdump_iter / xload_bult 表示批量写入和迭代读取
"""

import os
import pickle
import struct
import tempfile
import threading
import warnings
from typing import Any, Optional, Callable, Iterable, Generator
import zstandard as zstd
from logging import getLogger
logger = getLogger(__name__)

# Public API, consumed by `from <module> import *`.
# NOTE(review): 'xload_bult' looks like a typo of 'xload_bulk', but it matches
# the actual function name defined below; renaming it here alone would break
# star-import consumers, so the spelling is deliberately left untouched.
__all__ = [
    'xdumps',
    'xloads',
    'xdump',
    'xload',
    'xdump_iter',
    'xload_bult',
    'compress',
    'decompress',
]
# Module version string.
__version__ = "1.5.0"


# ==============================
# Default configuration
# ==============================

# Payloads smaller than this (bytes) are stored uncompressed in auto mode.
_DEFAULT_COMPRESS_THRESHOLD: int = 1024
# Upper bound on decompressed output — guards against zstd decompression bombs.
_DEFAULT_MAX_DECOMPRESSED_SIZE: int = 100 * 1024 * 1024  # 100MB
# Default zstd compression level (valid range is 1-22).
_DEFAULT_ZSTD_LEVEL: int = 3
# Always pickle with the newest protocol this interpreter supports.
_PICKLE_PROTOCOL: int = pickle.HIGHEST_PROTOCOL


# ==============================
# Thread-local storage
# ==============================

# Per-thread cache of zstd compressor/decompressor instances; zstd context
# objects are not safe to share across threads.
_tls = threading.local()


# ==============================
# One-time security-warning control (thread-safe)
# ==============================

# Flag + lock implementing a once-per-process RuntimeWarning about the
# dangers of unpickling untrusted data (see xloads).
_pickle_warning_shown = False
_pickle_warning_lock = threading.Lock()


# ==============================
# Helper functions
# ==============================

def _get_compressor(level: int = _DEFAULT_ZSTD_LEVEL) -> zstd.ZstdCompressor:
    """Return this thread's cached ZstdCompressor for *level*, creating it on first use."""
    cache = getattr(_tls, 'compressors', None)
    if cache is None:
        # First call on this thread: start a fresh per-thread cache.
        cache = {}
        _tls.compressors = cache
    try:
        return cache[level]
    except KeyError:
        instance = zstd.ZstdCompressor(level=level)
        cache[level] = instance
        return instance


def _get_decompressor() -> zstd.ZstdDecompressor:
    """Return this thread's cached ZstdDecompressor, creating it on first access."""
    try:
        return _tls.decompressor
    except AttributeError:
        # First call on this thread: create and cache one instance.
        instance = zstd.ZstdDecompressor()
        _tls.decompressor = instance
        return instance


def _write_bytes_atomic(data: bytes, filepath: str) -> None:
    """原子写入 bytes 到文件"""
    dir_path = os.path.dirname(filepath) or '.'
    os.makedirs(dir_path, exist_ok=True)
    with tempfile.NamedTemporaryFile('wb', dir=dir_path, delete=False) as f:
        temp_path = f.name
        f.write(data)
    try:
        os.replace(temp_path, filepath)
    except:
        os.unlink(temp_path)
        raise


def _read_file_bytes(filepath: str) -> bytes:
    """安全读取文件为 bytes"""
    if not os.path.exists(filepath):
        raise FileNotFoundError(f"File not found: {filepath}")
    try:
        with open(filepath, 'rb') as f:
            return f.read()
    except Exception as e:
        raise IOError(f"Failed to read file {filepath}: {str(e)}") from e


def is_pickleable(obj: Any) -> bool:
    """Return True if *obj* can be serialized with pickle (e.g. for multiprocessing).

    Any exception raised by pickle.dumps() is treated as "not pickleable"
    and logged at WARNING level; the broad catch is intentional because
    pickling can fail with many exception types.

    Args:
        obj: Object to probe.

    Returns:
        bool: True when pickling succeeds, False otherwise.
    """
    try:
        pickle.dumps(obj)
    except Exception as e:
        # Lazy %-style args instead of an f-string: the message is only
        # formatted when the record is actually emitted.
        logger.warning("pickle 检查异常: %s", e, exc_info=True)
        return False
    return True

# ==============================
# 核心压缩接口
# ==============================

def compress(
    data: bytes,
    level: int = _DEFAULT_ZSTD_LEVEL,
    force: bool = False,
    threshold: int = _DEFAULT_COMPRESS_THRESHOLD
) -> bytes:
    """
    Compress binary data (thread-safe).

    Args:
        data: Input bytes.
        level: zstd compression level (1-22).
        force: Skip the size-threshold check and always attempt compression.
        threshold: Minimum size (bytes) for automatic compression.

    Returns:
        bytes: One header byte plus payload — 0x01 means the payload is
        zstd-compressed, 0x00 means it is stored raw.  The header always
        matches the payload so decompress() can trust it.
    """
    if not force and len(data) < threshold:
        return b'\x00' + data

    compressed = _get_compressor(level).compress(data)
    if len(compressed) < len(data):
        return b'\x01' + compressed
    # BUG FIX: the original returned the RAW bytes behind an 0x01
    # ("compressed") header when compression did not shrink the data, which
    # made decompress() feed uncompressed bytes to zstd and fail.  Store
    # incompressible data under the 0x00 header instead.
    return b'\x00' + data


def decompress(
    data: bytes,
    max_size: int = _DEFAULT_MAX_DECOMPRESSED_SIZE
) -> bytes:
    """
    Decompress data produced by compress() (thread-safe).

    Args:
        data: Header-prefixed payload (0x00 = stored raw, 0x01 = zstd).
        max_size: Maximum allowed size of the decompressed output, in bytes.

    Returns:
        bytes: The original, uncompressed data.

    Raises:
        TypeError: If *data* is not bytes.
        ValueError: On empty input, unknown header, or oversized output.
        RuntimeError: On any other decompression failure.
    """
    if not isinstance(data, bytes):
        raise TypeError(f"Expected bytes, got {type(data).__name__}")
    if not data:
        raise ValueError("Data is empty")

    header, payload = data[0], data[1:]

    if header == 0x00:
        # Stored without compression; the payload already is the raw data.
        return payload
    if header != 0x01:
        raise ValueError(f"Invalid header byte: 0x{header:02x}")

    try:
        return _get_decompressor().decompress(payload, max_output_size=max_size)
    except zstd.ZstdError as e:
        err_msg = str(e)
        # zstd signals the output cap being hit via its error text; map that
        # specific case to ValueError so callers can distinguish it.
        if "output size too large" in err_msg or "exceeds maximum" in err_msg:
            raise ValueError(f"Decompressed data exceeds {max_size} bytes") from e
        raise RuntimeError(f"Zstd decompression failed: {err_msg}") from e
    except Exception as e:
        raise RuntimeError(f"Decompression failed: {str(e)}") from e


# ==============================
# 高级序列化接口（单对象）
# ==============================

def xdumps(
    obj: Any,
    *,
    protocol: int = _PICKLE_PROTOCOL,
    compress: Optional[bool] = None,
    compress_threshold: int = _DEFAULT_COMPRESS_THRESHOLD,
    zstd_level: int = _DEFAULT_ZSTD_LEVEL,
) -> bytes:
    """
    Serialize and compress a Python object into bytes (thread-safe).

    Args:
        obj: Object to serialize.
        protocol: pickle protocol number.
        compress: True/False to force a choice; None means auto-decide by size.
        compress_threshold: Size (bytes) above which auto mode compresses.
        zstd_level: zstd compression level.

    Returns:
        bytes: Header-prefixed payload (0x00 = raw pickle, 0x01 = zstd pickle).

    Raises:
        ValueError: If the object cannot be pickled.
        RuntimeError: If compression fails.
    """
    try:
        serialized: bytes = pickle.dumps(obj, protocol=protocol)
    except (pickle.PicklingError, TypeError) as e:
        raise ValueError(f"Object is not serializable: {str(e)}") from e

    # Identity checks on purpose: any value other than the True/False
    # singletons falls through to the size-based auto decision.
    if compress is True:
        should_compress = True
    elif compress is False:
        should_compress = False
    else:
        should_compress = len(serialized) >= compress_threshold

    if not should_compress:
        return b'\x00' + serialized

    compressor = _get_compressor(zstd_level)
    try:
        compressed: bytes = compressor.compress(serialized)
    except Exception as e:
        raise RuntimeError(f"Compression failed: {str(e)}") from e

    if len(compressed) < len(serialized):
        return b'\x01' + compressed
    # BUG FIX: incompressible data must be stored with the 0x00 (raw) header;
    # the original tagged raw pickle bytes with 0x01, which made xloads()
    # hand uncompressed data to the zstd decompressor and fail.
    return b'\x00' + serialized


def xloads(
    data: bytes,
    *,
    max_size: int = _DEFAULT_MAX_DECOMPRESSED_SIZE,
    pickle_loads: Callable[[bytes], Any] = pickle.loads
) -> Any:
    """
    Decompress bytes and deserialize the contained object (thread-safe).

    ⚠️ Uses pickle by default — never feed data from untrusted sources.

    Args:
        data: Header-prefixed payload as produced by xdumps().
        max_size: Maximum allowed decompressed size, in bytes.
        pickle_loads: Alternative deserializer (e.g. a sandboxed loader).

    Returns:
        Any: The deserialized object.

    Raises:
        ValueError: On malformed input or pickle failure.
        RuntimeError: On any other decode failure.
    """
    # Emit the pickle-safety RuntimeWarning at most once per process.
    # Double-checked locking: the unlocked read keeps the common path cheap.
    global _pickle_warning_shown
    if not _pickle_warning_shown:
        with _pickle_warning_lock:
            if not _pickle_warning_shown:
                warnings.warn(
                    "Unpickling data from untrusted sources is dangerous! Only use with trusted data.",
                    RuntimeWarning,
                    stacklevel=2
                )
                _pickle_warning_shown = True

    try:
        raw: bytes = decompress(data, max_size=max_size)
        return pickle_loads(raw)
    except ValueError:
        # Size-limit and format errors pass through unchanged.
        raise
    except pickle.UnpicklingError as e:
        raise ValueError(f"Deserialization failed: {str(e)}") from e
    except Exception as e:
        raise RuntimeError(f"Decode failed: {str(e)}") from e


def xdump(
    obj: Any,
    filepath: str,
    *,
    protocol: int = _PICKLE_PROTOCOL,
    compress: Optional[bool] = None,
    compress_threshold: int = _DEFAULT_COMPRESS_THRESHOLD,
    zstd_level: int = _DEFAULT_ZSTD_LEVEL,
) -> None:
    """
    Serialize, compress, and atomically write an object to a file.

    Args:
        obj: Object to persist.
        filepath: Destination file path.
        protocol, compress, compress_threshold, zstd_level: Same as xdumps().

    Raises:
        RuntimeError: If writing the file fails.
    """
    payload = xdumps(
        obj,
        protocol=protocol,
        compress=compress,
        compress_threshold=compress_threshold,
        zstd_level=zstd_level,
    )
    try:
        _write_bytes_atomic(payload, filepath)
    except Exception as e:
        raise RuntimeError(f"Failed to save data to {filepath}: {str(e)}") from e


def xload(
    filepath: str,
    *,
    max_size: int = _DEFAULT_MAX_DECOMPRESSED_SIZE,
    pickle_loads: Callable[[bytes], Any] = pickle.loads
) -> Any:
    """
    Load a file written by xdump() and deserialize the stored object.

    Args:
        filepath: Source file path.
        max_size: Maximum allowed decompressed size, in bytes.
        pickle_loads: Alternative deserializer (e.g. a sandboxed loader).

    Returns:
        Any: The deserialized object.
    """
    raw = _read_file_bytes(filepath)
    return xloads(raw, max_size=max_size, pickle_loads=pickle_loads)


# ==============================
# 批量流式 I/O 接口
# ==============================

def xdump_iter(
    objs: Iterable[Any],
    filepath: str,
    *,
    protocol: int = _PICKLE_PROTOCOL,
    zstd_level: int = _DEFAULT_ZSTD_LEVEL,
    buffer_size: int = 8192,
) -> None:
    """
    Write many objects to one file, each pickled and zstd-compressed on its own.

    Record layout: | uint32 length (big-endian) | compressed chunk |

    Args:
        objs: Iterable of objects to store.
        filepath: Destination file path.
        protocol: pickle protocol number.
        zstd_level: zstd compression level.
        buffer_size: File buffer size, in bytes.

    Raises:
        ValueError: When an object cannot be pickled.
        IOError: When compressing or writing a record fails.
    """
    compressor = _get_compressor(zstd_level)
    target_dir = os.path.dirname(filepath) or '.'
    os.makedirs(target_dir, exist_ok=True)

    with open(filepath, 'wb', buffering=buffer_size) as out:
        for item in objs:
            try:
                chunk = compressor.compress(pickle.dumps(item, protocol=protocol))
                out.write(struct.pack('>I', len(chunk)))
                out.write(chunk)
            except (pickle.PicklingError, TypeError) as e:
                raise ValueError(f"Serialization failed: {item!r}") from e
            except Exception as e:
                raise IOError(f"Write failed: {str(e)}") from e


def xload_bult(
    filepath: str,
    *,
    max_item_size: int = _DEFAULT_MAX_DECOMPRESSED_SIZE,
) -> Generator[Any, None, None]:
    """
    Lazily read back the objects written by xdump_iter() (generator).

    ⚠️ Uses pickle.loads() — only read files from trusted sources.

    Args:
        filepath: Source file path.
        max_item_size: Maximum allowed decompressed size per record, in bytes.

    Yields:
        Any: Each deserialized object, in file order.

    Raises:
        FileNotFoundError: If the file does not exist.
        ValueError: On truncated/corrupt records or oversized items.
        RuntimeError: On any other decompression/load failure.
    """
    if not os.path.exists(filepath):
        raise FileNotFoundError(f"File not found: {filepath}")

    decompressor = _get_decompressor()

    with open(filepath, 'rb') as fh:
        while True:
            prefix = fh.read(4)
            if not prefix:
                break  # clean EOF between records
            if len(prefix) < 4:
                raise ValueError("Corrupted file: incomplete size header")

            (length,) = struct.unpack('>I', prefix)
            chunk = fh.read(length) if length > 0 else b''
            if len(chunk) != length:
                raise ValueError("Corrupted file: incomplete data chunk")

            try:
                payload = decompressor.decompress(chunk, max_output_size=max_item_size)
                yield pickle.loads(payload)
            except zstd.ZstdError as e:
                if "output size too large" in str(e):
                    raise ValueError(f"Item exceeds {max_item_size} bytes") from e
                raise RuntimeError(f"Decompression failed: {str(e)}") from e
            except pickle.UnpicklingError as e:
                raise ValueError(f"Deserialization failed: {str(e)}") from e
            except Exception as e:
                raise RuntimeError(f"Load failed: {str(e)}") from e



