"""HDFS客户端（历史数据持久化）

提供与HDFS交互的客户端接口，用于存储和读取历史数据。
支持文件上传、下载、目录创建、列表查询等基本操作。
"""
import os
import threading
from typing import List, Optional

import pyarrow as pa
import pyarrow.parquet as pq
from hdfs import InsecureClient

from fin_senti_entity_platform.utils.logger import get_logger
from fin_senti_entity_platform.utils.config_loader import ConfigLoader


class HDFSClient:
    """HDFS客户端类，封装HDFS操作的核心功能"""
    _instance = None
    _lock = False
    
    def __new__(cls):
        """单例模式实现"""
        if cls._instance is None:
            if cls._lock:
                raise Exception("正在创建HDFSClient实例，请稍后再试")
            cls._lock = True
            cls._instance = super(HDFSClient, cls).__new__(cls)
            cls._lock = False
        return cls._instance
    
    def __init__(self):
        """初始化HDFS客户端"""
        if not hasattr(self, '_initialized'):
            self.config = ConfigLoader().get_config()
            self.logger = get_logger(__name__)
            self._init_client()
            self._initialized = True
    
    def _init_client(self):
        """初始化HDFS客户端连接"""
        try:
            hdfs_config = self.config.get('data_storage', {}).get('hdfs', {})
            self.client = InsecureClient(
                url=hdfs_config.get('url', 'http://localhost:50070'),
                user=hdfs_config.get('user', 'hadoop')
            )
            self.base_path = hdfs_config.get('base_path', '/fin_senti_data')
            self.logger.info(f"成功初始化HDFS客户端，连接到: {hdfs_config.get('url')}")
        except Exception as e:
            self.logger.error(f"初始化HDFS客户端失败: {str(e)}")
            raise
    
    def create_directory(self, directory_path: str) -> bool:
        """创建HDFS目录
        
        Args:
            directory_path: 要创建的目录路径（相对于base_path）
        
        Returns:
            bool: 是否创建成功
        """
        try:
            full_path = os.path.join(self.base_path, directory_path).replace('\\', '/')
            if not self.client.status(full_path, strict=False):
                self.client.makedirs(full_path)
                self.logger.info(f"成功创建HDFS目录: {full_path}")
            return True
        except Exception as e:
            self.logger.error(f"创建HDFS目录失败: {str(e)}")
            return False
    
    def upload_file(self, local_path: str, hdfs_path: str, overwrite: bool = False) -> bool:
        """上传本地文件到HDFS
        
        Args:
            local_path: 本地文件路径
            hdfs_path: HDFS文件路径（相对于base_path）
            overwrite: 是否覆盖已存在的文件
        
        Returns:
            bool: 是否上传成功
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            
            # 确保目标目录存在
            hdfs_dir = os.path.dirname(full_hdfs_path)
            self.create_directory(hdfs_dir.lstrip(self.base_path).lstrip('/'))
            
            self.client.upload(
                hdfs_path=full_hdfs_path,
                local_path=local_path,
                overwrite=overwrite
            )
            self.logger.info(f"成功上传文件: {local_path} -> {full_hdfs_path}")
            return True
        except Exception as e:
            self.logger.error(f"上传文件失败: {str(e)}")
            return False
    
    def download_file(self, hdfs_path: str, local_path: str, overwrite: bool = False) -> bool:
        """从HDFS下载文件到本地
        
        Args:
            hdfs_path: HDFS文件路径（相对于base_path）
            local_path: 本地文件路径
            overwrite: 是否覆盖已存在的文件
        
        Returns:
            bool: 是否下载成功
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            
            # 确保本地目录存在
            local_dir = os.path.dirname(local_path)
            if not os.path.exists(local_dir):
                os.makedirs(local_dir)
            
            self.client.download(
                hdfs_path=full_hdfs_path,
                local_path=local_path,
                overwrite=overwrite
            )
            self.logger.info(f"成功下载文件: {full_hdfs_path} -> {local_path}")
            return True
        except Exception as e:
            self.logger.error(f"下载文件失败: {str(e)}")
            return False
    
    def list_files(self, directory_path: str = '') -> List[str]:
        """列出HDFS目录下的文件
        
        Args:
            directory_path: 要列出的目录路径（相对于base_path）
        
        Returns:
            List[str]: 文件路径列表
        """
        try:
            full_path = os.path.join(self.base_path, directory_path).replace('\\', '/')
            return self.client.list(full_path)
        except Exception as e:
            self.logger.error(f"列出目录文件失败: {str(e)}")
            return []
    
    def write_dataframe(self, df: pa.Table, hdfs_path: str, partition_cols: List[str] = None) -> bool:
        """将Arrow DataFrame写入HDFS（Parquet格式）
        
        Args:
            df: Arrow DataFrame
            hdfs_path: HDFS文件路径（相对于base_path）
            partition_cols: 分区列列表
        
        Returns:
            bool: 是否写入成功
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            
            # 确保目标目录存在
            hdfs_dir = os.path.dirname(full_hdfs_path)
            self.create_directory(hdfs_dir.lstrip(self.base_path).lstrip('/'))
            
            # 使用pyarrow写入Parquet文件到HDFS
            with self.client.write(full_hdfs_path) as f:
                pq.write_table(df, f)
            
            self.logger.info(f"成功写入DataFrame到HDFS: {full_hdfs_path}")
            return True
        except Exception as e:
            self.logger.error(f"写入DataFrame失败: {str(e)}")
            return False
    
    def read_dataframe(self, hdfs_path: str) -> Optional[pa.Table]:
        """从HDFS读取Parquet文件为Arrow DataFrame
        
        Args:
            hdfs_path: HDFS文件路径（相对于base_path）
        
        Returns:
            Optional[pa.Table]: Arrow DataFrame或None
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            
            with self.client.read(full_hdfs_path) as f:
                df = pq.read_table(f)
            
            self.logger.info(f"成功读取HDFS文件: {full_hdfs_path}")
            return df
        except Exception as e:
            self.logger.error(f"读取文件失败: {str(e)}")
            return None
    
    def check_file_exists(self, hdfs_path: str) -> bool:
        """检查HDFS文件是否存在
        
        Args:
            hdfs_path: HDFS文件路径（相对于base_path）
        
        Returns:
            bool: 文件是否存在
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            return self.client.status(full_hdfs_path, strict=False) is not None
        except Exception:
            return False
    
    def delete_file(self, hdfs_path: str) -> bool:
        """删除HDFS文件
        
        Args:
            hdfs_path: HDFS文件路径（相对于base_path）
        
        Returns:
            bool: 是否删除成功
        """
        try:
            full_hdfs_path = os.path.join(self.base_path, hdfs_path).replace('\\', '/')
            if self.check_file_exists(hdfs_path):
                self.client.delete(full_hdfs_path)
                self.logger.info(f"成功删除HDFS文件: {full_hdfs_path}")
            return True
        except Exception as e:
            self.logger.error(f"删除文件失败: {str(e)}")
            return False


# Module-level singleton instance shared by the rest of the platform.
# NOTE: constructing it at import time connects to HDFS as a side effect.
hdfs_client = HDFSClient()
# The previous trailing string statement ("""\HDFS...""") was a no-op with an
# invalid escape sequence (\H) that triggers a SyntaxWarning on Python 3.12+;
# it has been replaced with this comment.