from typing import List, Optional, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy import and_, or_, desc, asc
from models.share_189_data import Share189Data
from database import db
import json
from datetime import datetime

class Share189Repository:
    """Data-access layer for 189 (Cloud189) share-code records.

    Stateless: every public method opens a fresh session via
    ``_get_session()`` and closes it (or is expected to) when done.
    """
    
    def __init__(self):
        # No per-instance state; sessions are created per operation.
        pass
    
    def _get_session(self) -> Session:
        """Return a new database session from the global ``db`` factory."""
        return db.get_session()
    
    def close_session(self):
        """Deprecated no-op kept for backward compatibility.

        Sessions are now managed per operation; callers that previously
        closed a shared session through this method need no replacement.
        """
        pass
    
    def create(self, share_data: Dict[str, Any]) -> Share189Data:
        """Insert a single share-code record and return the persisted object.

        Args:
            share_data: mapping with required key ``shareCode`` and optional
                ``accessCode`` / ``description``.

        Returns:
            The committed ``Share189Data`` instance, refreshed so that
            database-generated fields (id, timestamps) are populated.

        Raises:
            Exception: any database error; the transaction is rolled back
                before the exception propagates.
        """
        session = self._get_session()
        try:
            record = Share189Data(
                share_code=share_data['shareCode'],
                access_code=share_data.get('accessCode'),
                description=share_data.get('description'),
                status=0,          # 0 = pending parse
                import_status=0,   # 0 = pending import
            )
            session.add(record)
            session.commit()
            session.refresh(record)
            return record
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
    
    def batch_create(self, share_items: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Insert many share-code records in one transaction, skipping duplicates.

        Each item needs a ``shareCode`` key and may carry ``accessCode``.
        Existing share codes are reported as duplicates and not re-inserted.

        Args:
            share_items: list of dicts describing the shares to add.

        Returns:
            Dict with ``addedCount``, ``duplicateCount``, ``shareIds`` (ids of
            the newly inserted rows) and ``details`` (per-item outcome).

        Raises:
            Exception: any database error; the whole batch is rolled back.
        """
        # Local import kept to match the file's existing style; hoisted out of
        # the per-item loop (the original re-imported it on every iteration).
        from sqlalchemy import text

        session = self._get_session()
        try:
            added_count = 0
            duplicate_count = 0
            share_ids: List[int] = []
            details: List[Dict[str, Any]] = []

            # Prepared once, executed per item with a bound parameter.
            exists_stmt = text(
                "SELECT id FROM share_189_data WHERE share_code = :share_code LIMIT 1"
            )

            for item in share_items:
                share_code = item['shareCode']

                row = session.execute(
                    exists_stmt, {"share_code": share_code}
                ).fetchone()

                if row is not None:
                    duplicate_count += 1
                    details.append({
                        'shareCode': share_code,
                        'shareId': None,
                        'status': 'duplicate',
                        'message': '分享码已存在'
                    })
                    continue

                share_obj = Share189Data(
                    share_code=share_code,
                    access_code=item.get('accessCode'),
                    status=0,
                    import_status=0
                )
                session.add(share_obj)
                session.flush()  # assigns the primary key without committing

                added_count += 1
                share_ids.append(share_obj.id)
                details.append({
                    'shareCode': share_code,
                    'shareId': share_obj.id,
                    'status': 'added',
                    'message': '添加成功'
                })

            session.commit()

            return {
                'addedCount': added_count,
                'duplicateCount': duplicate_count,
                'shareIds': share_ids,
                'details': details
            }
        except Exception:
            session.rollback()
            raise
        finally:
            # BUG FIX: the original had no finally/close here, leaking a
            # database session (and its connection) on every call.
            session.close()
    
    def get_by_id(self, share_id: int) -> Optional[Share189Data]:
        """Fetch one record by primary key.

        Args:
            share_id: primary key of the record.

        Returns:
            The matching ``Share189Data`` (detached, column attributes loaded)
            or ``None`` when the id does not exist.
        """
        session = self._get_session()
        try:
            return session.query(Share189Data).filter(
                Share189Data.id == share_id
            ).first()
        finally:
            # BUG FIX: the original never closed this session (leak).
            # NOTE(review): the returned object becomes detached; its columns
            # are already loaded by .first(), but any lazy relationship access
            # after return would fail — confirm callers only read columns.
            session.close()
    
    def get_by_share_code(self, share_code: str) -> Optional[Share189Data]:
        """Fetch one record by its share code.

        Args:
            share_code: the unique share code string.

        Returns:
            The matching ``Share189Data`` (detached, column attributes loaded)
            or ``None`` when no record has this share code.
        """
        session = self._get_session()
        try:
            return session.query(Share189Data).filter(
                Share189Data.share_code == share_code
            ).first()
        finally:
            # BUG FIX: the original never closed this session (leak).
            # NOTE(review): returned object is detached; columns are loaded,
            # lazy relationships would not be — confirm caller usage.
            session.close()
    
    def get_list(self, status: Optional[int] = None, keyword: Optional[str] = None,
                 page: int = 1, page_size: int = 20) -> Dict[str, Any]:
        """Return one page of share records plus paging metadata.

        Args:
            status: optional exact parse-status filter.
            keyword: optional substring matched against share_code / file_name.
            page: 1-based page number.
            page_size: rows per page.

        Returns:
            Dict with ``items`` (camelCase row dicts), ``total``, ``page``,
            ``pageSize`` and ``totalPages``.
        """
        from sqlalchemy import text  # local import matches the file's style

        session = self._get_session()
        try:
            # SECURITY FIX: the original interpolated `status` and `keyword`
            # directly into the SQL strings (SQL injection via `keyword`, and
            # a syntax error on any quote character). Build the WHERE clause
            # once with bound parameters instead. The original also built an
            # ORM query that was never executed — dead code, removed.
            conditions = []
            params: Dict[str, Any] = {}
            if status is not None:
                conditions.append("status = :status")
                params['status'] = status
            if keyword:
                conditions.append("(share_code LIKE :kw OR file_name LIKE :kw)")
                params['kw'] = f"%{keyword}%"
            where_clause = f" WHERE {' AND '.join(conditions)}" if conditions else ""

            total = session.execute(
                text(f"SELECT COUNT(*) FROM share_189_data{where_clause}"),
                params
            ).scalar()

            offset = (page - 1) * page_size
            rows = session.execute(
                text(
                    f"SELECT * FROM share_189_data{where_clause}"
                    " ORDER BY created_at DESC LIMIT :limit OFFSET :offset"
                ),
                {**params, 'limit': page_size, 'offset': offset}
            ).fetchall()

            # Ceiling division for the page count.
            total_pages = (total + page_size - 1) // page_size

            return {
                'items': [self._row_to_dict(row) for row in rows],
                'total': total,
                'page': page,
                'pageSize': page_size,
                'totalPages': total_pages
            }
        finally:
            session.close()

    def _row_to_dict(self, row: Any) -> Dict[str, Any]:
        """Map one raw share_189_data row to the camelCase API dict."""
        def iso(dt: Any) -> Optional[str]:
            # Datetime columns may be NULL; emit ISO-8601 or None.
            return dt.isoformat() if dt else None

        # file_tree_json is stored as a JSON string; malformed content is
        # deliberately degraded to None rather than failing the listing.
        file_tree_json = None
        if row.file_tree_json:
            try:
                file_tree_json = json.loads(row.file_tree_json)
            except json.JSONDecodeError:
                file_tree_json = None

        return {
            'id': row.id,
            'shareCode': row.share_code,
            'accessCode': row.access_code,
            'status': row.status,
            'importStatus': row.import_status,
            'createdAt': iso(row.created_at),
            'updatedAt': iso(row.updated_at),
            'parseStartTime': iso(row.parse_start_time),
            'parseEndTime': iso(row.parse_end_time),
            'importStartTime': iso(row.import_start_time),
            'importEndTime': iso(row.import_end_time),
            'shareId': row.share_id,
            'fileId': row.file_id,
            'fileName': row.file_name,
            'fileSize': row.file_size,
            'isFolder': row.is_folder,
            'shareMode': row.share_mode,
            'creatorName': row.creator_name,
            'creatorId': row.creator_id,
            'shareDate': iso(row.share_date),
            'expireDate': iso(row.expire_date),
            'fileTreeJson': file_tree_json,
            'totalFiles': row.total_files,
            'totalFolders': row.total_folders,
            'totalSize': row.total_size,
            'maxDepth': row.max_depth,
            'parseDuration': row.parse_duration,
            'importDuration': row.import_duration,
            'importedFiles': row.imported_files,
            'importedSize': row.imported_size,
            'errorMessage': row.error_message
        }
    
    def update_status(self, share_id: int, status: int, error_message: Optional[str] = None) -> bool:
        """Set the parse status of one record.

        Status 1 stamps ``parse_start_time``; status 2 or 3 stamps
        ``parse_end_time`` and derives ``parse_duration`` (whole seconds)
        when a start time exists.

        Args:
            share_id: primary key of the record to update.
            status: new parse status (1=parsing, 2=done, 3=failed).
            error_message: optional error text to store when non-empty.

        Returns:
            True on success, False when the id is unknown.
        """
        session = self._get_session()
        try:
            record = session.query(Share189Data).filter(
                Share189Data.id == share_id
            ).first()
            if record is None:
                return False

            record.status = status
            if error_message:
                record.error_message = error_message

            if status == 1:  # parse started
                record.parse_start_time = datetime.utcnow()
            elif status in (2, 3):  # parse finished or failed
                now = datetime.utcnow()
                record.parse_end_time = now
                started = record.parse_start_time
                if started:
                    record.parse_duration = int((now - started).total_seconds())

            session.commit()
            return True
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
    
    def update_parse_result(self, share_id: int, parse_result: Dict[str, Any]) -> bool:
        """Persist the outcome of parsing a share.

        On ``parse_result['success']`` the record is marked parsed (status 2)
        and populated from ``shareInfo`` / ``statistics``, and the full result
        is stored via ``set_file_tree_json``; otherwise status 3 is set with
        the failure message. In both cases the parse end time (and duration,
        if a start time exists) is stamped.

        Args:
            share_id: primary key of the record to update.
            parse_result: parser output dict.

        Returns:
            True on success, False when the id is unknown.

        Raises:
            Exception: any database error; the transaction is rolled back.
        """
        session = self._get_session()
        try:
            share_obj = session.query(Share189Data).filter(
                Share189Data.id == share_id
            ).first()

            if not share_obj:
                return False

            if parse_result.get('success'):
                share_obj.status = 2  # parsed successfully

                share_info = parse_result.get('shareInfo', {})
                if share_info:
                    share_obj.share_id = str(share_info.get('shareId', ''))
                    share_obj.file_id = str(share_info.get('fileId', ''))
                    share_obj.file_name = share_info.get('shareName')
                    share_obj.share_name = share_info.get('shareName')  # share display name
                    share_obj.file_size = share_info.get('fileSize')
                    share_obj.is_folder = share_info.get('isFolder')
                    share_obj.share_mode = str(share_info.get('shareMode', ''))

                    # BUG FIX: the original used bare `except:` around these
                    # parses, which also swallows KeyboardInterrupt/SystemExit.
                    # The helper catches only parse-related errors.
                    share_time = self._parse_iso_datetime(share_info.get('shareTime'))
                    if share_time is not None:
                        share_obj.share_date = share_time

                    expire_time = self._parse_iso_datetime(share_info.get('expireTime'))
                    if expire_time is not None:
                        share_obj.expire_date = expire_time

                    creator = share_info.get('creator', {})
                    if creator:
                        share_obj.creator_name = creator.get('nickName')
                        share_obj.creator_id = creator.get('ownerAccount')

                statistics = parse_result.get('statistics', {})
                if statistics:
                    share_obj.total_files = statistics.get('totalFiles')
                    share_obj.total_folders = statistics.get('totalFolders')
                    share_obj.total_size = statistics.get('totalSize')
                    share_obj.max_depth = statistics.get('maxDepthReached')

                # Persist the complete file tree JSON on the model.
                share_obj.set_file_tree_json(parse_result)
            else:
                share_obj.status = 3  # parse failed
                share_obj.error_message = parse_result.get('message', '解析失败')

            # Stamp the parse end time and derive duration if we have a start.
            share_obj.parse_end_time = datetime.utcnow()
            if share_obj.parse_start_time:
                duration = (share_obj.parse_end_time - share_obj.parse_start_time).total_seconds()
                share_obj.parse_duration = int(duration)

            session.commit()
            return True
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

    @staticmethod
    def _parse_iso_datetime(value: Optional[str]) -> Optional[datetime]:
        """Parse an ISO-8601 string (accepting a trailing 'Z') or return None.

        Returns None for falsy input and for malformed values, so callers can
        skip the assignment and keep the field's previous value.
        """
        if not value:
            return None
        try:
            return datetime.fromisoformat(value.replace('Z', '+00:00'))
        except (ValueError, TypeError, AttributeError):
            return None
    
    def update_import_status(self, share_id: int, import_status: int,
                           imported_files: Optional[int] = None,
                           imported_size: Optional[int] = None,
                           error_message: Optional[str] = None) -> bool:
        """Record import progress for one share record.

        Status 1 stamps ``import_start_time``; status 2 or 3 stamps
        ``import_end_time`` and derives ``import_duration`` when a start time
        exists. A successful import (2) also promotes the main ``status`` to
        4 ("imported").

        Args:
            share_id: primary key of the record to update.
            import_status: 1=importing, 2=done, 3=failed.
            imported_files: optional count of files imported so far.
            imported_size: optional total bytes imported so far.
            error_message: optional error text stored when non-empty.

        Returns:
            True on success, False when the id is unknown.
        """
        session = self._get_session()
        try:
            record = session.query(Share189Data).filter(
                Share189Data.id == share_id
            ).first()
            if record is None:
                return False

            record.import_status = import_status
            if imported_files is not None:
                record.imported_files = imported_files
            if imported_size is not None:
                record.imported_size = imported_size
            if error_message:
                record.error_message = error_message

            if import_status == 1:  # import started
                record.import_start_time = datetime.utcnow()
            elif import_status in (2, 3):  # import finished or failed
                finished_at = datetime.utcnow()
                record.import_end_time = finished_at
                if record.import_start_time:
                    elapsed = (finished_at - record.import_start_time).total_seconds()
                    record.import_duration = int(elapsed)
                if import_status == 2:
                    record.status = 4  # main status: imported

            session.commit()
            return True
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
    
    def delete(self, share_id: int) -> bool:
        """Delete one share record by primary key.

        Args:
            share_id: primary key of the record to remove.

        Returns:
            True when a row was deleted, False when the id is unknown.

        Raises:
            Exception: any database error; the transaction is rolled back.
        """
        session = self._get_session()
        try:
            target = session.query(Share189Data).filter(
                Share189Data.id == share_id
            ).first()
            if target is None:
                return False

            session.delete(target)
            session.commit()
            return True
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
    
    def get_statistics(self) -> Dict[str, Any]:
        """Aggregate counts over all share records.

        Returns:
            Dict with ``totalShares`` (row count), ``statusStats`` (counts per
            status 0-4; unknown statuses are ignored), ``totalFiles`` and
            ``totalSize`` (sums over the parsed file statistics, 0 when no
            rows contribute).
        """
        session = self._get_session()
        try:
            from sqlalchemy import func

            # Total number of share records.
            total_count = session.query(func.count(Share189Data.id)).scalar() or 0

            # One grouped query for the per-status breakdown.
            status_stats = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0}
            for status, count in session.query(
                Share189Data.status,
                func.count(Share189Data.id)
            ).group_by(Share189Data.status).all():
                if status in status_stats:
                    status_stats[status] = count

            # PERF: the original issued two separate SELECTs for these sums;
            # one query returns both aggregates in a single round trip.
            total_files, total_size = session.query(
                func.sum(Share189Data.total_files),
                func.sum(Share189Data.total_size)
            ).one()

            return {
                'totalShares': int(total_count),
                'statusStats': status_stats,
                'totalFiles': int(total_files or 0),
                'totalSize': int(total_size or 0)
            }
        finally:
            session.close()