import json
import logging
import os
import re
from datetime import datetime, timedelta
from typing import Dict, Any, List

from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.db.models import Count, Q, Sum
from django.http import JsonResponse
from django.shortcuts import render
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from rest_framework import viewsets, status
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

from apps.core.models import UploadedFile, ProcessingTask, SystemLog, Template
from .serializers import (
    AnalyticsDataSerializer, LogAnalysisSerializer, 
    PerformanceMetricsSerializer, UsageStatisticsSerializer
)

logger = logging.getLogger(__name__)


class AnalyticsViewSet(viewsets.ViewSet):
    """Analytics API endpoints: dashboard statistics, usage trends,
    template usage, log analysis and file statistics.

    Every action requires authentication, returns JSON with a
    ``success`` flag, and reports failures with HTTP 500 after logging.
    """

    permission_classes = [IsAuthenticated]

    @action(detail=False, methods=['get'])
    def dashboard_stats(self, request):
        """
        Return dashboard summary statistics.

        GET /api/analytics/dashboard_stats/
        """
        try:
            # Headline row counts per model.
            stats = {
                'total_files': UploadedFile.objects.count(),
                'total_tasks': ProcessingTask.objects.count(),
                'total_templates': Template.objects.count(),
                'total_logs': SystemLog.objects.count(),
            }

            # File count per file type.
            file_type_stats = UploadedFile.objects.values('file_type').annotate(
                count=Count('id')
            ).order_by('file_type')

            # Task count per status.
            task_status_stats = ProcessingTask.objects.values('status').annotate(
                count=Count('id')
            ).order_by('status')

            # Activity during the last 7 days.
            seven_days_ago = timezone.now() - timedelta(days=7)
            recent_activity = {
                'files_uploaded': UploadedFile.objects.filter(
                    created_at__gte=seven_days_ago
                ).count(),
                'tasks_created': ProcessingTask.objects.filter(
                    created_at__gte=seven_days_ago
                ).count(),
                'tasks_completed': ProcessingTask.objects.filter(
                    updated_at__gte=seven_days_ago,
                    status='completed'
                ).count(),
            }

            performance_metrics = self._get_performance_metrics()

            return Response({
                'success': True,
                'stats': stats,
                'file_type_distribution': list(file_type_stats),
                'task_status_distribution': list(task_status_stats),
                'recent_activity': recent_activity,
                'performance_metrics': performance_metrics
            })

        except Exception as e:
            logger.error(f"获取仪表板统计失败: {str(e)}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def _get_performance_metrics(self) -> Dict[str, Any]:
        """
        Compute system performance metrics.

        Returns:
            Dict with average processing time (seconds), success rate
            and error rate (percent), and total task count. Returns an
            empty dict if the computation fails.
        """
        try:
            # Completed tasks that carry both timestamps.
            completed_tasks = ProcessingTask.objects.filter(
                status='completed',
                created_at__isnull=False,
                updated_at__isnull=False
            )

            # Average wall-clock duration of a completed task.
            # NOTE: iterates rows in Python; fetch only the two timestamp
            # columns. Fine for moderate table sizes — consider a DB
            # aggregate if this table grows large.
            avg_processing_time = 0
            if completed_tasks.exists():
                total_time = sum(
                    (task.updated_at - task.created_at).total_seconds()
                    for task in completed_tasks.only('created_at', 'updated_at')
                )
                avg_processing_time = total_time / completed_tasks.count()

            # Success / error rates over all tasks (guard against empty table).
            total_tasks = ProcessingTask.objects.count()
            successful_tasks = ProcessingTask.objects.filter(status='completed').count()
            success_rate = (successful_tasks / total_tasks * 100) if total_tasks > 0 else 0

            failed_tasks = ProcessingTask.objects.filter(status='failed').count()
            error_rate = (failed_tasks / total_tasks * 100) if total_tasks > 0 else 0

            return {
                'avg_processing_time': round(avg_processing_time, 2),
                'success_rate': round(success_rate, 2),
                'error_rate': round(error_rate, 2),
                'total_processed': total_tasks
            }

        except Exception as e:
            logger.error(f"获取性能指标失败: {str(e)}")
            return {}

    @action(detail=False, methods=['get'])
    def usage_trends(self, request):
        """
        Return per-day activity counts for the last ``days`` days.

        GET /api/analytics/usage_trends/?days=30
        """
        try:
            days = int(request.query_params.get('days', 30))
            start_date = timezone.now() - timedelta(days=days)

            # One bucket per day over the half-open range [date, next_date).
            daily_stats = []
            for i in range(days):
                date = start_date + timedelta(days=i)
                next_date = date + timedelta(days=1)

                day_stats = {
                    'date': date.strftime('%Y-%m-%d'),
                    'files_uploaded': UploadedFile.objects.filter(
                        created_at__gte=date,
                        created_at__lt=next_date
                    ).count(),
                    'tasks_created': ProcessingTask.objects.filter(
                        created_at__gte=date,
                        created_at__lt=next_date
                    ).count(),
                    'tasks_completed': ProcessingTask.objects.filter(
                        updated_at__gte=date,
                        updated_at__lt=next_date,
                        status='completed'
                    ).count(),
                }
                daily_stats.append(day_stats)

            return Response({
                'success': True,
                'trends': daily_stats,
                'period': f'{days} days'
            })

        except Exception as e:
            # Log before responding; the original handler dropped the error.
            logger.error(f"Failed to compute usage trends: {e}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    @action(detail=False, methods=['get'])
    def template_usage(self, request):
        """
        Return per-template usage statistics, busiest templates first.

        GET /api/analytics/template_usage/
        """
        try:
            # distinct=True is required: annotating two Counts over
            # different reverse relations in one queryset JOINs both
            # tables and multiplies the row counts otherwise.
            template_stats = Template.objects.annotate(
                usage_count=Count('processingtask', distinct=True),
                file_count=Count('uploadedfile', distinct=True)
            ).order_by('-usage_count')

            template_data = []
            for template in template_stats:
                template_data.append({
                    'id': template.id,
                    'name': template.name,
                    'usage_count': template.usage_count,
                    'file_count': template.file_count,
                    'created_at': template.created_at.strftime('%Y-%m-%d'),
                    'is_active': template.is_active
                })

            return Response({
                'success': True,
                'templates': template_data
            })

        except Exception as e:
            logger.error(f"Failed to compute template usage: {e}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    @action(detail=False, methods=['get'])
    def log_analysis(self, request):
        """
        Return log analysis data for the last ``hours`` hours.

        GET /api/analytics/log_analysis/?level=ERROR&hours=24
        """
        try:
            level = request.query_params.get('level', '')
            hours = int(request.query_params.get('hours', 24))

            start_time = timezone.now() - timedelta(hours=hours)

            # Base queryset over the window, optionally narrowed by level.
            queryset = SystemLog.objects.filter(created_at__gte=start_time)
            if level:
                queryset = queryset.filter(level=level)

            # Counts per level (after the optional filter).
            level_stats = queryset.values('level').annotate(
                count=Count('id')
            ).order_by('level')

            # One bucket per hour over [hour_start, hour_end).
            hourly_stats = []
            for i in range(hours):
                hour_start = start_time + timedelta(hours=i)
                hour_end = hour_start + timedelta(hours=1)

                hour_count = queryset.filter(
                    created_at__gte=hour_start,
                    created_at__lt=hour_end
                ).count()

                hourly_stats.append({
                    'hour': hour_start.strftime('%Y-%m-%d %H:00'),
                    'count': hour_count
                })

            # Ten most recent ERROR entries, independent of the level filter.
            recent_errors = SystemLog.objects.filter(
                level='ERROR',
                created_at__gte=start_time
            ).order_by('-created_at')[:10]

            error_data = []
            for log in recent_errors:
                error_data.append({
                    'id': log.id,
                    'message': log.message,
                    'module': log.module,
                    'created_at': log.created_at.strftime('%Y-%m-%d %H:%M:%S'),
                    'details': log.details
                })

            return Response({
                'success': True,
                'level_distribution': list(level_stats),
                'hourly_trends': hourly_stats,
                'recent_errors': error_data,
                'total_logs': queryset.count()
            })

        except Exception as e:
            logger.error(f"Failed to compute log analysis: {e}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    @staticmethod
    def _format_size(total_size):
        """Render a byte count as a human-readable B/KB/MB/GB string."""
        if total_size > 1024 * 1024 * 1024:
            return f"{total_size / 1024 / 1024 / 1024:.2f} GB"
        if total_size > 1024 * 1024:
            return f"{total_size / 1024 / 1024:.2f} MB"
        if total_size > 1024:
            return f"{total_size / 1024:.2f} KB"
        return f"{total_size} B"

    @action(detail=False, methods=['get'])
    def file_statistics(self, request):
        """
        Return aggregate file statistics: total size, extension and
        per-template distributions.

        GET /api/analytics/file_statistics/
        """
        try:
            # Single pass over all uploads (the original scanned twice):
            # total size, readable-file count and extension histogram.
            total_size = 0
            file_count = 0
            extension_stats = {}
            for file_obj in UploadedFile.objects.all():
                ext = os.path.splitext(file_obj.original_name)[1].lower()
                extension_stats[ext] = extension_stats.get(ext, 0) + 1
                if file_obj.file:
                    try:
                        total_size += file_obj.file.size
                        file_count += 1
                    except (OSError, ValueError):
                        # File missing on disk or no file attached —
                        # skip it rather than fail the whole report.
                        pass

            size_formatted = self._format_size(total_size)

            # File count per template, most-used first.
            template_file_stats = Template.objects.annotate(
                file_count=Count('uploadedfile')
            ).order_by('-file_count')

            template_data = []
            for template in template_file_stats:
                template_data.append({
                    'template_name': template.name,
                    'file_count': template.file_count
                })

            return Response({
                'success': True,
                'total_size': total_size,
                'total_size_formatted': size_formatted,
                'file_count': file_count,
                'extension_distribution': extension_stats,
                'template_file_distribution': template_data
            })

        except Exception as e:
            logger.error(f"Failed to compute file statistics: {e}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)


@require_http_methods(["GET"])
def ajax_dashboard_data(request):
    """
    Return dashboard summary data as JSON for AJAX polling.

    Payload: headline counts, last-24h activity and the per-status task
    breakdown. On failure the error is logged and reported as
    ``success: False``.
    """
    try:
        # Headline row counts.
        stats = {
            'total_files': UploadedFile.objects.count(),
            'total_tasks': ProcessingTask.objects.count(),
            'total_templates': Template.objects.count(),
            'active_templates': Template.objects.filter(is_active=True).count(),
        }

        # Activity in the last 24 hours.
        yesterday = timezone.now() - timedelta(hours=24)
        recent_activity = {
            'files_uploaded': UploadedFile.objects.filter(
                created_at__gte=yesterday
            ).count(),
            'tasks_completed': ProcessingTask.objects.filter(
                updated_at__gte=yesterday,
                status='completed'
            ).count(),
        }

        # Task count per status.
        task_status = {
            'pending': ProcessingTask.objects.filter(status='pending').count(),
            'processing': ProcessingTask.objects.filter(status='processing').count(),
            'completed': ProcessingTask.objects.filter(status='completed').count(),
            'failed': ProcessingTask.objects.filter(status='failed').count(),
        }

        return JsonResponse({
            'success': True,
            'stats': stats,
            'recent_activity': recent_activity,
            'task_status': task_status
        })

    except Exception as e:
        # Log the failure; the original handler dropped the error silently.
        logger.error(f"Failed to build dashboard data: {e}", exc_info=True)
        return JsonResponse({
            'success': False,
            'error': str(e)
        })


@require_http_methods(["GET"])
def get_file_logs(request):
    """
    Read and parse recent entries from the newest on-disk log file.

    GET params:
        level -- optional exact-match level filter (e.g. 'ERROR')
        limit -- maximum number of entries returned (default 50)

    Requires ``re`` and ``django.conf.settings`` imported at module
    level (the original module used both without importing them, so
    every request raised NameError).
    """
    try:
        level = request.GET.get('level', '')
        limit = int(request.GET.get('limit', 50))

        # Log directory sits next to the project base directory.
        log_dir = os.path.join(os.path.dirname(settings.BASE_DIR), 'logs')

        if not os.path.exists(log_dir):
            return JsonResponse({
                'success': False,
                'error': '日志目录不存在'
            })

        # Candidate files follow the debug_*.log naming convention.
        log_files = [f for f in os.listdir(log_dir) if f.startswith('debug_') and f.endswith('.log')]
        if not log_files:
            return JsonResponse({
                'success': False,
                'error': '没有找到日志文件'
            })

        # Lexicographic descending sort puts the newest dated file first.
        log_files.sort(reverse=True)
        latest_log_file = os.path.join(log_dir, log_files[0])

        logs = []

        with open(latest_log_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        # "<timestamp> - <LEVEL> - <message>" as emitted by the logger.
        log_pattern = re.compile(
            r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) - (\w+) - (.+)'
        )

        # Newest entries first; cap the scan at the last 1000 lines.
        for line in reversed(lines[-1000:]):
            line = line.strip()
            if not line:
                continue

            match = log_pattern.match(line)
            if match:
                timestamp_str, log_level, message = match.groups()

                # Apply the optional level filter.
                if level and log_level != level:
                    continue

                try:
                    timestamp = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S,%f')
                except ValueError:
                    # Malformed timestamp: fall back to "now" rather than
                    # drop the entry.
                    timestamp = datetime.now()

                logs.append({
                    'id': len(logs) + 1,
                    'level': log_level,
                    'message': message,
                    'module': 'CFS_System',
                    'created_at': timestamp.strftime('%Y-%m-%d %H:%M:%S'),
                    'details': '',
                    'source': 'file'
                })

                if len(logs) >= limit:
                    break

        return JsonResponse({
            'success': True,
            'logs': logs,
            'total_count': len(logs),
            'log_file': log_files[0]
        })

    except Exception as e:
        logger.error(f"Failed to read file logs: {e}", exc_info=True)
        return JsonResponse({
            'success': False,
            'error': str(e)
        })


@require_http_methods(["GET"])
def ajax_log_data(request):
    """
    Return log entries as JSON, from the database or from log files.

    GET params:
        source -- 'database' (default) or 'file'
        level  -- optional level filter
        limit  -- maximum entries returned (default 50)

    The ``@require_http_methods`` guard matches the sibling AJAX views;
    the handler only reads ``request.GET``.
    """
    try:
        source = request.GET.get('source', 'database')  # database or file

        if source == 'file':
            # Delegate to the file-log reader; it parses its own params.
            return get_file_logs(request)

        level = request.GET.get('level', '')
        limit = int(request.GET.get('limit', 50))

        # Database-backed logs, newest first, optionally level-filtered.
        queryset = SystemLog.objects.all().order_by('-created_at')
        if level:
            queryset = queryset.filter(level=level)

        logs = queryset[:limit]

        log_data = []
        for log in logs:
            log_data.append({
                'id': log.id,
                'level': log.level,
                'message': log.message,
                'module': log.module,
                'created_at': log.created_at.strftime('%Y-%m-%d %H:%M:%S'),
                # Older SystemLog rows/schemas may lack a details field.
                'details': getattr(log, 'details', '')
            })

        return JsonResponse({
            'success': True,
            'logs': log_data,
            'total_count': queryset.count()
        })

    except Exception as e:
        logger.error(f"Failed to fetch log data: {e}", exc_info=True)
        return JsonResponse({
            'success': False,
            'error': str(e)
        })


def analytics_dashboard(request):
    """
    Render the analytics dashboard page.

    Supplies headline model counts and the ten most recent system log
    entries to the template.
    """
    recent_logs = SystemLog.objects.order_by('-created_at')[:10]
    context = dict(
        total_files=UploadedFile.objects.count(),
        total_tasks=ProcessingTask.objects.count(),
        total_templates=Template.objects.count(),
        recent_logs=recent_logs,
    )
    return render(request, 'analytics/dashboard.html', context)


def log_viewer(request):
    """
    Render the log viewer page with per-level log entry counts.
    """
    by_level = (
        SystemLog.objects
        .values('level')
        .annotate(count=Count('id'))
        .order_by('level')
    )
    return render(request, 'analytics/log_viewer.html', {'level_stats': by_level})


def file_log_viewer(request):
    """
    Render the file-based log viewer page (no extra context needed;
    entries are fetched client-side via the AJAX log endpoints).
    """
    template_name = 'analytics/file_logs.html'
    return render(request, template_name)