"""
反爬虫管理视图
"""

from django.http import JsonResponse
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt
from django.core.cache import caches
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views import View
import json
import logging
from datetime import datetime, timedelta

logger = logging.getLogger(__name__)

@method_decorator(csrf_exempt, name='dispatch')
@method_decorator(login_required, name='dispatch')
class AntiCrawlerManagementView(View):
    """Management view for the anti-crawler subsystem.

    GET exposes read-only inspection actions selected via ``?action=``
    (``status``, ``blocked_ips``, ``logs``); POST performs management
    operations (``block_ip``, ``unblock_ip``, ``clear_logs``) from a JSON
    body. All responses are JSON. Access requires an authenticated user.
    """

    def __init__(self, **kwargs):
        # Accept **kwargs so Django's as_view(**initkwargs) keeps working;
        # the previous bare __init__(self) signature broke that contract.
        super().__init__(**kwargs)
        # Dedicated cache alias for anti-crawler bookkeeping (blocked IPs, logs).
        self.cache = caches['anti_crawler']

    def get(self, request):
        """Dispatch read-only actions; unknown actions get a 400.

        Any unexpected error is logged and mapped to a generic 500 so
        internals are not leaked to the client.
        """
        try:
            action = request.GET.get('action', 'status')

            if action == 'status':
                return self._get_status()
            elif action == 'blocked_ips':
                return self._get_blocked_ips()
            elif action == 'logs':
                # Pass the request through: _get_request_logs needs it to
                # read the 'limit' query parameter (previously a NameError).
                return self._get_request_logs(request)
            else:
                return JsonResponse({'error': '无效的操作'}, status=400)

        except Exception as e:
            logger.error(f"获取反爬虫状态失败: {str(e)}")
            return JsonResponse({'error': '服务器错误'}, status=500)

    def post(self, request):
        """Dispatch management operations from a JSON request body.

        Returns 400 for malformed JSON or unknown actions, 500 for
        unexpected failures (logged).
        """
        try:
            data = json.loads(request.body)
            action = data.get('action')

            if action == 'unblock_ip':
                return self._unblock_ip(data.get('ip'))
            elif action == 'block_ip':
                return self._block_ip(data.get('ip'), data.get('reason', 'manual'))
            elif action == 'clear_logs':
                return self._clear_logs()
            else:
                return JsonResponse({'error': '无效的操作'}, status=400)

        except json.JSONDecodeError:
            return JsonResponse({'error': '无效的JSON数据'}, status=400)
        except Exception as e:
            logger.error(f"反爬虫管理操作失败: {str(e)}")
            return JsonResponse({'error': '服务器错误'}, status=500)

    def _get_status(self):
        """Return overall system status.

        The local cache backend cannot enumerate its keys, so the two
        counters are placeholders (always 0). A Redis backend could scan
        keys to produce real counts.
        """
        return JsonResponse({
            'status': 'active',
            'blocked_ips_count': 0,
            'request_logs_count': 0,
            # NOTE(review): reports 'Redis' although the code comments say a
            # local cache is in use — confirm which backend the
            # 'anti_crawler' cache alias actually maps to in settings.
            'cache_backend': 'Redis',
            'timestamp': datetime.now().isoformat()
        })

    def _get_blocked_ips(self):
        """Return the list of blocked IPs.

        Always empty with the local cache backend (keys cannot be
        scanned); switch to a Redis-backed implementation for full
        functionality in production.
        """
        blocked_ips = []
        return JsonResponse({
            'blocked_ips': blocked_ips,
            'total': len(blocked_ips)
        })

    def _get_request_logs(self, request):
        """Return recent request logs, newest first, capped at ``limit``.

        Bug fix: the original referenced the undefined name ``request``
        (NameError → every ``?action=logs`` call returned 500); the
        request is now passed in by ``get()``. Always empty with the
        local cache backend (keys cannot be scanned).
        """
        logs = []
        try:
            limit = int(request.GET.get('limit', 100))
        except (TypeError, ValueError):
            # Non-numeric limit: fall back to the default instead of 500ing.
            limit = 100

        # Newest entries first once a scanning backend populates `logs`.
        logs.sort(key=lambda x: x.get('timestamp', ''), reverse=True)

        return JsonResponse({
            'logs': logs[:limit],
            'total': len(logs)
        })

    def _unblock_ip(self, ip):
        """Remove a manual or automatic block for ``ip``.

        Returns 400 for a missing IP, 404 if the IP was not blocked.
        """
        if not ip:
            return JsonResponse({'error': 'IP地址不能为空'}, status=400)

        blocked_key = f"blocked_ip:{ip}"
        if self.cache.get(blocked_key):
            self.cache.delete(blocked_key)
            logger.info(f"手动解除IP屏蔽: {ip}")
            return JsonResponse({'message': f'IP {ip} 已解除屏蔽'})
        else:
            return JsonResponse({'error': f'IP {ip} 未被屏蔽'}, status=404)

    def _block_ip(self, ip, reason):
        """Manually block ``ip`` for 24 hours, recording ``reason``.

        Returns 400 for a missing IP.
        """
        if not ip:
            return JsonResponse({'error': 'IP地址不能为空'}, status=400)

        blocked_key = f"blocked_ip:{ip}"
        blocked_info = {
            'reason': reason,
            'blocked_at': datetime.now().isoformat(),
            'ip': ip,
            'manual': True
        }

        # Manual blocks last longer than automatic ones: 24 hours.
        self.cache.set(blocked_key, blocked_info, 86400)
        logger.info(f"手动屏蔽IP: {ip}, 原因: {reason}")

        return JsonResponse({'message': f'IP {ip} 已被屏蔽'})

    def _clear_logs(self):
        """Clear request logs.

        With the local cache backend there is no way to delete only log
        keys, so the whole 'anti_crawler' cache is flushed — this also
        drops active IP blocks.
        """
        try:
            self.cache.clear()
            logger.info("清理了缓存数据")
            return JsonResponse({'message': '清理了缓存数据'})
        except Exception as e:
            logger.error(f"清理日志失败: {str(e)}")
            return JsonResponse({'error': '清理日志失败'}, status=500)


@csrf_exempt
@require_http_methods(["GET"])
def anti_crawler_status(request):
    """Public, unauthenticated health-check endpoint for the anti-crawler system."""
    payload = {
        'anti_crawler': 'active',
        'timestamp': datetime.now().isoformat(),
        'version': '1.0',
    }
    return JsonResponse(payload)