from flask import Blueprint, request, jsonify
from flask_jwt_extended import jwt_required
from backend.services.lottery_crawler import LotteryCrawler
import logging
import pymysql
from sqlalchemy import create_engine, text

# Blueprint collecting all crawler-related routes (mounted by the app factory).
crawler_bp = Blueprint('crawler', __name__)
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)

# Initialize the crawl service once at import time; shared by every route below.
# NOTE(review): LotteryCrawler presumably opens a DB engine here (see crawler.engine
# usage in health_check) — confirm it is safe to construct at import time.
crawler = LotteryCrawler()

@crawler_bp.route('/update', methods=['POST'])
@jwt_required()
def update_lottery_data():
    """Crawl and store double-color-ball (双色球) draw data.

    Optional JSON body:
        target_count (int): number of draws to fetch, 1-100. Default 30.
        max_pages (int): maximum pages to crawl, 1-10. Default 3.

    Returns:
        200 with crawl counters plus refreshed database stats on success,
        400 on invalid parameters, 500 on crawl or internal failure.
    """
    try:
        # silent=True: a missing/invalid JSON body yields None (-> defaults)
        # instead of raising and surfacing as a confusing 500.
        data = request.get_json(silent=True) or {}

        target_count = data.get('target_count', 30)  # default: 30 draws
        max_pages = data.get('max_pages', 3)  # default: 3 pages

        # Validate. bool is an int subclass, so exclude it explicitly:
        # otherwise True would slip through as a "positive integer".
        def _is_positive_int(value):
            return isinstance(value, int) and not isinstance(value, bool) and value > 0

        if not _is_positive_int(target_count):
            return jsonify({'error': '目标数量必须是正整数'}), 400

        if not _is_positive_int(max_pages):
            return jsonify({'error': '最大页数必须是正整数'}), 400

        if target_count > 100:
            return jsonify({'error': '单次最多只能爬取100期数据'}), 400

        if max_pages > 10:
            return jsonify({'error': '单次最多只能爬取10页数据'}), 400

        # Lazy %-style args: formatting only happens if the record is emitted.
        logger.info("开始更新双色球数据，目标数量: %s期，最大页数: %s页", target_count, max_pages)

        # Run the crawl.
        result = crawler.crawl_lottery_data(
            max_pages=max_pages,
            target_count=target_count
        )

        if result['success']:
            # Re-read stats so the response reflects the freshly-inserted rows.
            stats = crawler.get_database_stats()

            return jsonify({
                'success': True,
                'message': result['message'],
                'data': {
                    'total_crawled': result['total_crawled'],
                    'total_added': result['total_added'],
                    'total_skipped': result['total_skipped'],
                    'pages_crawled': result['pages_crawled'],
                    'target_count': target_count,
                    'max_pages': max_pages
                },
                'stats': stats
            }), 200
        else:
            return jsonify({
                'success': False,
                'error': result['error'],
                'message': result['message']
            }), 500

    except Exception as e:
        logger.error("更新双色球数据失败: %s", e)
        return jsonify({
            'success': False,
            'error': str(e),
            'message': f'更新失败: {str(e)}'
        }), 500

@crawler_bp.route('/stats', methods=['GET'])
@jwt_required()
def get_crawler_stats():
    """Return aggregate crawl/database statistics as a JSON payload."""
    try:
        payload = {
            'success': True,
            'data': crawler.get_database_stats(),
        }
    except Exception as exc:
        logger.error(f"获取统计信息失败: {str(exc)}")
        return jsonify({
            'success': False,
            'error': str(exc),
            'message': f'获取统计信息失败: {str(exc)}'
        }), 500

    return jsonify(payload), 200

@crawler_bp.route('/latest', methods=['GET'])
@jwt_required()
def get_latest_draw():
    """Return the most recent draw record, or 404 when the DB has none."""
    try:
        info = crawler.get_latest_draw_info()
    except Exception as exc:
        logger.error(f"获取最新开奖信息失败: {str(exc)}")
        return jsonify({
            'success': False,
            'error': str(exc),
            'message': f'获取最新开奖信息失败: {str(exc)}'
        }), 500

    # Guard clause: empty/None means no rows exist yet.
    if not info:
        return jsonify({
            'success': False,
            'error': '没有找到开奖数据',
            'message': '数据库中暂无开奖数据'
        }), 404

    return jsonify({'success': True, 'data': info}), 200

@crawler_bp.route('/health', methods=['GET'])
@jwt_required()
def health_check():
    """Probe DB connectivity; report row count and newest draw date."""
    try:
        # One connection, two probes: total rows + most recent draw_date.
        with crawler.engine.connect() as conn:
            total = conn.execute(
                text("SELECT COUNT(*) FROM lottery_results")
            ).scalar()
            newest = conn.execute(
                text("SELECT draw_date FROM lottery_results ORDER BY draw_date DESC LIMIT 1")
            ).fetchone()

        newest_iso = newest.draw_date.isoformat() if newest else None

        return jsonify({
            'success': True,
            'data': {
                'database_connected': True,
                'total_records': total,
                'latest_draw_date': newest_iso,
                'service_status': 'healthy'
            }
        }), 200

    except Exception as exc:
        logger.error(f"健康检查失败: {str(exc)}")
        return jsonify({
            'success': False,
            'error': str(exc),
            'data': {
                'database_connected': False,
                'total_records': 0,
                'latest_draw_date': None,
                'service_status': 'unhealthy'
            }
        }), 500

@crawler_bp.route('/test-crawl', methods=['POST'])
@jwt_required()
def test_crawl():
    """Smoke-test the crawler by fetching exactly one record."""
    try:
        logger.info("开始测试爬取功能")

        # Minimal crawl: a single page, a single draw.
        outcome = crawler.crawl_lottery_data(max_pages=1, target_count=1)

        if not outcome['success']:
            return jsonify({
                'success': False,
                'error': outcome['error'],
                'message': '测试爬取失败'
            }), 500

        counters = ('total_crawled', 'total_added', 'total_skipped', 'pages_crawled')
        return jsonify({
            'success': True,
            'message': '测试爬取成功',
            'data': {key: outcome[key] for key in counters}
        }), 200

    except Exception as exc:
        logger.error(f"测试爬取失败: {str(exc)}")
        return jsonify({
            'success': False,
            'error': str(exc),
            'message': f'测试爬取失败: {str(exc)}'
        }), 500
