#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Xiaohongshu (Little Red Book) crawler routes.

Flask blueprint serving the crawler page plus status/restart JSON
endpoints; the actual crawler work is delegated to the
``xhs_crawler_bridge`` module.
"""

from flask import Blueprint, render_template, redirect, url_for, flash, jsonify, current_app
import os
import sys
import logging
import traceback

# Make the crawler bridge module importable: append this file's parent
# directory to sys.path (the bridge lives one level up).
current_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(current_dir)
sys.path.append(parent_dir)

# Logging setup: mirror records to the console and to a log file next
# to the bridge module.  NOTE(review): basicConfig at import time
# configures the process-wide root logger — confirm this is intended
# and does not clash with the host app's logging configuration.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler(os.path.join(parent_dir, 'crawler_route.log'))
    ]
)
logger = logging.getLogger('xhs_crawler_route')

# Import the crawler bridge.  On failure we log and continue: the route
# handlers below will then hit a NameError at request time, which their
# own broad try/except blocks turn into a flashed error / JSON error.
try:
    from xhs_crawler_bridge import launch_crawler, get_crawler_url, get_crawler_status
    logger.info("成功导入爬虫桥接模块")
except Exception as e:
    logger.error(f"导入爬虫桥接模块失败: {str(e)}")
    logger.error(traceback.format_exc())

# Blueprint to be registered by the parent application.
crawler_bp = Blueprint('crawler', __name__)

@crawler_bp.route('/crawler')
def crawler_page():
    """Render the crawler page, launching the crawler service on demand.

    Three cases, based on the bridge's reported status:
      * an error is recorded        -> render the page with the error shown;
      * service already running     -> render the page with its URL;
      * not running, no error       -> try to launch the service now.

    On any failure the user gets a flashed message and a redirect to the
    xhs index page instead of a raw 500.
    """
    try:
        logger.info("访问爬虫页面")

        # Ask the bridge for the current service state
        # (a dict with at least "running" and "error" keys).
        status = get_crawler_status()

        if status["error"]:
            # The service reported an error: still render the page, but
            # surface the error both via flash and to the template.
            logger.error(f"爬虫状态错误: {status['error']}")
            flash(f'爬虫服务错误: {status["error"]}', 'warning')
            return render_template('xhs/crawler.html', crawler_url=get_crawler_url(), error=status["error"])

        if status["running"]:
            # Service is already up; just embed its URL.
            logger.info("爬虫已经在运行")
            return render_template('xhs/crawler.html', crawler_url=get_crawler_url())

        # Not running and no recorded error: launch the service now.
        try:
            logger.info("尝试启动爬虫应用")
            crawler_url = launch_crawler()
            logger.info(f"爬虫应用启动成功，URL: {crawler_url}")
            return render_template('xhs/crawler.html', crawler_url=crawler_url)
        except Exception as e:
            # logger.exception records the message plus the traceback
            # in a single call.
            logger.exception(f"启动爬虫失败: {str(e)}")
            flash(f'启动爬虫失败: {str(e)}', 'danger')
            return redirect(url_for('xhs.index'))
    except Exception as e:
        # Last-resort boundary: anything unexpected (including NameError
        # if the bridge import failed) lands here.
        logger.exception(f"访问爬虫页面时发生错误: {str(e)}")
        flash(f'访问爬虫页面失败: {str(e)}', 'danger')
        return redirect(url_for('xhs.index'))

@crawler_bp.route('/crawler/status')
def crawler_status():
    """Return the crawler bridge's status dict as a JSON response.

    If querying the bridge raises, respond with a JSON error payload
    rather than propagating the exception.
    """
    try:
        return jsonify(get_crawler_status())
    except Exception as exc:
        logger.error(f"获取爬虫状态时发生错误: {str(exc)}")
        logger.error(traceback.format_exc())
        return jsonify({'status': 'error', 'message': str(exc)})

@crawler_bp.route('/crawler/restart')
def restart_crawler():
    """(Re)launch the crawler service and report the outcome as JSON.

    Returns ``{'status': 'success', 'url': ..., 'message': ...}`` when
    ``launch_crawler()`` succeeds, or ``{'status': 'error',
    'message': ...}`` if it raises.  NOTE(review): launch_crawler() is
    used for both first start and restart — confirm the bridge handles
    an already-running service.
    """
    try:
        crawler_url = launch_crawler()
        return jsonify({
            'status': 'success',
            'url': crawler_url,
            'message': '爬虫服务已重启'
        })
    except Exception as e:
        # logger.exception records the message plus the traceback
        # in a single call.
        logger.exception(f"重启爬虫失败: {str(e)}")
        return jsonify({
            'status': 'error',
            'message': str(e)
        })