# -*- coding: utf-8 -*-
"""
Houzz Web服务 - 爬虫任务管理器
提供Web界面来管理爬虫任务、监控进度和导出数据
"""

import os
import json
import time
import threading
from datetime import datetime
from flask import Flask, render_template, request, jsonify, send_file
import pandas as pd
from modules.common import RedisManager, ProxyManager, get_logger
from modules.houzz import ListPageScraper, DetailPageScraper
from config import get_spider_config

app = Flask(__name__)
# NOTE(review): hardcoded secret key — acceptable for an internal tool, but it
# should come from the environment in any shared deployment; confirm.
app.secret_key = 'houzz_scraper_web_service_2024'

SPIDER_NAME = 'houzz'

# Global spider singletons, created lazily by init_spider() on first request
# (or eagerly in the __main__ guard).
redis_manager = None
proxy_manager = None
list_scraper = None
detail_scraper = None
# In-memory snapshot of the current task, served to the UI via /api/task/status
# and refreshed by update_progress().
# NOTE(review): mutated from request handlers and background threads without a
# lock — presumably tolerable for dict key assignment under CPython; confirm.
task_status = {
    'running': False,       # True while a scraping task is active
    'current_task': None,   # dict: category_url / start_page / end_page
    'start_time': None,     # datetime when the current task was started
    'progress': {
        'total_pages': 0,
        'success_pages': 0,
        'total_details': 0,
        'success_details': 0,
        'failed_details': 0,
        'progress_percentage': 0,
        'is_completed': False
    },
    'logs': []              # recent log entries rendered in the web UI
}

logger = get_logger('web_service', spider_name=SPIDER_NAME)


def init_spider():
    """Create the shared Redis/proxy managers and both scrapers.

    Populates the module-level globals; logs and re-raises on any failure so
    callers see the original exception.
    """
    global redis_manager, proxy_manager, list_scraper, detail_scraper

    try:
        logger.info("初始化Houzz爬虫实例...")
        rm = RedisManager(spider_name=SPIDER_NAME)
        pm = ProxyManager()
        redis_manager, proxy_manager = rm, pm
        list_scraper = ListPageScraper(rm, pm)
        detail_scraper = DetailPageScraper(rm, pm)
        logger.info("Houzz爬虫实例初始化成功")
    except Exception as exc:
        logger.error(f"Houzz爬虫实例初始化失败: {exc}")
        raise


@app.route('/')
def index():
    """Render the dashboard home page, lazily initializing the spider."""
    if redis_manager is None:
        init_spider()
    context = {'task_status': task_status, 'spider_name': SPIDER_NAME}
    return render_template('index.html', **context)


@app.route('/api/task/start', methods=['POST'])
def start_task():
    """Start a new scraping task from a JSON request body.

    Expected JSON fields:
        category_url (str): required category listing URL.
        start_page (int): first page to crawl, defaults to 1 (must be >= 1).
        end_page (int): last page to crawl, defaults to 100 (max 1000).

    Returns:
        JSON with 'success' (bool) and 'message' (str).
    """
    try:
        # Lazily initialize the spider components on first use.
        if redis_manager is None:
            init_spider()

        # silent=True: a missing or non-JSON body yields None instead of
        # raising, so we can return a clean validation error below.
        data = request.get_json(silent=True) or {}
        category_url = data.get('category_url')
        try:
            start_page = int(data.get('start_page', 1))
            end_page = int(data.get('end_page', 100))
        except (TypeError, ValueError):
            return jsonify({'success': False, 'message': '页码必须为整数'})

        # Validate parameters before touching any state.
        if not category_url:
            return jsonify({'success': False, 'message': '类目URL不能为空'})

        if start_page < 1:
            return jsonify({'success': False, 'message': '起始页必须大于等于1'})

        if end_page > 1000:
            return jsonify({'success': False, 'message': '最大页数不能超过1000'})

        if start_page > end_page:
            return jsonify({'success': False, 'message': '起始页不能大于结束页'})

        # Reject concurrent tasks; only one scraping run at a time.
        if task_status['running']:
            return jsonify({'success': False, 'message': '已有任务在运行中'})

        # Fire off the background task (returns immediately).
        start_scraping_task(category_url, start_page, end_page)

        return jsonify({'success': True, 'message': '任务启动成功'})

    except Exception as e:
        logger.error(f"启动任务失败: {e}")
        return jsonify({'success': False, 'message': f'启动任务失败: {str(e)}'})


@app.route('/api/task/stop', methods=['POST'])
def stop_task():
    """Stop the current task and drop every queued item from Redis.

    Note: the scraper threads themselves keep running; clearing the queues
    simply starves them of work.
    """
    try:
        if redis_manager is None:
            init_spider()

        if not task_status['running']:
            return jsonify({'success': False, 'message': '没有运行中的任务'})

        # Drain both task queues so the scraper threads go idle.
        redis_manager.clear_all_tasks()

        # Flip the in-memory status back to idle.
        task_status['running'] = False
        task_status['current_task'] = None

        logger.info("任务已停止，所有待处理任务已清空")
        return jsonify({'success': True, 'message': '任务已停止，所有待处理任务已清空'})

    except Exception as exc:
        logger.error(f"停止任务失败: {exc}")
        return jsonify({'success': False, 'message': f'停止任务失败: {str(exc)}'})


@app.route('/api/task/status')
def get_task_status():
    """Return the task status snapshot, refreshing progress from Redis first."""
    try:
        if redis_manager is None:
            init_spider()

        # Pull fresh counters out of Redis into task_status before replying.
        update_progress()

        return jsonify({'success': True, 'data': task_status})
    except Exception as exc:
        logger.error(f"获取任务状态失败: {exc}")
        return jsonify({'success': False, 'message': f'获取状态失败: {str(exc)}'})


@app.route('/api/task/export')
def export_data():
    """Export all scraped results to a timestamped folder under exports/.

    Writes the completed records to an Excel workbook, failed list/detail
    tasks and a stats snapshot to JSON files, then clears every Redis
    collection and resets the in-memory task status.  Export is refused
    while a task is running and work is still queued.

    Returns:
        JSON with a success flag, export directory, the Excel filename and
        a relative path usable with /api/task/download/.
    """
    try:
        # Lazily initialize the spider components on first use.
        if redis_manager is None:
            init_spider()
        
        # Refuse to export while a run is active and either queue is
        # non-empty — otherwise the export would race the scraper threads.
        stats = redis_manager.get_stats()
        has_pending_tasks = stats.get('list_tasks', 0) > 0 or stats.get('detail_tasks', 0) > 0
        
        if task_status['running'] and has_pending_tasks:
            return jsonify({'success': False, 'message': '任务运行中且有待处理任务，无法导出数据'})
        
        # One directory per export, e.g. exports/export_20240101_120000.
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        export_dir = os.path.join('exports', f'export_{timestamp}')
        os.makedirs(export_dir, exist_ok=True)
        
        # Pull everything out of Redis before it is cleared below.
        all_data = redis_manager.get_all_completed_data()
        list_failed = redis_manager.get_all_list_failed_tasks()
        detail_failed = redis_manager.get_all_detail_failed_tasks()
        stats_data = redis_manager.get_stats_data()
        
        # Debug snapshot: record counts plus the raw stats payload.
        json_data = {
            'export_info': {
                'export_time': datetime.now().isoformat(),
                'total_completed_data': len(all_data) if all_data else 0,
                'total_list_failed': len(list_failed) if list_failed else 0,
                'total_detail_failed': len(detail_failed) if detail_failed else 0
            },
            'stats_data': stats_data
        }
        
        # Persist the debug snapshot.
        json_file = os.path.join(export_dir, 'debug_data.json')
        with open(json_file, 'w', encoding='utf-8') as f:
            json.dump(json_data, f, ensure_ascii=False, indent=2)
        
        # Flatten each completed record into one spreadsheet row.
        if all_data:
            df_data = []
            for item in all_data:
                data = item.get('data', {})
                df_data.append({
                    'pro_user_id': data.get('pro_user_id'),
                    'company_name': data.get('company_name'),
                    'company_profile_url': data.get('company_profile_url'),
                    'category_name': data.get('category_name'),
                    'category_url': data.get('category_url'),
                    'category_page': data.get('category_page'),
                    'average_rating': data.get('average_rating'),
                    'number_of_reviews': data.get('number_of_reviews'),
                    'location': data.get('location'),
                    'company_description': data.get('company_description'),
                    'phone_number': data.get('phone_number'),
                    'website_url': data.get('website_url'),
                    'address': data.get('address'),
                    'verified_license': data.get('verified_license'),
                    'awards_and_recognition': data.get('awards_and_recognition'),
                    'followers': data.get('followers'),
                    'projects': data.get('projects'),
                    'videos': data.get('videos'),
                    'houzz_awards': data.get('houzz_awards'),
                    'houzz_badges': data.get('houzz_badges'),
                    # completed_at is stored in Redis as a unix timestamp;
                    # missing values render as the epoch.
                    'completed_at': datetime.fromtimestamp(item.get('completed_at', 0)).strftime('%Y-%m-%d %H:%M:%S')
                })
            
            df_completed = pd.DataFrame(df_data)
            completed_file = os.path.join(export_dir, 'completed_data.xlsx')
            df_completed.to_excel(completed_file, index=False, engine='openpyxl')
        
        # Failed tasks are dumped as JSON so they can be inspected or re-queued.
        if list_failed:
            list_failed_file = os.path.join(export_dir, 'list_failed_tasks.json')
            with open(list_failed_file, 'w', encoding='utf-8') as f:
                json.dump(list_failed, f, ensure_ascii=False, indent=2)
        
        if detail_failed:
            detail_failed_file = os.path.join(export_dir, 'detail_failed_tasks.json')
            with open(detail_failed_file, 'w', encoding='utf-8') as f:
                json.dump(detail_failed, f, ensure_ascii=False, indent=2)
        
        # Destructive step: wipe Redis now that everything is on disk.
        logger.info("开始清空Redis数据...")
        redis_manager.clear_all_tasks()
        redis_manager.clear_completed_data()
        redis_manager.clear_url_mapping()
        redis_manager.clear_stats()
        logger.info("Redis数据清空完成")
        
        # Reset the in-memory status back to a pristine idle state.
        task_status['progress'] = {
            'total_pages': 0,
            'success_pages': 0,
            'total_details': 0,
            'success_details': 0,
            'failed_details': 0,
            'progress_percentage': 0,
            'is_completed': False
        }
        task_status['logs'] = []
        task_status['running'] = False
        task_status['current_task'] = None
        
        logger.info(f"数据导出成功: {export_dir}")
        return jsonify({
            'success': True, 
            'message': '导出成功',
            'filename': 'completed_data.xlsx',
            'download_path': f'export_{timestamp}/completed_data.xlsx',
            'count': len(all_data) if all_data else 0,
            'export_dir': export_dir
        })
        
    except Exception as e:
        logger.error(f"导出数据失败: {e}")
        return jsonify({'success': False, 'message': f'导出失败: {str(e)}'})


@app.route('/api/task/download/<path:filename>')
def download_file(filename):
    """Serve an exported file from the exports/ directory as an attachment.

    The path is sanitized to a whitelist of characters and checked for
    traversal ('.' is whitelisted, so '..' can survive the substitution and
    must be rejected explicitly).

    Args:
        filename: relative path below exports/, e.g. 'export_x/completed_data.xlsx'.
    """
    try:
        import re
        # Whitelist: letters, digits, underscore, hyphen, dot, path separators.
        safe_filename = re.sub(r'[^a-zA-Z0-9_\-./]', '', filename)
        # Guard against path traversal / absolute paths.
        if '..' in safe_filename or safe_filename.startswith('/'):
            # Fixed: log the actual offending path (the original logged a
            # literal "(unknown)" placeholder instead of the filename).
            logger.error(f"不安全的文件路径: {filename}")
            return jsonify({'success': False, 'message': '不安全的文件路径'})
        
        filepath = os.path.join('exports', safe_filename)
        
        # Only serve existing regular files.
        if os.path.exists(filepath) and os.path.isfile(filepath):
            logger.info(f"下载文件: {filepath}")
            return send_file(filepath, as_attachment=True)
        else:
            logger.error(f"文件不存在: {filepath}")
            return jsonify({'success': False, 'message': '文件不存在'})
    except Exception as e:
        logger.error(f"下载文件失败: {e}")
        return jsonify({'success': False, 'message': f'下载失败: {str(e)}'})


@app.route('/api/task/clear', methods=['POST'])
def clear_task_data():
    """Wipe every Redis collection and reset the in-memory status (no export)."""
    try:
        if redis_manager is None:
            init_spider()

        logger.info("开始清空Redis数据...")

        # Clear all Redis collections in turn.
        for wipe in (
            redis_manager.clear_all_tasks,
            redis_manager.clear_completed_data,
            redis_manager.clear_url_mapping,
            redis_manager.clear_stats,
        ):
            wipe()

        logger.info("Redis数据清空完成")

        # Reset the in-memory snapshot to a pristine idle state.
        task_status['progress'] = {
            'total_pages': 0,
            'success_pages': 0,
            'total_details': 0,
            'success_details': 0,
            'failed_details': 0,
            'progress_percentage': 0,
            'is_completed': False,
        }
        task_status['logs'] = []
        task_status['running'] = False
        task_status['current_task'] = None

        logger.info("任务数据清空成功")
        return jsonify({'success': True, 'message': '所有任务数据已清空'})

    except Exception as exc:
        logger.error(f"清空任务数据失败: {exc}")
        return jsonify({'success': False, 'message': f'清空失败: {str(exc)}'})


def start_scraping_task(category_url, start_page, end_page):
    """Launch a scraping run in a background daemon thread.

    Clears any stale tasks/stats, seeds task_status and the Redis stats hash,
    enqueues the list-page tasks, then starts the scraper threads.  Returns
    immediately; failures inside the worker are logged and flip the status
    back to idle.
    """
    page_count = end_page - start_page + 1

    def worker():
        try:
            # Drop leftovers from any previous run first.
            logger.info("清理旧任务...")
            redis_manager.clear_all_tasks()
            redis_manager.clear_stats()

            task_status['running'] = True
            task_status['current_task'] = {
                'category_url': category_url,
                'start_page': start_page,
                'end_page': end_page,
            }
            task_status['start_time'] = datetime.now()
            task_status['progress'] = {
                'total_pages': page_count,
                'success_pages': 0,
                'total_details': 0,
                'success_details': 0,
                'failed_details': 0,
                'progress_percentage': 0,
                'is_completed': False,
            }

            # Seed the shared stats hash the scrapers will update.
            redis_manager.set_stats({
                'total_pages': page_count,
                'success_pages': 0,
                'total_detail_tasks': 0,
                'success_detail_tasks': 0,
                'failed_detail_tasks': 0,
                'task_start_time': int(time.time()),
                'last_activity_time': int(time.time()),
            })

            # Enqueue work, then spin up the scraper threads.
            create_tasks(category_url, start_page, end_page)
            start_scrapers()

        except Exception as exc:
            logger.error(f"任务执行失败: {exc}")
            task_status['running'] = False
            task_status['current_task'] = None

    thread = threading.Thread(target=worker, name="TaskStarter", daemon=True)
    thread.start()


def create_tasks(category_url, start_page, end_page):
    """Enqueue one list-page task per page in [start_page, end_page].

    Logs the number of tasks created; re-raises on any queueing failure.
    """
    try:
        page = start_page
        while page <= end_page:
            redis_manager.add_list_task(category_url, page)
            page += 1

        actual_pages = end_page - start_page + 1
        logger.info(f"创建了 {actual_pages} 个列表页任务 (从第{start_page}页到第{end_page}页)")

    except Exception as exc:
        logger.error(f"创建任务失败: {exc}")
        raise


def start_scrapers():
    """Start the list-page and detail-page scrapers as daemon threads.

    Re-raises on failure so the caller can mark the task as failed.
    """
    try:
        # List-page scraper thread.
        worker = threading.Thread(target=list_scraper.run_continuous, name="ListScraper")
        worker.daemon = True
        worker.start()

        # Detail-page scraper thread.
        worker = threading.Thread(target=detail_scraper.run_continuous, name="DetailScraper")
        worker.daemon = True
        worker.start()

        logger.info("爬虫已启动")

    except Exception as exc:
        logger.error(f"启动爬虫失败: {exc}")
        raise


def update_progress():
    """Refresh task_status['progress'] and task_status['logs'] from Redis.

    Reads the stats hash and the queue sizes, computes a percentage,
    decides whether the run is complete, and fills the UI log panel —
    either with the tail of the scraper log files (while running) or with
    a summary of the final counters (when stopped/completed).  All errors
    are logged and swallowed so the status endpoint never fails.
    """
    try:
        # Nothing to do before init_spider() has run.
        if redis_manager is None:
            logger.warning("redis_manager未初始化，跳过进度更新")
            return
        
        # Pull the stats hash (always, even when no task is running, so the
        # UI still shows the last run's numbers).
        stats_data = redis_manager.get_stats_data()
        total_pages = int(stats_data.get('total_pages', 0))
        success_pages = int(stats_data.get('success_pages', 0))
        total_details = int(stats_data.get('total_detail_tasks', 0))
        success_details = int(stats_data.get('success_detail_tasks', 0))
        failed_details = int(stats_data.get('failed_detail_tasks', 0))
        last_activity_time = int(stats_data.get('last_activity_time', 0))
        
        # Current queue depths.
        stats = redis_manager.get_stats()
        pending_list_tasks = stats.get('list_tasks', 0)
        pending_detail_tasks = stats.get('detail_tasks', 0)
        
        # Progress = (succeeded + failed) / total detail tasks.
        completed_and_failed = success_details + failed_details
        progress_percentage = (completed_and_failed / total_details * 100) if total_details > 0 else 0
        
        # A run counts as complete only when ALL of the following hold:
        # 1. detail tasks were actually created (total_details > 0)
        # 2. every list-page task has been consumed (pending_list_tasks == 0)
        # 3. every detail task is accounted for (completed_and_failed >= total_details)
        #    NOTE: pending_detail_tasks == 0 is NOT sufficient — a task leaves
        #    the queue as soon as it is picked up, but may still be in flight
        #    or retrying, and not yet counted as success/failed.
        # 4. the scrapers have been quiet for at least 10 seconds.
        current_time = int(time.time())
        time_since_last_activity = current_time - last_activity_time if last_activity_time > 0 else 0
        is_completed = (total_details > 0 and 
                       pending_list_tasks == 0 and 
                       completed_and_failed >= total_details and 
                       time_since_last_activity >= 10)
        
        # Publish the fresh counters to the shared status dict.
        task_status['progress'].update({
            'total_pages': total_pages,
            'success_pages': success_pages,
            'total_details': total_details,
            'success_details': success_details,
            'failed_details': failed_details,
            'progress_percentage': progress_percentage,
            'is_completed': is_completed
        })
        
        # Flip the run back to idle once it has finished.
        if is_completed and task_status['running']:
            task_status['running'] = False
            task_status['current_task'] = None
            logger.info("所有任务已完成")
        
        # Populate the UI log panel.
        if task_status['running']:
            # Running: tail the scraper log files for live output.
            try:
                log_files = [
                    f'logs/{SPIDER_NAME}/main.log', 
                    f'logs/{SPIDER_NAME}/list_scraper.log', 
                    f'logs/{SPIDER_NAME}/detail_scraper.log'
                ]
                recent_logs = []
                
                # Last 10 lines of each file, concatenated.
                for log_file in log_files:
                    if os.path.exists(log_file):
                        with open(log_file, 'r', encoding='utf-8') as f:
                            lines = f.readlines()
                            recent_logs.extend(lines[-10:])
                
                if recent_logs:
                    processed_logs = []
                    # Parse "time - LEVEL - message" lines; keep the last 20.
                    for line in recent_logs[-20:]:
                        if line.strip():
                            parts = line.strip().split(' - ', 2)
                            if len(parts) >= 3:
                                time_str = parts[0]
                                level = parts[1]
                                message = parts[2]
                                
                                if level in ['INFO', 'WARNING', 'ERROR']:
                                    processed_logs.append({
                                        'time': time_str,
                                        'message': f"[{level}] {message}"
                                    })
                    
                    task_status['logs'] = processed_logs[-20:]
            except Exception as e:
                logger.error(f"读取爬虫日志失败: {e}")
        else:
            # Idle/finished: synthesize a status summary instead of tailing logs.
            try:
                # NOTE: intentionally rebinds current_time (int above) to a
                # display string here; the int value is no longer needed.
                current_time = datetime.now().strftime('%H:%M:%S')
                status_logs = []
                
                if is_completed:
                    status_logs.append({
                        'time': current_time,
                        'message': "✅ 所有任务已完成，可以导出数据"
                    })
                    
                    status_logs.append({
                        'time': current_time,
                        'message': f"📊 最终统计 - 总页数: {total_pages}, 成功页数: {success_pages}"
                    })
                    
                    status_logs.append({
                        'time': current_time,
                        'message': f"📊 详情统计 - 总详情: {total_details}, 成功: {success_details}, 失败: {failed_details}"
                    })
                    
                    if total_details > 0:
                        success_rate = (success_details / total_details * 100)
                        status_logs.append({
                            'time': current_time,
                            'message': f"📈 成功率: {success_rate:.1f}%"
                        })
                else:
                    status_logs.append({
                        'time': current_time,
                        'message': "⏹️ 任务已停止"
                    })
                    
                    if total_details > 0:
                        status_logs.append({
                            'time': current_time,
                            'message': f"📊 当前进度 - 成功: {success_details}/{total_details} ({progress_percentage:.1f}%)"
                        })
                
                task_status['logs'] = status_logs
            except Exception as e:
                logger.error(f"生成状态日志失败: {e}")
            
    except Exception as e:
        logger.error(f"更新进度失败: {e}")


if __name__ == '__main__':
    # Entry point: initialize the spider eagerly, then serve the web UI.
    logger.info("启动Houzz Web服务...")
    init_spider()
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader — on
    # host 0.0.0.0 that is remote-code-execution if reachable from an
    # untrusted network; disable outside local development — confirm.
    app.run(host='0.0.0.0', port=8899, debug=True)
