import time
from anjuke_spider_enhanced import EnhancedAnjukeRentSpider
from config_enhanced import *
import sys
import os


def clear_screen():
    """Clear the terminal screen (``cls`` on Windows, ``clear`` elsewhere)."""
    command = 'cls' if os.name == 'nt' else 'clear'
    os.system(command)


def print_banner():
    """Print the ASCII-art startup banner to stdout."""
    print("""
    ╔══════════════════════════════════════════════╗
    ║            安居客租房信息爬虫工具 v2.0          ║
    ║             Enhanced Anjuke Spider          ║
    ╚══════════════════════════════════════════════╝
    """)


def get_user_choice(options, title, allow_custom=False):
    """Show a numbered menu and return the key the user picks.

    Args:
        options: mapping whose keys are the menu labels (insertion order
            determines numbering).
        title: heading printed above the menu.
        allow_custom: when True, an extra "自定义" entry is appended after
            the regular options.

    Returns:
        The chosen key from *options*, the string ``"custom"`` when the
        extra custom slot is selected, or ``None`` on empty input.
    """
    print(f"\n{title}:")
    keys = list(options)
    for i, key in enumerate(keys, 1):
        print(f"  {i}. {key}")

    if allow_custom:
        print(f"  {len(keys) + 1}. 自定义")

    # Bug fix: the displayed range must include the extra "custom" slot;
    # the original always prompted "1-{len(options)}" even with allow_custom.
    max_choice = len(keys) + 1 if allow_custom else len(keys)

    while True:
        try:
            choice = input(f"请选择 (1-{max_choice}): ").strip()
            if not choice:
                return None

            if allow_custom and choice == str(len(keys) + 1):
                return "custom"

            choice_num = int(choice)
            if 1 <= choice_num <= len(keys):
                return keys[choice_num - 1]
            else:
                print(f"请输入 1-{max_choice} 之间的数字")
        except ValueError:
            print("请输入有效的数字")


def test_mysql_connection():
    """Probe the MySQL connection by creating a spider and running a COUNT query.

    Prints a human-readable status line and returns True when the database
    is reachable, False otherwise.
    """
    spider = None
    try:
        spider = EnhancedAnjukeRentSpider(config=ENHANCED_CONFIG)
        if not spider.db_manager.connection:
            print("✗ MySQL连接失败")
            return False
        # Smoke-test with a real query so a half-open connection is detected.
        result = spider.db_manager.execute_query("SELECT COUNT(*) as count FROM house_info", fetch=True)
        if result:
            print(f"✓ MySQL连接成功，当前数据库中有 {result[0][0]} 条房源记录")
        else:
            print("✓ MySQL连接成功")
        return True
    except Exception as e:
        print(f"✗ MySQL连接异常: {e}")
        return False
    finally:
        # Bug fix: the original only closed the spider on the success path,
        # leaking it when the connection was absent or the query raised.
        if spider is not None:
            try:
                spider.close()
            except Exception:
                pass  # best-effort cleanup; nothing useful to do here


def main():
    """Interactive single-run crawl.

    Flow: probe MySQL, prompt for every search filter (city, district, price,
    room type, source), crawl in fast or normal mode, then deduplicate,
    export to the enabled storage targets and print summary statistics.
    """
    clear_screen()
    print_banner()

    # Probe MySQL up front so the user can abort if persistence is unavailable.
    print("正在测试MySQL连接...")
    mysql_connected = test_mysql_connection()

    if not mysql_connected:
        print("\n⚠ 警告: MySQL连接失败，数据将无法保存到数据库")
        print("是否继续? (y/N): ")
        continue_choice = input().strip().lower()
        if continue_choice != 'y':
            print("程序已退出")
            return

    print("=" * 60)

    spider = EnhancedAnjukeRentSpider(
        use_proxy=ENHANCED_CONFIG['use_proxy'],
        config=ENHANCED_CONFIG
    )

    print("支持的城市:", ", ".join(SUPPORTED_CITIES))
    print("=" * 60)

    # Echo the configured storage targets before asking anything.
    print("存储配置:")
    storage_config = ENHANCED_CONFIG.get('storage_options', {})
    mysql_status = "✓ 启用" if storage_config.get('save_to_mysql', True) and mysql_connected else "✗ 禁用"
    excel_status = "✓ 启用" if storage_config.get('save_to_excel', False) else "✗ 禁用"
    csv_status = "✓ 启用" if storage_config.get('save_to_csv', False) else "✗ 禁用"

    print(f"  MySQL存储: {mysql_status}")
    print(f"  Excel存储: {excel_status}")
    print(f"  CSV存储: {csv_status}")
    print("=" * 60)

    try:
        # --- City selection (menu of supported cities, free text fallback) ---
        city_choice = get_user_choice(
            {city: city for city in SUPPORTED_CITIES},
            "选择城市"
        )
        if not city_choice:
            city = input("请输入城市名称: ").strip()
        else:
            city = city_choice

        # --- District selection ---
        city_info = spider.get_city_info(city)
        if city_info and city_info['districts']:
            district_options = {'全部': None}
            district_options.update(city_info['districts'])
            district_choice = get_user_choice(district_options, "选择区域")
            district = district_choice if district_choice != '全部' else None
        else:
            district = input("请输入区域名称 (留空则搜索全市): ").strip() or None

        # --- Price range ---
        price_choice = get_user_choice(PRICE_RANGES, "选择价格范围", allow_custom=True)
        if price_choice == "custom":
            min_price = input("请输入最低价格 (元/月): ").strip() or None
            max_price = input("请输入最高价格 (元/月): ").strip() or None
        elif price_choice:
            min_price, max_price = PRICE_RANGES[price_choice]
            # A preset with both bounds falsy means "unspecified" — ask for them.
            if not min_price and not max_price:
                min_price = input("请输入最低价格 (元/月): ").strip() or None
                max_price = input("请输入最高价格 (元/月): ").strip() or None
        else:
            min_price = max_price = None

        # --- Room type ---
        room_choice = get_user_choice(ROOM_TYPES, "选择房间类型")
        room_type = room_choice if room_choice != '不限' else None

        # --- Listing source ---
        source_choice = get_user_choice(SOURCE_TYPES, "选择房源来源")
        source_type = source_choice if source_choice != '全部' else None

        # --- Crawl settings ---
        try:
            max_pages = int(input(f"爬取页数 (默认{ENHANCED_CONFIG['max_pages']}): ") or ENHANCED_CONFIG['max_pages'])
        except ValueError:  # bug fix: was a bare except that also swallowed Ctrl+C
            max_pages = ENHANCED_CONFIG['max_pages']

        get_details = input("获取详细房源信息? (y/N): ").strip().lower() == 'y'

        # Crawl mode: fetching per-listing details is slow, so offer fast mode.
        if get_details:
            print("\n获取详情信息会显著增加爬取时间")
            print("建议选择快速模式以提高效率")
            mode_choice = get_user_choice(
                {'普通模式': 'normal', '快速模式': 'fast'},
                "选择爬取模式"
            )
            use_fast_mode = (mode_choice == '快速模式')
        else:
            use_fast_mode = True  # fast mode by default when details are skipped

        remove_dup = input("去除重复房源? (Y/n): ").strip().lower() != 'n'

        # --- Storage options ---
        print("\n存储选项配置:")
        save_to_mysql = input("保存到MySQL数据库? (Y/n): ").strip().lower() != 'n'
        save_to_excel = input("同时保存到Excel文件? (y/N): ").strip().lower() == 'y'
        save_to_csv = input("同时保存到CSV文件? (y/N): ").strip().lower() == 'y'
        save_analysis = input("生成分析报告? (Y/n): ").strip().lower() != 'n'

        # Push the chosen options onto the spider instance.
        spider.storage_options['save_to_mysql'] = save_to_mysql and mysql_connected
        spider.storage_options['save_to_excel'] = save_to_excel
        spider.storage_options['save_to_csv'] = save_to_csv
        spider.storage_options['save_analysis_report'] = save_analysis

        # --- Confirmation summary ---
        print("\n" + "=" * 60)
        print("爬取配置确认:")
        print(f"  城市: {city}")
        print(f"  区域: {district or '全市'}")
        print(f"  价格: {min_price or '0'} - {max_price or '不限'}")
        print(f"  户型: {room_type or '不限'}")
        print(f"  来源: {source_type or '全部'}")
        print(f"  页数: {max_pages}")
        print(f"  详情: {'是' if get_details else '否'}")
        print(f"  模式: {'快速' if use_fast_mode else '普通'}")
        print(f"  去重: {'是' if remove_dup else '否'}")
        print(f"  MySQL存储: {'是' if save_to_mysql and mysql_connected else '否'}")
        print(f"  Excel存储: {'是' if save_to_excel else '否'}")
        print(f"  CSV存储: {'是' if save_to_csv else '否'}")
        print(f"  分析报告: {'是' if save_analysis else '否'}")
        print("=" * 60)

        if get_details and not use_fast_mode:
            print("⚠ 注意: 普通模式获取详情可能需要较长时间")
        if save_to_mysql and not mysql_connected:
            print("⚠ 警告: MySQL连接失败，数据将无法保存到数据库")
        print("=" * 60)

        confirm = input("\n确认开始爬取? (Y/n): ").strip().lower()
        if confirm and confirm != 'y':
            print("爬取已取消")
            return

        print("\n开始爬取...")
        start_time = time.time()

        # --- Crawl ---
        if use_fast_mode:
            print("使用快速爬取模式...")
            houses = spider.crawl_fast(
                city=city,
                district=district,
                min_price=min_price,
                max_price=max_price,
                max_pages=max_pages,
                get_details=get_details
            )
        else:
            print("使用普通爬取模式...")
            houses = spider.crawl(
                city=city,
                district=district,
                min_price=min_price,
                max_price=max_price,
                room_type=room_type,
                source_type=source_type,
                max_pages=max_pages,
                get_details=get_details,
                delay=ENHANCED_CONFIG['page_delay']
            )

        total_time = time.time() - start_time

        if not houses:
            print("没有找到符合条件的房源信息")
            return

        print(f"\n爬取完成! 耗时: {total_time:.1f}秒")

        # --- Deduplication ---
        if remove_dup:
            original_count = len(houses)
            houses = spider.remove_duplicates(houses)
            print(f"去重后剩余 {len(houses)} 条数据 (去除 {original_count - len(houses)} 条重复)")

        # --- File exports (only those the user enabled) ---
        saved_files = []

        # Excel export
        if save_to_excel and houses:
            timestamp = int(time.time())
            filename = f"{city}_{district or '全市'}_{timestamp}.xlsx"
            excel_file = spider.save_to_excel(houses, filename)
            if excel_file and os.path.exists(excel_file):
                saved_files.append(excel_file)
                print(f"✓ Excel文件已保存: {excel_file}")
            else:
                print("✗ Excel文件保存失败")

        # CSV export
        if save_to_csv and houses:
            timestamp = int(time.time())
            filename = f"{city}_{district or '全市'}_{timestamp}.csv"
            csv_file = spider.save_to_csv(houses, filename)
            if csv_file and os.path.exists(csv_file):
                saved_files.append(csv_file)
                print(f"✓ CSV文件已保存: {csv_file}")
            else:
                print("✗ CSV文件保存失败")

        # Analysis report
        if save_analysis and houses:
            timestamp = int(time.time())
            filename = f"{city}_{district or '全市'}_{timestamp}_分析报告.txt"
            report_file = spider.save_analysis_report(houses, filename)
            if report_file and os.path.exists(report_file):
                saved_files.append(report_file)
                print(f"✓ 分析报告已保存: {report_file}")
            else:
                print("✗ 分析报告保存失败")

        # --- Summary statistics ---
        analysis = spider.analyze_data(houses)
        if analysis.get('价格统计'):
            print("\n" + "=" * 60)
            print("数据统计:")
            print(f"  总房源数: {analysis['总房源数']}")
            stats = analysis['价格统计']
            print(f"  平均价格: {stats.get('平均价格', 'N/A')} 元/月")
            print(f"  最低价格: {stats.get('最低价格', 'N/A')} 元/月")
            print(f"  最高价格: {stats.get('最高价格', 'N/A')} 元/月")
            print(f"  价格区间: {stats.get('价格区间', 'N/A')}")

        # --- MySQL save status ---
        if save_to_mysql and mysql_connected and spider.db_manager.connection:
            result = spider.db_manager.execute_query("SELECT COUNT(*) as count FROM house_info", fetch=True)
            if result:
                print(f"\n✓ 数据已保存到MySQL，当前数据库中共有 {result[0][0]} 条房源记录")
        elif save_to_mysql and not mysql_connected:
            print("\n⚠ 数据未保存到MySQL，因为数据库连接失败")

        # --- Saved file listing ---
        if saved_files:
            print(f"\n✓ 成功保存 {len(saved_files)} 个文件:")
            for saved_path in saved_files:
                # bug fix: dropped an unused getctime() call the original made here
                file_size = os.path.getsize(saved_path) if os.path.exists(saved_path) else 0
                print(f"  - {saved_path} ({file_size} 字节)")

        print(f"\n总处理时间: {total_time:.1f}秒")
        if houses:
            print(f"处理速度: {len(houses) / total_time:.1f} 条/秒")

    except KeyboardInterrupt:
        print("\n\n用户中断爬取")
    except Exception as e:
        print(f"\n发生错误: {e}")
        import traceback
        traceback.print_exc()
    finally:
        # Bug fix: always release spider resources. The original leaked the
        # spider on the early returns (crawl cancelled, no houses found), and
        # its `'spider' in locals()` guards were dead code since the spider is
        # created before the try block.
        spider.close()


def batch_mode():
    """Batch mode: run a list of crawl tasks loaded from a JSON config file.

    Each task is a dict with keys such as city/district/min_price/max_price/
    room_type/source_type/max_pages/get_details. In this mode results are
    persisted to MySQL only; per-task file exports are disabled.
    """
    clear_screen()
    print_banner()

    print("批量处理模式")
    print("=" * 60)

    # Probe MySQL up front so the user can abort if persistence is unavailable.
    print("正在测试MySQL连接...")
    mysql_connected = test_mysql_connection()

    if not mysql_connected:
        print("\n⚠ 警告: MySQL连接失败，数据将无法保存到数据库")
        print("是否继续? (y/N): ")
        continue_choice = input().strip().lower()
        if continue_choice != 'y':
            print("程序已退出")
            return

    print("=" * 60)

    # Locate the batch task configuration file.
    config_file = input("请输入批量任务配置文件路径 (默认: batch_config.json): ").strip() or "batch_config.json"

    if not os.path.exists(config_file):
        print(f"配置文件 {config_file} 不存在")
        print("请先创建批量任务配置文件")
        return

    spider = None
    try:
        import json
        with open(config_file, 'r', encoding='utf-8') as f:
            tasks = json.load(f)

        if not tasks:
            print("配置文件中没有任务")
            return

        print(f"找到 {len(tasks)} 个任务")
        print("=" * 60)

        # Show the task list before asking for confirmation.
        for i, task in enumerate(tasks, 1):
            print(f"{i}. {task.get('city', '未知')}-{task.get('district', '全市')}")

        print("=" * 60)

        confirm = input("\n确认执行批量任务? (Y/n): ").strip().lower()
        if confirm and confirm != 'y':
            print("批量任务已取消")
            return

        spider = EnhancedAnjukeRentSpider(config=ENHANCED_CONFIG)

        # Batch mode persists to MySQL only; per-task file exports are off.
        spider.storage_options['save_to_mysql'] = mysql_connected
        spider.storage_options['save_to_excel'] = False
        spider.storage_options['save_to_csv'] = False
        spider.storage_options['save_analysis_report'] = False

        all_results = []
        total_start_time = time.time()

        for i, task in enumerate(tasks, 1):
            print(f"\n{'=' * 50}")
            print(f"处理任务 {i}/{len(tasks)}: {task.get('city', '未知')}-{task.get('district', '全市')}")
            print(f"{'=' * 50}")

            try:
                houses = spider.crawl(
                    city=task.get('city'),
                    district=task.get('district'),
                    min_price=task.get('min_price'),
                    max_price=task.get('max_price'),
                    room_type=task.get('room_type'),
                    source_type=task.get('source_type'),
                    max_pages=task.get('max_pages', 3),
                    get_details=task.get('get_details', False)
                )

                if houses:
                    # Tag each record with its originating task.
                    # Bug fix: use .get() like every other access — a task
                    # without 'city' previously raised KeyError here, right
                    # after its crawl had already succeeded.
                    for house in houses:
                        house['任务来源'] = f"{task.get('city', '未知')}-{task.get('district', '全市')}"
                    all_results.extend(houses)

                    print(f"✓ 任务完成，获取 {len(houses)} 条房源信息")
                else:
                    print("✗ 任务完成，未获取到房源信息")

                # Pause between tasks to avoid hammering the site.
                if i < len(tasks):
                    delay = 10
                    print(f"等待 {delay} 秒后处理下一个任务...")
                    time.sleep(delay)

            except Exception as e:
                print(f"✗ 处理任务 {i} 时出错: {e}")
                continue

        total_time = time.time() - total_start_time

        print(f"\n批量处理完成! 总耗时: {total_time:.1f}秒")
        print(f"共处理 {len(tasks)} 个任务，获取 {len(all_results)} 条房源信息")

        # Report how many records the database holds after the batch.
        if mysql_connected and spider.db_manager.connection:
            result = spider.db_manager.execute_query("SELECT COUNT(*) as count FROM house_info", fetch=True)
            if result:
                print(f"✓ 所有数据已保存到MySQL，当前数据库中共有 {result[0][0]} 条房源记录")

    except Exception as e:
        print(f"批量处理出错: {e}")
        import traceback
        traceback.print_exc()
    finally:
        # Bug fix: always release spider resources — the original only closed
        # the spider on the success path, leaking it when the batch aborted.
        if spider is not None:
            spider.close()


if __name__ == "__main__":
    clear_screen()
    print_banner()

    print("请选择运行模式:")
    print("1. 交互式爬取 (单次任务)")
    print("2. 批量处理模式")
    print("3. 测试MySQL连接")

    choice = input("\n请选择 (1-3): ").strip()

    # Dispatch table: menu number -> entry point.
    mode_handlers = {
        "1": main,
        "2": batch_mode,
        "3": test_mysql_connection,
    }
    handler = mode_handlers.get(choice)
    if handler is not None:
        handler()
    else:
        print("无效选择，使用交互式爬取模式")
        main()