from flask import Flask, jsonify, request
from flask_cors import CORS
import pymysql
from datetime import datetime
import json
import os
from spider_manager import spider_manager

app = Flask(__name__)
CORS(app)

# 数据库配置
# Database configuration. Values can be overridden via environment variables;
# the fallbacks match the original hardcoded values, so behavior is unchanged
# when no environment is set.
db_config = {
    'host': os.environ.get('DB_HOST', 'localhost'),
    'user': os.environ.get('DB_USER', 'root'),
    # NOTE(review): hardcoded fallback password; set DB_PASSWORD in the
    # environment for any non-local deployment.
    'password': os.environ.get('DB_PASSWORD', '123456'),
    'database': os.environ.get('DB_NAME', 'dangdang_db'),
    'charset': 'utf8mb4'
}

def get_conn():
    """Return a new pymysql connection built from ``db_config``."""
    return pymysql.connect(**db_config)

# 数据总览接口
@app.route('/api/dashboard/overview')
def dashboard_overview():
    """Dashboard overview: product total, distinct category count, average
    price (zero prices treated as missing) and the latest update timestamp.

    Returns JSON ``{totalProducts, totalCategories, avgPrice, lastUpdate}``;
    on failure returns ``{"error": ...}`` with HTTP 500.
    """
    try:
        conn = get_conn()
        try:
            cursor = conn.cursor()

            # Total number of products
            cursor.execute("SELECT COUNT(*) FROM products")
            total_products = cursor.fetchone()[0]

            # Number of distinct third-level categories
            cursor.execute("SELECT COUNT(DISTINCT category3) FROM products")
            total_categories = cursor.fetchone()[0]

            # Average price; price = 0 rows are excluded as missing data
            cursor.execute("SELECT AVG(price) FROM products WHERE price > 0")
            avg_price = float(cursor.fetchone()[0] or 0)

            # Most recent update timestamp
            cursor.execute("SELECT MAX(updated_at) FROM products")
            last_update = cursor.fetchone()[0]
        finally:
            # Close even when a query raises (the original leaked the
            # connection on any SQL error).
            conn.close()

        return jsonify({
            "totalProducts": total_products,
            "totalCategories": total_categories,
            "avgPrice": round(avg_price, 2),
            "lastUpdate": str(last_update) if last_update else "暂无数据"
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/dashboard/price-distribution')
def price_distribution():
    """Price distribution: bucket products into three price bands.

    Returns a JSON array of ``{value, name}`` pie-chart items. Rows with a
    NULL price fall into no band, matching the original three COUNT queries.
    """
    try:
        conn = get_conn()
        try:
            cursor = conn.cursor()
            # One table scan with conditional aggregation instead of three
            # separate COUNT(*) queries.
            cursor.execute("""
                SELECT
                    SUM(CASE WHEN price < 50 THEN 1 ELSE 0 END),
                    SUM(CASE WHEN price >= 50 AND price < 100 THEN 1 ELSE 0 END),
                    SUM(CASE WHEN price >= 100 THEN 1 ELSE 0 END)
                FROM products
            """)
            row = cursor.fetchone()
        finally:
            # Always release the connection, even if the query raised.
            conn.close()

        # SUM() yields NULL on an empty table; normalize to 0 like COUNT did.
        low, mid, high = (int(v or 0) for v in row)

        return jsonify([
            {"value": low, "name": "0-50元"},
            {"value": mid, "name": "50-100元"},
            {"value": high, "name": "100元以上"}
        ])
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/dashboard/category-distribution')
def category_distribution():
    """Category distribution: top-10 third-level categories by product count.

    Returns JSON ``{categories: [...], counts: [...]}`` in descending order.
    """
    try:
        conn = get_conn()
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT category3, COUNT(*) FROM products "
                "WHERE category3 IS NOT NULL AND category3 != '' "
                "GROUP BY category3 ORDER BY COUNT(*) DESC LIMIT 10"
            )
            data = cursor.fetchall()
        finally:
            # Close even on query failure (original leaked here).
            conn.close()

        return jsonify({
            "categories": [row[0] for row in data],
            "counts": [row[1] for row in data],
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500



# 分类分析接口
@app.route('/api/category/list')
def category_list():
    """Return all non-empty third-level categories as ``{label, value}``
    options for the frontend selector."""
    try:
        conn = get_conn()
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT DISTINCT category3 FROM products "
                "WHERE category3 IS NOT NULL AND category3 != ''"
            )
            data = cursor.fetchall()
        finally:
            # Release the connection even if the query raised.
            conn.close()

        return jsonify([{"label": row[0], "value": row[0]} for row in data])
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/category/analysis')
def category_analysis():
    """Category analysis: per-category count, price stats and comment totals.

    Query args:
        category: optional exact third-level category; when absent the top 20
            categories by product count are returned.

    Returns JSON ``{details: [...], chartData: {categories, counts, prices}}``.
    """
    try:
        category = request.args.get('category', '')

        # Shared SELECT — the two branches only differ in filter/order/limit.
        base_query = """
            SELECT category3, COUNT(*) as count, AVG(price) as avg_price,
                   MIN(price) as min_price, MAX(price) as max_price,
                   SUM(comments) as total_comments
            FROM products
        """

        conn = get_conn()
        try:
            cursor = conn.cursor()
            if category:
                cursor.execute(
                    base_query + " WHERE category3 = %s GROUP BY category3",
                    (category,),
                )
            else:
                cursor.execute(base_query + """
                    WHERE category3 IS NOT NULL AND category3 != ''
                    GROUP BY category3
                    ORDER BY count DESC
                    LIMIT 20
                """)
            data = cursor.fetchall()
        finally:
            # Close even when the query raises (original leaked here).
            conn.close()

        details = []
        chart_data = {"categories": [], "counts": [], "prices": []}

        for row in data:
            avg_price = round(float(row[2] or 0), 2)
            details.append({
                "category": row[0],
                "productCount": row[1],
                "avgPrice": avg_price,
                "minPrice": round(float(row[3] or 0), 2),
                "maxPrice": round(float(row[4] or 0), 2),
                "commentCount": row[5] or 0
            })
            chart_data["categories"].append(row[0])
            chart_data["counts"].append(row[1])
            chart_data["prices"].append(avg_price)

        return jsonify({
            "details": details,
            "chartData": chart_data
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/api/category/detail/<category>')
def category_detail(category):
    """Category detail: aggregate stats plus the first 20 products of one
    third-level category.

    Returns JSON ``{category, productCount, avgPrice, products: [...]}``.
    """
    try:
        conn = get_conn()
        try:
            cursor = conn.cursor()

            # Aggregate stats for the category (always returns one row).
            cursor.execute(
                "SELECT COUNT(*) as count, AVG(price) as avg_price "
                "FROM products WHERE category3 = %s",
                (category,),
            )
            stats = cursor.fetchone()

            # A sample of products in the category.
            cursor.execute(
                "SELECT name, price, author, publisher, comments, description "
                "FROM products WHERE category3 = %s LIMIT 20",
                (category,),
            )
            products = cursor.fetchall()
        finally:
            # Close even when a query raises (original leaked here).
            conn.close()

        return jsonify({
            "category": category,
            "productCount": stats[0],
            "avgPrice": round(float(stats[1] or 0), 2),
            "products": [
                {
                    "title": row[0],
                    "price": row[1],
                    "author": row[2],
                    "publisher": row[3],
                    "commentCount": row[4],
                    "description": row[5]
                } for row in products
            ]
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

# 价格分析接口
@app.route('/api/price/analysis')
def price_analysis():
    """Price analysis: stats, histogram, range distribution and z-score
    anomaly detection over an optionally filtered product set.

    Query args: ``minPrice``, ``maxPrice`` (floats), ``category`` (exact
    third-level category). All filters are applied via parameterized SQL.
    """
    try:
        min_price = request.args.get('minPrice', type=float)
        max_price = request.args.get('maxPrice', type=float)
        category = request.args.get('category', '')

        # Build the shared WHERE clause; every condition uses a %s
        # placeholder so user input never lands in the SQL string.
        where_conditions = ["price > 0"]
        params = []

        if min_price is not None:
            where_conditions.append("price >= %s")
            params.append(min_price)

        if max_price is not None:
            where_conditions.append("price <= %s")
            params.append(max_price)

        if category:
            where_conditions.append("category3 = %s")
            params.append(category)

        where_clause = " AND ".join(where_conditions)

        conn = get_conn()
        try:
            cursor = conn.cursor()

            # Basic price statistics
            cursor.execute(f"""
                SELECT AVG(price), MIN(price), MAX(price), STD(price)
                FROM products
                WHERE {where_clause}
            """, params)
            stats = cursor.fetchone()

            # Histogram over fixed 20-yuan bins
            cursor.execute(f"""
                SELECT
                    CASE
                        WHEN price < 20 THEN '0-20'
                        WHEN price < 40 THEN '20-40'
                        WHEN price < 60 THEN '40-60'
                        WHEN price < 80 THEN '60-80'
                        WHEN price < 100 THEN '80-100'
                        ELSE '100+'
                    END as price_range,
                    COUNT(*) as count
                FROM products
                WHERE {where_clause}
                GROUP BY price_range
                ORDER BY MIN(price)
            """, params)
            histogram = cursor.fetchall()

            # Coarser range distribution for the pie chart
            cursor.execute(f"""
                SELECT
                    CASE
                        WHEN price < 50 THEN '0-50元'
                        WHEN price < 100 THEN '50-100元'
                        WHEN price < 200 THEN '100-200元'
                        ELSE '200元以上'
                    END as price_range,
                    COUNT(*) as count
                FROM products
                WHERE {where_clause}
                GROUP BY price_range
            """, params)
            range_dist = cursor.fetchall()

            # Anomaly detection via z-score. {where_clause} appears THREE
            # times (two subqueries + outer WHERE), so the parameter list
            # must be repeated three times. BUG FIX: the original passed
            # `params` once, making pymysql raise "not enough arguments"
            # whenever any filter was supplied.
            cursor.execute(f"""
                SELECT name, price, category1,
                       ABS(price - (SELECT AVG(price) FROM products WHERE {where_clause})) /
                       (SELECT STD(price) FROM products WHERE {where_clause}) as z_score
                FROM products
                WHERE {where_clause}
                HAVING z_score > 2
                ORDER BY z_score DESC
                LIMIT 10
            """, params * 3)
            anomalies = cursor.fetchall()
        finally:
            # Close even when a query raises (original leaked here).
            conn.close()

        return jsonify({
            "stats": {
                "avgPrice": round(float(stats[0] or 0), 2),
                "minPrice": round(float(stats[1] or 0), 2),
                "maxPrice": round(float(stats[2] or 0), 2),
                "stdPrice": round(float(stats[3] or 0), 2)
            },
            "histogram": {
                "bins": [row[0] for row in histogram],
                "counts": [row[1] for row in histogram]
            },
            "rangeDistribution": [
                {"value": row[1], "name": row[0]} for row in range_dist
            ],
            "anomalies": [
                {
                    "title": row[0],
                    "price": row[1],
                    "category": row[2],
                    "anomalyType": "high" if row[3] > 0 else "low",
                    "deviation": round(abs(row[3]) * 100, 1)
                } for row in anomalies
            ],
            # NOTE(review): trend data below is hard-coded placeholder
            # values, not derived from the database.
            "trend": {
                "dates": ["2024-01", "2024-02", "2024-03", "2024-04", "2024-05"],
                "prices": [45.2, 46.8, 44.5, 47.1, 45.9]
            }
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

# 数据查询接口
@app.route('/api/query/search')
def query_search():
    """Product search with optional filters and pagination.

    Query args: ``title``/``author``/``publisher`` (substring match),
    ``category`` (exact), ``minPrice``/``maxPrice``, ``minComments``,
    ``page`` (1-based), ``pageSize``.

    Returns JSON ``{products, total, stats}``. All user input is bound via
    %s placeholders; only the fixed condition fragments are interpolated.
    """
    try:
        title = request.args.get('title', '')
        category = request.args.get('category', '')
        min_price = request.args.get('minPrice', type=float)
        max_price = request.args.get('maxPrice', type=float)
        author = request.args.get('author', '')
        publisher = request.args.get('publisher', '')
        min_comments = request.args.get('minComments', type=int)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('pageSize', 20, type=int)

        # Build the WHERE clause from parameterized fragments.
        where_conditions = ["1=1"]
        params = []

        if title:
            where_conditions.append("name LIKE %s")
            params.append(f"%{title}%")

        if category:
            where_conditions.append("category3 = %s")
            params.append(category)

        if min_price is not None:
            where_conditions.append("price >= %s")
            params.append(min_price)

        if max_price is not None:
            where_conditions.append("price <= %s")
            params.append(max_price)

        if author:
            where_conditions.append("author LIKE %s")
            params.append(f"%{author}%")

        if publisher:
            where_conditions.append("publisher LIKE %s")
            params.append(f"%{publisher}%")

        if min_comments is not None:
            where_conditions.append("comments >= %s")
            params.append(min_comments)

        where_clause = " AND ".join(where_conditions)
        offset = (page - 1) * page_size

        conn = get_conn()
        try:
            cursor = conn.cursor()

            # Total matching rows (for the pager)
            cursor.execute(f"SELECT COUNT(*) FROM products WHERE {where_clause}", params)
            total = cursor.fetchone()[0]

            # One page of results
            cursor.execute(f"""
                SELECT name, price, author, publisher, category1, comments,
                       publish_date, description, image_url
                FROM products
                WHERE {where_clause}
                ORDER BY id DESC
                LIMIT %s OFFSET %s
            """, params + [page_size, offset])
            products = cursor.fetchall()

            # Summary stats over the whole filtered set
            cursor.execute(f"""
                SELECT AVG(price), MIN(price), MAX(price), COUNT(DISTINCT category3)
                FROM products
                WHERE {where_clause}
            """, params)
            stats = cursor.fetchone()
        finally:
            # Close even when a query raises (original leaked here).
            conn.close()

        return jsonify({
            "products": [
                {
                    # Fixed: ids were page-local (1..pageSize on every page),
                    # duplicating row keys across pages; offset makes them
                    # unique within a result set.
                    "id": offset + i + 1,
                    "title": row[0],
                    "price": row[1],
                    "author": row[2],
                    "publisher": row[3],
                    "category": row[4],
                    "commentCount": row[5],
                    "publishDate": str(row[6]) if row[6] else "",
                    "description": row[7],
                    "image": row[8]
                } for i, row in enumerate(products)
            ],
            "total": total,
            "stats": {
                "avgPrice": round(float(stats[0] or 0), 2),
                "minPrice": round(float(stats[1] or 0), 2),
                "maxPrice": round(float(stats[2] or 0), 2),
                "categoryCount": stats[3]
            }
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500

# 系统监控接口
@app.route('/api/monitor/status')
def monitor_status():
    """System monitor: merge the crawler's stats file with DB-derived
    counters (today's rows, data-quality error count) and a computed uptime.

    Best-effort: a missing/unreadable stats file yields zeroed defaults.
    """
    try:
        # Crawler statistics persisted by the spider as JSON.
        stats_file = os.path.join(os.path.dirname(__file__), '..', 'logs', 'spider_stats.json')
        crawler_running = False
        success_count = 0
        error_count = 0
        start_time = ""
        end_time = ""

        if os.path.exists(stats_file):
            try:
                with open(stats_file, 'r', encoding='utf-8') as f:
                    stats = json.load(f)

                success_count = stats.get('success_count', 0)
                error_count = stats.get('error_count', 0)
                start_time = stats.get('start_time', '')
                end_time = stats.get('end_time', '')

                # A recorded end_time means the crawl has finished.
                crawler_running = not end_time
            except Exception as e:
                # Best-effort read; keep the zeroed defaults on failure.
                print(f"读取统计信息失败: {e}")

        conn = get_conn()
        try:
            cursor = conn.cursor()

            # Rows inserted today
            cursor.execute("SELECT COUNT(*) FROM products WHERE DATE(created_at) = CURDATE()")
            today_scraped = cursor.fetchone()[0]

            # Data-quality errors: rows missing name, price or author
            cursor.execute(
                "SELECT COUNT(*) FROM products WHERE name = '' OR name IS NULL "
                "OR price = 0 OR price IS NULL OR author = '' OR author IS NULL"
            )
            db_error_count = cursor.fetchone()[0]
        finally:
            # Close even when a query raises (original leaked here).
            conn.close()

        # Human-readable run duration from the stats timestamps.
        uptime = "0天0小时0分钟"
        if start_time:
            try:
                start_dt = datetime.fromisoformat(start_time.replace('Z', '+00:00'))
                end_dt = datetime.fromisoformat(end_time.replace('Z', '+00:00')) if end_time else datetime.now()
                duration = end_dt - start_dt
                days = duration.days
                hours = duration.seconds // 3600
                minutes = (duration.seconds % 3600) // 60
                uptime = f"{days}天{hours}小时{minutes}分钟"
            except (ValueError, TypeError, AttributeError):
                # Fixed: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt.
                uptime = "未知"

        return jsonify({
            "crawlerRunning": crawler_running,
            "successCount": success_count,
            "errorCount": error_count + db_error_count,
            "todayScraped": today_scraped,
            "uptime": uptime,
            "totalProducts": success_count + error_count,
            "startTime": start_time,
            "endTime": end_time
        })
    except Exception as e:
        return jsonify({
            "crawlerRunning": False,
            "error": str(e)
        }), 500

@app.route('/api/monitor/tasks')
def monitor_tasks():
    """List crawler tasks with per-category progress derived from the DB.

    Falls back to a single stopped placeholder task when no spiders are
    registered. On error returns a one-element error task list with HTTP 500.
    """
    try:
        spider_status = spider_manager.get_all_spiders_status()
        tasks = []

        if spider_status:
            # One connection for the whole loop — the original reconnected
            # per spider and leaked the connection if a query raised.
            conn = get_conn()
            try:
                cursor = conn.cursor()
                for spider_name, status in spider_status.items():
                    spider_category = status.get('category', '')

                    # Items scraped for this spider's category
                    cursor.execute(
                        "SELECT COUNT(*) FROM products WHERE category3 = %s",
                        (spider_category,),
                    )
                    items_scraped = cursor.fetchone()[0]

                    # Rows with missing name/price/author count as errors
                    cursor.execute(
                        "SELECT COUNT(*) FROM products WHERE category3 = %s "
                        "AND (name = '' OR price = 0 OR author = '')",
                        (spider_category,),
                    )
                    error_count = cursor.fetchone()[0]

                    # Progress assumes at most 200 products per category.
                    progress = min(100, int((items_scraped / 200) * 100)) if items_scraped > 0 else 0

                    tasks.append({
                        "id": len(tasks) + 1,
                        "name": spider_name,
                        "status": status.get('status', 'unknown'),
                        "startTime": status.get('start_time', ''),
                        "endTime": status.get('end_time'),
                        "itemsScraped": items_scraped,
                        "errorCount": error_count,
                        "progress": progress,
                        "category": spider_category,
                        "pid": status.get('pid')
                    })
            finally:
                conn.close()

        # No running spiders: show a placeholder example task.
        if not tasks:
            tasks = [
                {
                    "id": 1,
                    "name": "dangdang_products",
                    "status": "stopped",
                    "startTime": "2024-01-01 10:00:00",
                    "endTime": "2024-01-01 12:00:00",
                    "itemsScraped": 0,
                    "errorCount": 0,
                    "progress": 0,
                    "category": "",
                    "pid": None
                }
            ]

        return jsonify(tasks)

    except Exception as e:
        return jsonify([{
            "id": 1,
            "name": "error",
            "status": "error",
            "startTime": "",
            "endTime": "",
            "itemsScraped": 0,
            "errorCount": 1,
            "progress": 0,
            "category": "",
            "pid": None,
            "error": str(e)
        }]), 500

@app.route('/api/monitor/tasks/<int:task_id>')
def monitor_task_detail(task_id):
    """Return mock detail (including recent logs) for a single crawler task."""
    log_entries = [
        ("2024-01-01 10:00:00", "info", "爬虫启动成功"),
        ("2024-01-01 10:01:00", "info", "开始抓取分类：文学"),
        ("2024-01-01 10:02:00", "warning", "页面加载超时"),
    ]
    detail = {
        "id": task_id,
        "name": "dangdang_spider",
        "status": "running",
        "startTime": "2024-01-01 10:00:00",
        "endTime": None,
        "itemsScraped": 1000,
        "errorCount": 5,
        "progress": 75,
        "logs": [
            {"timestamp": ts, "level": lvl, "message": msg}
            for ts, lvl, msg in log_entries
        ],
    }
    return jsonify(detail)

@app.route('/api/monitor/tasks/<int:task_id>/stop', methods=['POST'])
def monitor_task_stop(task_id):
    """Acknowledge a stop request for the given task (mock implementation)."""
    response = {"success": True, "message": f"任务 {task_id} 停止成功"}
    return jsonify(response)

@app.route('/api/monitor/logs', methods=['DELETE'])
def monitor_logs_delete():
    """Acknowledge a log-cleanup request (mock implementation)."""
    response = {"success": True, "message": "日志清理成功"}
    return jsonify(response)

@app.route('/api/monitor/logs')
def monitor_logs():
    """Aggregate crawler logs from JSON files, fall back to DB data-quality
    warnings when no files exist, then filter by level and paginate.

    Query args: ``page`` (1-based), ``pageSize``, ``level`` ('' or 'all'
    means no filter).
    """
    page = request.args.get('page', 1, type=int)
    page_size = request.args.get('pageSize', 100, type=int)
    level = request.args.get('level', '')

    try:
        all_logs = []

        # Spider error log file (list of error dicts).
        error_log_file = os.path.join(os.path.dirname(__file__), '..', 'logs', 'spider_errors.json')
        if os.path.exists(error_log_file):
            try:
                with open(error_log_file, 'r', encoding='utf-8') as f:
                    error_logs = json.load(f)

                for log in error_logs:
                    all_logs.append({
                        "id": len(all_logs) + 1,
                        "timestamp": log.get('timestamp', ''),
                        "level": log.get('type', 'error'),
                        "message": log.get('message', ''),
                        "spider": "爬虫",
                        "details": log.get('details', {})
                    })
            except Exception as e:
                # Best-effort: skip an unreadable error-log file.
                print(f"读取错误日志失败: {e}")

        # Spider statistics file, surfaced as a single info log entry.
        stats_file = os.path.join(os.path.dirname(__file__), '..', 'logs', 'spider_stats.json')
        if os.path.exists(stats_file):
            try:
                with open(stats_file, 'r', encoding='utf-8') as f:
                    stats = json.load(f)

                all_logs.append({
                    "id": len(all_logs) + 1,
                    "timestamp": stats.get('end_time', ''),
                    "level": "info",
                    "message": f"爬虫统计: 成功 {stats.get('success_count', 0)} 个，失败 {stats.get('error_count', 0)} 个，总计 {stats.get('total_count', 0)} 个",
                    "spider": "统计",
                    "details": stats
                })
            except Exception as e:
                print(f"读取统计信息失败: {e}")

        # No log files at all: synthesize warnings from DB data-quality checks.
        if not all_logs:
            conn = get_conn()
            try:
                cursor = conn.cursor()

                cursor.execute("SELECT COUNT(*) FROM products WHERE name = '' OR name IS NULL")
                empty_name_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM products WHERE price = 0 OR price IS NULL")
                empty_price_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM products WHERE author = '' OR author IS NULL")
                empty_author_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM products WHERE publisher = '' OR publisher IS NULL")
                empty_publisher_count = cursor.fetchone()[0]
            finally:
                # Close even when a query raises (original leaked here).
                conn.close()

            if empty_name_count > 0:
                all_logs.append({
                    "id": 1,
                    "timestamp": datetime.now().isoformat(),
                    "level": "warning",
                    "message": f"发现 {empty_name_count} 个商品名称缺失",
                    "spider": "数据质量检查"
                })

            if empty_price_count > 0:
                all_logs.append({
                    "id": 2,
                    "timestamp": datetime.now().isoformat(),
                    "level": "warning",
                    "message": f"发现 {empty_price_count} 个商品价格缺失",
                    "spider": "数据质量检查"
                })

            if empty_author_count > 0:
                all_logs.append({
                    "id": 3,
                    "timestamp": datetime.now().isoformat(),
                    "level": "warning",
                    "message": f"发现 {empty_author_count} 个商品作者信息缺失",
                    "spider": "数据质量检查"
                })

            if empty_publisher_count > 0:
                all_logs.append({
                    "id": 4,
                    "timestamp": datetime.now().isoformat(),
                    "level": "warning",
                    "message": f"发现 {empty_publisher_count} 个商品出版社信息缺失",
                    "spider": "数据质量检查"
                })

        # Newest first (timestamps are ISO-format strings, so string sort works).
        all_logs.sort(key=lambda x: x['timestamp'], reverse=True)

        # Optional level filter.
        if level and level != 'all':
            all_logs = [log for log in all_logs if log['level'] == level]

        # Pagination over the (filtered) list.
        start_idx = (page - 1) * page_size
        paginated_logs = all_logs[start_idx:start_idx + page_size]

        return jsonify({
            "logs": paginated_logs,
            "total": len(all_logs),
            "page": page,
            "pageSize": page_size
        })

    except Exception as e:
        return jsonify({
            "logs": [{
                "id": 1,
                "timestamp": datetime.now().isoformat(),
                "level": "error",
                "message": f"获取日志失败: {str(e)}",
                "spider": "系统"
            }],
            "total": 1
        }), 500



@app.route('/api/monitor/performance')
def monitor_performance():
    """Return mock CPU/memory time-series for the performance charts."""
    sample_times = ["12:00", "12:05", "12:10", "12:15", "12:20"]
    payload = {
        "cpu": {"timestamps": sample_times, "values": [25, 30, 45, 35, 28]},
        "memory": {"timestamps": sample_times, "values": [60, 65, 70, 68, 62]},
    }
    return jsonify(payload)

# 爬虫控制接口
@app.route('/api/spider/start/<name>', methods=['POST'])
def spider_start(name):
    """Start the named spider via the spider manager, optionally scoped to a
    category taken from the JSON request body."""
    try:
        payload = request.get_json() or {}
        outcome = spider_manager.start_spider(name, payload.get('category'))
        return jsonify(outcome)
    except Exception as e:
        return jsonify({
            "success": False,
            "message": f"爬虫启动失败: {str(e)}"
        }), 500

@app.route('/api/spider/stop/<name>', methods=['POST'])
def spider_stop(name):
    """Stop the named spider via the spider manager."""
    try:
        outcome = spider_manager.stop_spider(name)
        return jsonify(outcome)
    except Exception as e:
        return jsonify({
            "success": False,
            "message": f"爬虫停止失败: {str(e)}"
        }), 500

@app.route('/api/spider/status/<name>', methods=['GET'])
def spider_status(name):
    """Report the named spider's status as provided by the spider manager."""
    try:
        status = spider_manager.get_spider_status(name)
        return jsonify(status)
    except Exception as e:
        return jsonify({
            "success": False,
            "message": f"获取爬虫状态失败: {str(e)}"
        }), 500

@app.route('/api/spider/list', methods=['GET'])
def spider_list():
    """Report the status of every registered spider."""
    try:
        statuses = spider_manager.get_all_spiders_status()
        return jsonify(statuses)
    except Exception as e:
        return jsonify({
            "success": False,
            "message": f"获取爬虫列表失败: {str(e)}"
        }), 500

# 用户管理接口
@app.route('/api/users')
def users_list():
    """Return the mock user list (single admin account)."""
    admin_user = {
        "username": "admin",
        "email": "admin@example.com",
        "role": "admin",
        "status": "active",
        "lastLogin": "2024-01-01 10:00:00",
        "createdAt": "2024-01-01 00:00:00",
    }
    return jsonify([admin_user])

@app.route('/api/roles')
def roles_list():
    """Return the mock role definitions with their permission sets."""
    role_specs = [
        ("admin", "系统管理员",
         ["dashboard", "category", "price", "query", "monitor", "user", "spider", "export"]),
        ("manager", "数据经理",
         ["dashboard", "category", "price", "query", "monitor"]),
        ("user", "普通用户",
         ["dashboard", "query"]),
    ]
    return jsonify([
        {"name": name, "description": desc, "permissions": perms}
        for name, desc, perms in role_specs
    ])

@app.route('/api/users/login-logs')
def login_logs():
    """Return the mock login history (single admin entry)."""
    sample_entry = {
        "username": "admin",
        "loginTime": "2024-01-01 10:00:00",
        "ipAddress": "192.168.1.100",
        "userAgent": "Mozilla/5.0...",
        "status": "success",
    }
    return jsonify([sample_entry])

# 测试接口
# @app.route('/api/test')
# def test_connection():
#     try:
#         conn = get_conn()
#         cursor = conn.cursor()
#
#         # 测试基本连接
#         cursor.execute("SELECT 1")
#         result = cursor.fetchone()
#
#         # 检查表是否存在
#         cursor.execute("SHOW TABLES LIKE 'products'")
#         table_exists = cursor.fetchone()
#
#         # 如果表存在，检查字段
#         fields = []
#         if table_exists:
#             cursor.execute("DESCRIBE products")
#             fields = [row[0] for row in cursor.fetchall()]
#
#         # 检查数据量
#         count = 0
#         if table_exists:
#             cursor.execute("SELECT COUNT(*) FROM products")
#             count = cursor.fetchone()[0]
#
#         conn.close()
#
#         return jsonify({
#             "database_connected": True,
#             "table_exists": bool(table_exists),
#             "table_fields": fields,
#             "data_count": count,
#             "message": "数据库连接测试成功"
#         })
#     except Exception as e:
#         return jsonify({
#             "database_connected": False,
#             "error": str(e),
#             "message": "数据库连接失败"
#         }), 500

if __name__ == '__main__':
    # Startup banner for the development server.
    print("=" * 50)
    print("当当网商品数据分析系统 - 后端API服务")
    print("=" * 50)
    print("服务地址: http://localhost:5000")
    # Fixed: the banner previously advertised /api/test, but that route is
    # commented out in this file; point at a live endpoint instead.
    print("数据总览接口: http://localhost:5000/api/dashboard/overview")
    print("=" * 50)
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader —
    # development only; run behind a WSGI server in production.
    app.run(host='0.0.0.0', port=5000, debug=True)