from flask import Blueprint, render_template, request, jsonify, flash, redirect, url_for, send_file
from flask_login import login_required
from app import db
from app.models.data import PlantingData, EnvironmentData
from sqlalchemy import func
import pandas as pd
from datetime import datetime
import io
from flask import current_app

# Blueprint for the data-cleaning views; all routes are mounted under /cleaning.
bp = Blueprint('cleaning', __name__, url_prefix='/cleaning')

@bp.route('/planting-clean')
@login_required
def planting_clean():
    """Render the planting-data cleaning page.

    Lists anomalous planting records (non-positive or implausibly large
    area, future planting date, empty crop type or region), paginated 10
    per page, plus groups of duplicate (crop_type, region, planting_date)
    rows. Optional query-string filters narrow the anomaly list.
    """
    # Optional query-string parameters.
    page = request.args.get('page', 1, type=int)
    search_crop = request.args.get('search_crop', '')
    search_region = request.args.get('search_region', '')
    date_from = request.args.get('date_from', '')
    date_to = request.args.get('date_to', '')

    # Today's date: used for the future-date anomaly check and by the template.
    now = datetime.now().date()

    # Anomaly criteria — any single condition flags the record.
    anomaly_query = PlantingData.query.filter(
        (PlantingData.area <= 0) |
        (PlantingData.area > 10000) |
        (PlantingData.planting_date > now) |
        (PlantingData.crop_type == '') |
        (PlantingData.region == '')
    )

    # Apply search filters. Malformed date strings are ignored (and reset so
    # the template does not echo them back) instead of raising a 500.
    if search_crop:
        anomaly_query = anomaly_query.filter(PlantingData.crop_type.like(f'%{search_crop}%'))
    if search_region:
        anomaly_query = anomaly_query.filter(PlantingData.region.like(f'%{search_region}%'))
    if date_from:
        try:
            anomaly_query = anomaly_query.filter(
                PlantingData.planting_date >= datetime.strptime(date_from, '%Y-%m-%d').date())
        except ValueError:
            date_from = ''
    if date_to:
        try:
            anomaly_query = anomaly_query.filter(
                PlantingData.planting_date <= datetime.strptime(date_to, '%Y-%m-%d').date())
        except ValueError:
            date_to = ''

    # Paginate (out-of-range pages yield an empty list rather than a 404).
    anomalies_pagination = anomaly_query.paginate(
        page=page, per_page=10, error_out=False)

    # Duplicate detection: same crop, region, and planting date more than once.
    duplicates = db.session.query(
        PlantingData.crop_type,
        PlantingData.region,
        PlantingData.planting_date,
        func.count('*').label('count')
    ).group_by(
        PlantingData.crop_type,
        PlantingData.region,
        PlantingData.planting_date
    ).having(func.count('*') > 1).all()

    return render_template('cleaning/planting_clean.html',
                         anomalies=anomalies_pagination.items,
                         anomalies_pagination=anomalies_pagination,
                         duplicates=duplicates,
                         now=now,
                         search_crop=search_crop,
                         search_region=search_region,
                         date_from=date_from,
                         date_to=date_to)

@bp.route('/planting-clean/fix-anomaly/<int:id>', methods=['POST'])
@login_required
def fix_planting_anomaly(id):
    """Apply form-submitted corrections to a single PlantingData row.

    Only the fields present in the POST body are updated. On any failure
    the transaction is rolled back and the user is notified via flash.
    """
    data = PlantingData.query.get_or_404(id)
    try:
        if 'area' in request.form:
            data.area = float(request.form['area'])
        if 'crop_type' in request.form:
            data.crop_type = request.form['crop_type']
        if 'region' in request.form:
            data.region = request.form['region']
        if 'planting_date' in request.form:
            # Store a date (not datetime) — planting_date is compared against
            # date objects elsewhere in this module.
            data.planting_date = datetime.strptime(request.form['planting_date'], '%Y-%m-%d').date()

        db.session.commit()
        flash('数据已修正', 'success')
    except Exception:
        # Roll back the failed transaction so the session stays usable.
        db.session.rollback()
        flash('修正失败', 'danger')
    return redirect(url_for('cleaning.planting_clean'))

@bp.route('/environment-clean')
@login_required
def environment_clean():
    """Render the environment-data cleaning page.

    Lists environment records with out-of-range readings (temperature,
    rainfall, light intensity) or future dates, paginated 10 per page,
    plus duplicate (region, date) groups. Optional query-string filters
    narrow the result.
    """
    # Optional query-string parameters.
    page = request.args.get('page', 1, type=int)
    search_region = request.args.get('search_region', '')
    date_from = request.args.get('date_from', '')
    date_to = request.args.get('date_to', '')

    # Today's date: used for the future-date check and passed to the template.
    today = datetime.now().date()

    # Base query before anomaly filtering.
    query = EnvironmentData.query

    # Apply search filters; compare with date objects to match the
    # future-date check below. Malformed date strings are ignored (and
    # reset so the template does not echo them back) instead of raising.
    if search_region:
        query = query.filter(EnvironmentData.region.like(f'%{search_region}%'))
    if date_from:
        try:
            query = query.filter(EnvironmentData.date >= datetime.strptime(date_from, '%Y-%m-%d').date())
        except ValueError:
            date_from = ''
    if date_to:
        try:
            query = query.filter(EnvironmentData.date <= datetime.strptime(date_to, '%Y-%m-%d').date())
        except ValueError:
            date_to = ''

    # Plausibility bounds — any single violation flags the record.
    anomalies_pagination = query.filter(
        db.or_(
            EnvironmentData.temperature < -50,  # temperature out of range
            EnvironmentData.temperature > 50,
            EnvironmentData.rainfall < 0,       # rainfall out of range
            EnvironmentData.rainfall > 1000,
            EnvironmentData.light_intensity < 0,  # light intensity out of range
            EnvironmentData.light_intensity > 150000,
            EnvironmentData.date > today        # future dates are invalid
        )
    ).paginate(page=page, per_page=10, error_out=False)

    # Duplicate detection: same region and date more than once.
    duplicates = db.session.query(
        EnvironmentData.region,
        EnvironmentData.date,
        db.func.count('*').label('count')
    ).group_by(
        EnvironmentData.region,
        EnvironmentData.date
    ).having(db.func.count('*') > 1).all()

    return render_template('cleaning/environment_clean.html',
                         anomalies=anomalies_pagination.items,
                         anomalies_pagination=anomalies_pagination,
                         duplicates=duplicates,
                         now=today,
                         search_region=search_region,
                         date_from=date_from,
                         date_to=date_to)

@bp.route('/environment-clean/fix-anomaly/<int:id>', methods=['POST'])
@login_required
def fix_environment_anomaly(id):
    """Apply form-submitted corrections to a single EnvironmentData row.

    Only the fields present in the POST body are updated. On any failure
    the transaction is rolled back and the user is notified via flash.
    """
    data = EnvironmentData.query.get_or_404(id)
    try:
        if 'temperature' in request.form:
            data.temperature = float(request.form['temperature'])
        if 'rainfall' in request.form:
            data.rainfall = float(request.form['rainfall'])
        if 'light_intensity' in request.form:
            data.light_intensity = float(request.form['light_intensity'])
        if 'region' in request.form:
            data.region = request.form['region']
        if 'date' in request.form:
            # Store a date (not datetime) — EnvironmentData.date is compared
            # against date objects elsewhere in this module.
            data.date = datetime.strptime(request.form['date'], '%Y-%m-%d').date()

        db.session.commit()
        flash('数据已修正', 'success')
    except Exception:
        # Roll back the failed transaction so the session stays usable.
        db.session.rollback()
        flash('修正失败', 'danger')
    return redirect(url_for('cleaning.environment_clean'))

@bp.route('/data-integration')
@login_required
def data_integration():
    """Show planting data joined with same-region/same-date environment data.

    Aggregates planting records per (region, planting_date) — total area and
    record count — and attaches the matching environment readings, paginated
    10 rows per page. Optional query-string filters narrow the result.
    """
    page = request.args.get('page', 1, type=int)
    search_region = request.args.get('search_region', '')
    date_from = request.args.get('date_from', '')
    date_to = request.args.get('date_to', '')

    # Build the join/aggregation query.
    # NOTE(review): the EnvironmentData columns are selected without being in
    # GROUP BY; this relies on a lenient SQL backend (e.g. SQLite/MySQL)
    # returning an arbitrary matching row per group — confirm the target DB.
    query = db.session.query(
        PlantingData.region,
        PlantingData.planting_date,
        db.func.sum(PlantingData.area).label('total_area'),
        db.func.count(PlantingData.id).label('planting_count'),
        EnvironmentData.temperature,
        EnvironmentData.rainfall,
        EnvironmentData.soil_type,
        EnvironmentData.light_intensity
    ).join(
        EnvironmentData,
        db.and_(
            PlantingData.region == EnvironmentData.region,
            PlantingData.planting_date == EnvironmentData.date
        )
    ).group_by(
        PlantingData.region,
        PlantingData.planting_date
    )

    # Apply optional search filters.
    if search_region:
        query = query.filter(PlantingData.region.like(f'%{search_region}%'))
    if date_from:
        query = query.filter(PlantingData.planting_date >= datetime.strptime(date_from, '%Y-%m-%d'))
    if date_to:
        query = query.filter(PlantingData.planting_date <= datetime.strptime(date_to, '%Y-%m-%d'))

    # Paginate (10 rows per page; out-of-range pages yield empty results).
    pagination = query.paginate(page=page, per_page=10, error_out=False)

    return render_template('cleaning/data_integration.html',
                         integrated_data=pagination.items,
                         pagination=pagination,
                         search_region=search_region,
                         date_from=date_from,
                         date_to=date_to)

@bp.route('/export-integrated-data', methods=['POST'])
@login_required
def export_integrated_data():
    """Export the integrated planting/environment data as Excel or CSV.

    Form fields:
        export_format: 'excel' (default) for .xlsx, anything else for .csv.
        export_range: 'all' (default) or 'filtered' to apply the current
            search filters.

    Returns the generated file as a download attachment, built entirely in
    memory.
    """
    export_format = request.form.get('export_format', 'excel')
    export_range = request.form.get('export_range', 'all')

    # Same join/aggregation as the data_integration view.
    query = db.session.query(
        PlantingData.region,
        PlantingData.planting_date,
        db.func.sum(PlantingData.area).label('total_area'),
        db.func.count(PlantingData.id).label('planting_count'),
        EnvironmentData.temperature,
        EnvironmentData.rainfall,
        EnvironmentData.soil_type,
        EnvironmentData.light_intensity
    ).join(
        EnvironmentData,
        db.and_(
            PlantingData.region == EnvironmentData.region,
            PlantingData.planting_date == EnvironmentData.date
        )
    ).group_by(
        PlantingData.region,
        PlantingData.planting_date
    )

    # When exporting the filtered result, re-apply the page's search filters.
    # NOTE(review): the filters are read from the query string while the
    # export options come from the POST body — the form must submit to a URL
    # carrying the filters; confirm against the template.
    if export_range == 'filtered':
        search_region = request.args.get('search_region', '')
        date_from = request.args.get('date_from', '')
        date_to = request.args.get('date_to', '')

        if search_region:
            query = query.filter(PlantingData.region.like(f'%{search_region}%'))
        if date_from:
            query = query.filter(PlantingData.planting_date >= datetime.strptime(date_from, '%Y-%m-%d'))
        if date_to:
            query = query.filter(PlantingData.planting_date <= datetime.strptime(date_to, '%Y-%m-%d'))

    data = query.all()
    # Use `is not None` so legitimate zero readings (0 °C, 0 mm, 0 lux) are
    # exported as 0 rather than blanked out by a truthiness check.
    df = pd.DataFrame([{
        '区域': item.region,
        '日期': item.planting_date.strftime('%Y-%m-%d'),
        '总种植面积(亩)': float(item.total_area),
        '种植记录数': item.planting_count,
        '温度(℃)': float(item.temperature) if item.temperature is not None else None,
        '降雨量(mm)': float(item.rainfall) if item.rainfall is not None else None,
        '土壤类型': item.soil_type,
        '光照强度(lux)': float(item.light_intensity) if item.light_intensity is not None else None
    } for item in data])

    # Build the file in memory — nothing touches the filesystem.
    output = io.BytesIO()
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

    if export_format == 'excel':
        df.to_excel(output, index=False)
        mimetype = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        filename = f'integrated_data_{timestamp}.xlsx'
    else:
        df.to_csv(output, index=False)
        mimetype = 'text/csv'
        filename = f'integrated_data_{timestamp}.csv'

    output.seek(0)
    return send_file(
        output,
        mimetype=mimetype,
        as_attachment=True,
        download_name=filename
    )

@bp.route('/get-analysis-data')
@login_required
def get_analysis_data():
    """Return chart data for the analysis dashboard as JSON.

    The payload contains three series:
        trend_data: total planted area per planting date (chronological).
        region_data: total planted area per region.
        environment_data: average planted area per rounded temperature.

    On failure responds with {'error': True, 'message': ...} and HTTP 500.
    """
    try:
        # Use the app logger instead of print() so output respects the
        # configured log level and handlers.
        current_app.logger.debug("开始获取分析数据...")

        # Trend series: total planted area per planting date.
        trend_query = db.session.query(
            PlantingData.planting_date,
            db.func.sum(PlantingData.area).label('total_area')
        ).group_by(
            PlantingData.planting_date
        ).order_by(
            PlantingData.planting_date
        ).all()

        trend_data = [
            {
                'value': [
                    item.planting_date.strftime('%Y-%m-%d'),
                    float(item.total_area)
                ]
            } for item in trend_query
        ]

        # Region series: total planted area per region.
        region_query = db.session.query(
            PlantingData.region,
            db.func.sum(PlantingData.area).label('total_area')
        ).group_by(
            PlantingData.region
        ).all()

        region_data = [
            {
                'name': item.region,
                'value': float(item.total_area)
            } for item in region_query
        ]

        # Environment series: average planted area per rounded temperature,
        # joining planting and environment rows on region + date.
        environment_query = db.session.query(
            db.func.round(EnvironmentData.temperature).label('temp_rounded'),
            db.func.avg(PlantingData.area).label('avg_area')
        ).join(
            PlantingData,
            db.and_(
                PlantingData.region == EnvironmentData.region,
                PlantingData.planting_date == EnvironmentData.date
            )
        ).group_by(
            'temp_rounded'
        ).order_by(
            'temp_rounded'
        ).all()

        environment_data = {
            'categories': [float(item.temp_rounded) for item in environment_query],
            'values': [float(item.avg_area) for item in environment_query]
        }

        result = {
            'trend_data': trend_data,
            'region_data': region_data,
            'environment_data': environment_data
        }
        current_app.logger.debug("分析数据获取成功: %s", result)
        return jsonify(result)

    except Exception as e:
        # logger.exception also records the traceback, unlike the old print().
        current_app.logger.exception("获取分析数据失败: %s", e)
        return jsonify({
            'error': True,
            'message': str(e)
        }), 500