"""
数据清洗相关的路由处理
"""

from flask import Blueprint, request, jsonify, current_app
import os
from datetime import datetime
from .cleaner import DataCleaner
from ...models import Dataset
from ... import db

bp = Blueprint('clean', __name__)

@bp.route('/missing', methods=['POST'])
def clean_missing_data():
    """
    Handle the missing-value cleaning API.

    Request body:
    {
        "dataset_id": 1,              # dataset ID
        "method": "mean",             # one of: mean, median, mode
        "columns": ["col1", "col2"]   # optional, restrict cleaning to these columns
    }

    Returns:
        200 with the new (cleaned) dataset's id and filenames on success,
        400 on missing/invalid parameters, 404 when the dataset or its
        file cannot be found, 500 on unexpected errors.
    """
    try:
        data = request.get_json()

        # Validate required parameters.
        if not data or 'dataset_id' not in data or 'method' not in data:
            return jsonify({
                'status': 'error',
                'message': '缺少必要参数'
            }), 400

        dataset_id = data['dataset_id']
        method = data['method']
        columns = data.get('columns')

        # Reject unsupported methods up front (400) instead of letting the
        # cleaner fail deep inside fill_missing() and surface as a 500.
        if method not in ('mean', 'median', 'mode'):
            return jsonify({
                'status': 'error',
                'message': '缺少必要参数'
            }), 400

        # Look up the dataset record in the database.
        dataset = Dataset.query.get(dataset_id)
        if not dataset:
            return jsonify({
                'status': 'error',
                'message': '数据集不存在'
            }), 404

        if not os.path.exists(dataset.file_path):
            return jsonify({
                'status': 'error',
                'message': '文件不存在'
            }), 404

        # Build a cleaner over the dataset's file and fill missing values.
        cleaner = DataCleaner(dataset.file_path)
        cleaner.fill_missing(method, columns)

        # Derive a timestamped filename/path for the cleaned output so
        # repeated cleanings of the same dataset never collide.
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        cleaned_filename = f"cleaned_{timestamp}_{dataset.filename}"
        cleaned_file_path = os.path.join(current_app.config['UPLOAD_FOLDER'], cleaned_filename)

        # Persist the cleaned data to disk.
        cleaner.save(cleaned_file_path)

        # Create a new dataset record linked to its parent via parent_id.
        new_dataset = Dataset(
            filename=cleaned_filename,
            file_path=cleaned_file_path,
            original_filename=f"cleaned_{dataset.original_filename}",
            file_type=dataset.file_type,
            created_by=dataset.created_by,
            parent_id=dataset_id
        )

        # Refresh metadata (row count and per-column type/nullability)
        # from the cleaned frame.
        row_count = len(cleaner.df)
        column_info = [
            {
                'name': col,
                'type': str(cleaner.df[col].dtype),
                'nullable': bool(cleaner.df[col].isnull().any())
            }
            for col in cleaner.df.columns
        ]
        new_dataset.update_metadata(row_count, column_info)

        db.session.add(new_dataset)
        db.session.commit()

        return jsonify({
            'status': 'success',
            'message': '数据清洗完成',
            'data': {
                'dataset_id': new_dataset.id,
                'original_filename': new_dataset.original_filename,
                'filename': new_dataset.filename
            }
        })

    except Exception as e:
        # Roll back so a failed add/commit does not leave the shared
        # session in a broken state for subsequent requests.
        db.session.rollback()
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500

@bp.route('/preview', methods=['GET'])
def preview_data():
    """
    Handle the data preview API.

    Query parameters:
        dataset_id: dataset ID (required)
        rows: number of rows to preview (optional, default 5)

    Returns:
        200 with the preview payload plus dataset info on success,
        400 on missing/invalid parameters, 404 when the dataset or its
        file cannot be found, 500 on unexpected errors.
    """
    try:
        dataset_id = request.args.get('dataset_id')
        if not dataset_id:
            return jsonify({
                'status': 'error',
                'message': '缺少数据集ID'
            }), 400

        # Parse the row count explicitly: a non-numeric or non-positive
        # value is a client error (400), not a server failure (500).
        try:
            rows = int(request.args.get('rows', 5))
        except (TypeError, ValueError):
            rows = -1
        if rows <= 0:
            return jsonify({
                'status': 'error',
                'message': '缺少数据集ID'
            }), 400

        # Look up the dataset record in the database.
        dataset = Dataset.query.get(dataset_id)
        if not dataset:
            return jsonify({
                'status': 'error',
                'message': '数据集不存在'
            }), 404

        if not os.path.exists(dataset.file_path):
            return jsonify({
                'status': 'error',
                'message': '文件不存在'
            }), 404

        # Build a cleaner over the dataset's file and take a row preview.
        cleaner = DataCleaner(dataset.file_path)
        preview_data = cleaner.get_preview(rows)

        # Attach dataset metadata alongside the preview rows.
        preview_data['dataset'] = {
            'id': dataset.id,
            'original_filename': dataset.original_filename,
            'file_type': dataset.file_type,
            'created_at': dataset.created_at.isoformat()
        }

        return jsonify({
            'status': 'success',
            'data': preview_data
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500