import os
import json
import logging
import hashlib
from datetime import datetime
from flask import Flask, request, jsonify, render_template, send_from_directory
from werkzeug.utils import secure_filename
from .log_processor import LogProcessor
from .config_manager import ConfigManager
from .ai_analyzer import AIAnalyzer
from flask_cors import CORS
from .workspace_service import workspace_service, session_service, conversation_service
from .flow_service import FlowService


# Initialize the Flask application and enable CORS on every route.
# NOTE(review): supports_credentials=True combined with origins "*" is
# rejected by browsers for credentialed requests — confirm intent.
app = Flask(__name__)
CORS(app, supports_credentials=True, resources={r"/*": {"origins": "*"}})

# Initialize the configuration manager and apply upload-related limits/paths.
config_manager = ConfigManager()
app.config['MAX_CONTENT_LENGTH'] = config_manager.get_config('max_file_size')
app.config['UPLOAD_FOLDER'] = config_manager.get_config('upload_folder')
app.config['TEMP_FOLDER'] = config_manager.get_config('temp_folder')

# Make sure the upload and temp directories exist before any request arrives.
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)
os.makedirs(app.config['TEMP_FOLDER'], exist_ok=True)

# Configure logging from the configured level name (e.g. "INFO", "DEBUG").
logging.basicConfig(level=getattr(logging, config_manager.get_config('log_level')))
logger = logging.getLogger(__name__)

# Module-level singletons shared by every request handler below.
# log_processor holds the currently loaded log set (one at a time).
log_processor = LogProcessor()
logger.info("后端启动：日志处理器已就绪，等待上传或持久化读取")

# AI analyzer used by the /ai/* endpoints.
ai_analyzer = AIAnalyzer()
logger.info("后端启动：AI分析器配置已加载")

# Flow service backing the /api/flows endpoints.
flow_service = FlowService()
logger.info("后端启动：Flow服务已初始化")

@app.route('/')
def index():
    """Render and return the application's landing page."""
    template_name = 'index.html'
    return render_template(template_name)

def _compute_file_meta(filepath):
    """Best-effort size and SHA-256 digest of a saved upload.

    Returns (size_bytes | None, sha256_hex | None). Failures are tolerated
    because this metadata only enriches the persisted session record.
    """
    try:
        file_size = os.path.getsize(filepath)
    except OSError:
        file_size = None
    file_hash = None
    try:
        digest = hashlib.sha256()
        with open(filepath, 'rb') as fbin:
            # Stream in 8 KiB chunks so large archives don't load into memory.
            for chunk in iter(lambda: fbin.read(8192), b''):
                digest.update(chunk)
        file_hash = digest.hexdigest()
    except OSError:
        pass
    return file_size, file_hash


@app.route('/upload', methods=['POST'])
def upload_file():
    """Handle a .tar.gz log-archive upload.

    Saves the archive, extracts and analyzes its logs into the shared
    log_processor, then best-effort persists an analysis session. A
    persistence failure is reported in the response but does not fail
    the upload itself.
    """
    if 'file' not in request.files:
        return jsonify({'error': '没有选择文件'}), 400

    file = request.files['file']
    if file.filename == '':
        return jsonify({'error': '没有选择文件'}), 400

    # Reject files whose extension is not whitelisted in the config.
    allowed_extensions = config_manager.get_config('allowed_extensions')
    if not any(file.filename.endswith(ext) for ext in allowed_extensions):
        return jsonify({'error': f'不支持的文件格式，请上传 {", ".join(allowed_extensions)} 格式的文件'}), 400

    if file and file.filename.endswith('.tar.gz'):
        filename = secure_filename(file.filename)
        filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
        file.save(filepath)

        # Reset any previously loaded logs, then extract the new archive.
        log_processor.clear_logs()
        log_processor.extract_from_tar_gz(filepath)

        if not log_processor.logs:
            return jsonify({'error': '未找到有效的日志文件'}), 400

        # Cache the computed summary for the read-only endpoints.
        log_processor.analysis_results = log_processor.generate_summary()

        logger.info(f"成功处理日志文件，共 {len(log_processor.logs)} 条日志")
        source_files = set(log.get('source_file') for log in log_processor.logs if log.get('source_file'))
        logger.info(f"源文件列表: {list(source_files)}")

        # ---- Persistence: create/update an analysis session with the results ----
        try:
            # Pick a workspace: first existing one, otherwise create a default.
            workspaces = workspace_service.get_workspaces()
            if workspaces:
                workspace_id = workspaces[0]['id']
            else:
                default_ws = workspace_service.create_workspace(
                    name="默认工作区",
                    description="系统自动创建",
                    settings={}
                )
                workspace_id = default_ws['id']

            file_size, file_hash = _compute_file_meta(filepath)

            # BUG FIX: the session name/description previously embedded the
            # literal placeholder "(unknown)"; use the actual uploaded filename.
            session_name = f"{filename} 分析于 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            created_session = session_service.create_session(
                workspace_id=workspace_id,
                name=session_name,
                description=f"从 {filename} 上传并分析的日志"
            )

            # Attach analysis results and file metadata to the session.
            updated_session = session_service.update_session(
                created_session['id'],
                analysis_results=log_processor.analysis_results,
                ai_analysis=None,
                status="completed",
                original_filename=filename,
                file_size=file_size,
                file_hash=file_hash,
                storage_path=filepath
            )

            # Include the session so the frontend can later restore from it.
            return jsonify({
                'message': '文件上传并持久化成功',
                'total_logs': len(log_processor.logs),
                'session': updated_session
            })
        except Exception as e:
            logger.error(f"上传后持久化失败: {e}")
            # Persistence failure must not hide the successful in-memory analysis.
            return jsonify({
                'message': '文件上传成功（持久化失败）',
                'total_logs': len(log_processor.logs),
                'persistence_error': str(e)
            })

    return jsonify({'error': '请上传.tar.gz格式的文件'}), 400

@app.route('/components')
def get_components():
    """Return the per-component analysis, preferring the cached result."""
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    cached = log_processor.analysis_results
    if cached and 'components' in cached:
        return jsonify(cached['components'])
    # No cached analysis: recompute on demand.
    return jsonify(log_processor.categorize_components())

@app.route('/component_categories')
def get_component_categories():
    """Return the sorted list of top-level component category names."""
    if not log_processor.component_categories:
        return jsonify({'error': '没有组件分类数据'}), 404

    return jsonify(sorted(log_processor.component_categories.keys()))

@app.route('/duplicates')
def get_duplicates():
    """Return duplicate-log analysis, preferring the cached result."""
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    cached = log_processor.analysis_results
    if cached and 'duplicates' in cached:
        return jsonify(cached['duplicates'])
    # No cached analysis: recompute on demand.
    return jsonify(log_processor.find_duplicate_logs())

@app.route('/timeline')
def get_timeline():
    """Return time-pattern analysis, preferring the cached result."""
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    cached = log_processor.analysis_results
    if cached and 'time_patterns' in cached:
        return jsonify(cached['time_patterns'])
    # No cached analysis: recompute on demand.
    return jsonify(log_processor.analyze_time_patterns())

@app.route('/config')
def get_config():
    """Expose the current application configuration as JSON."""
    current = config_manager.app_config
    return jsonify(current)

@app.route('/config', methods=['POST'])
def update_config():
    """Apply a batch of key/value configuration updates from the JSON body."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': '无效的请求数据'}), 400

    try:
        for cfg_key, cfg_value in payload.items():
            config_manager.set_config(cfg_key, cfg_value)
    except Exception as e:
        logger.error(f"更新配置失败: {e}")
        return jsonify({'error': '更新配置失败'}), 500
    return jsonify({'message': '配置更新成功'})

@app.route('/logs/raw')
def get_raw_logs():
    """Return one page of the raw parsed log entries.

    Query params: page (1-based, default 1) and per_page (default 100).
    Robustness fix: both values are clamped to at least 1 — previously a
    per_page of 0 caused a ZeroDivisionError computing total_pages, and
    negative values produced nonsense slices.
    """
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    page = max(request.args.get('page', 1, type=int), 1)
    per_page = max(request.args.get('per_page', 100, type=int), 1)

    start_idx = (page - 1) * per_page
    logs = log_processor.logs[start_idx:start_idx + per_page]

    total = len(log_processor.logs)
    return jsonify({
        'logs': logs,
        'total': total,
        'page': page,
        'per_page': per_page,
        # Ceiling division for the page count.
        'total_pages': (total + per_page - 1) // per_page
    })

@app.route('/logs/search')
def search_logs():
    """Search logs by message text, component and level, with pagination.

    Query params: q (substring, case-insensitive), component, level,
    page (1-based, default 1), per_page (default 20).
    Results are ordered by log-level severity (ERROR first) and, within the
    same level, by file line number ascending.
    """
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    query = request.args.get('q', '').lower()
    component = request.args.get('component', '')
    level_filter = request.args.get('level', '')
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 20, type=int)

    # Severity ranking used for ordering; unknown levels rank lowest (-1).
    log_level_priority = {
        'ERROR': 4,
        'NOTICE': 3,
        'WARNING': 2,
        'INFO': 1,
        'DEBUG': 0,
    }

    # Apply all three optional filters in a single pass.
    filtered_logs = [
        log for log in log_processor.logs
        if (not query or query in log['message'].lower())
        and (not component or log['component'] == component)
        and (not level_filter or log['level'] == level_filter)
    ]

    # BUG FIX: the previous sort used reverse=True on a (priority, line)
    # tuple, which also reversed the line order — contradicting the intent
    # of "line number ascending". Negating the priority gives severity
    # descending while file_line stays ascending (missing line numbers,
    # defaulted to +inf, sort last within their level).
    filtered_logs.sort(key=lambda x: (
        -log_level_priority.get(x.get('level', '').upper(), -1),
        x.get('file_line', float('inf'))
    ))

    total_filtered_logs = len(filtered_logs)
    start_idx = (page - 1) * per_page
    paginated_logs = filtered_logs[start_idx:start_idx + per_page]

    return jsonify({
        'logs': paginated_logs,
        'total': total_filtered_logs,
        'page': page,
        'per_page': per_page,
        'total_pages': (total_filtered_logs + per_page - 1) // per_page
    })

@app.route('/logs/files')
def get_available_log_files():
    """List the source files referenced by the loaded logs and whether each
    still exists on disk in the temp or upload folder."""
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    # Distinct, non-empty source_file values across all loaded logs.
    source_files = {log.get('source_file') for log in log_processor.logs if log.get('source_file')}

    # CONSISTENCY FIX: search the configured folders (set at startup from
    # ConfigManager) instead of the hard-coded 'temp'/'uploads' literals.
    search_dirs = (app.config['TEMP_FOLDER'], app.config['UPLOAD_FOLDER'])

    available_files = []
    for filename in source_files:
        file_path = next(
            (p for p in (os.path.join(d, filename) for d in search_dirs)
             if os.path.exists(p)),
            None
        )
        available_files.append({
            'filename': filename,
            'exists': file_path is not None,
            'path': file_path
        })

    return jsonify({
        'files': available_files,
        'total': len(available_files)
    })

@app.route('/logs/file/<filename>')
def get_log_file_content(filename):
    """Return the raw text of a source log file plus its parsed log entries.

    Optional query params 'component' and 'level' filter which parsed
    entries are returned alongside the file content.
    """
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    # Strip any path components so the client cannot traverse directories.
    safe_filename = os.path.basename(filename)

    component_filter = request.args.get('component')
    level_filter = request.args.get('level')

    # Collect this file's parsed entries, applying the optional filters.
    file_logs = []
    for log in log_processor.logs:
        if log.get('source_file') != safe_filename:
            continue
        if component_filter and log.get('component') != component_filter:
            continue
        if level_filter and log.get('level') != level_filter:
            continue
        file_logs.append(log)

    if not file_logs:
        return jsonify({'error': f'未找到文件 {safe_filename} 的日志数据或匹配筛选条件的日志'}), 404

    # CONSISTENCY FIX: look in the configured folders (set at startup from
    # ConfigManager) rather than the hard-coded 'temp'/'uploads' literals.
    possible_paths = [
        os.path.join(app.config['TEMP_FOLDER'], safe_filename),
        os.path.join(app.config['UPLOAD_FOLDER'], safe_filename)
    ]

    file_path = None
    logger.info(f"查找文件 {safe_filename} 的路径:")
    for path in possible_paths:
        logger.info(f"  检查路径: {path}, 存在: {os.path.exists(path)}")
        if os.path.exists(path):
            file_path = path
            logger.info(f"  找到文件: {path}")
            break

    if not file_path:
        # Log the temp folder's contents to aid debugging of missing files.
        temp_dir = app.config['TEMP_FOLDER']
        if os.path.exists(temp_dir):
            logger.info(f"temp目录中的文件: {os.listdir(temp_dir)}")
        return jsonify({'error': f'未找到文件 {safe_filename} 的原始内容'}), 404

    try:
        # errors='ignore' tolerates non-UTF-8 bytes in mixed-encoding logs.
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
            content = f.read()
    except Exception as e:
        logger.error(f"读取文件 {file_path} 失败: {e}")
        return jsonify({'error': f'读取文件失败: {str(e)}'}), 500

    logger.info(f"成功读取文件 {file_path}, 内容长度: {len(content)}")
    return jsonify({
        'filename': safe_filename,
        'content': content,
        'logs': file_logs
    })

@app.route('/stats')
def get_stats():
    """Return summary statistics, preferring the cached analysis summary."""
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404

    cached = log_processor.analysis_results
    if cached and 'summary' in cached:
        summary = cached['summary']
        return jsonify({
            'total_logs': summary.get('total_logs', len(log_processor.logs)),
            'unique_components': summary.get('unique_components', 0),
            'components': summary.get('components', []),
            'levels': summary.get('levels', []),
            'time_range': summary.get('time_range', {})
        })

    # No cached summary: recompute the statistics from the raw logs.
    components = list({log['component'] for log in log_processor.logs})
    levels = list({log['level'] for log in log_processor.logs})

    if log_processor.logs:
        dates = sorted(log['date'] for log in log_processor.logs)
        time_range = {'start_date': dates[0], 'end_date': dates[-1]}
    else:
        time_range = {}

    return jsonify({
        'total_logs': len(log_processor.logs),
        'unique_components': len(components),
        'components': components,
        'levels': levels,
        'time_range': time_range
    })

# AI分析相关API端点
@app.route('/ai/analyze', methods=['POST'])
def ai_analyze():
    """Run the AI analyzer over the currently loaded logs.

    Accepts an optional JSON body with 'error_category'.
    NOTE(review): error_category is read and logged but never forwarded to
    ai_analyzer.analyze_logs() — confirm whether that is intentional.
    """
    if not log_processor.logs:
        return jsonify({'error': '没有日志数据'}), 404
    
    try:
        data = request.get_json() or {}
        error_category = data.get('error_category')
        
        logger.info(f"开始AI分析，错误分类: {error_category}")
        logger.info(f"日志总数: {len(log_processor.logs)}")
        
        analysis_result = ai_analyzer.analyze_logs(log_processor)
        
        logger.info(f"AI分析完成，结果类型: {type(analysis_result)}")
        if 'initial_analysis' in analysis_result:
            logger.info(f"分析内容长度: {len(analysis_result['initial_analysis'])}")
        
        return jsonify(analysis_result)
    except Exception as e:
        logger.error(f"AI分析失败: {e}")
        return jsonify({'error': f'AI分析失败: {str(e)}'}), 500

@app.route('/ai/ask', methods=['POST'])
def ai_ask():
    """Dispatch an AI question to the streaming or blocking handler."""
    data = request.get_json()
    if not data or 'question' not in data:
        return jsonify({'error': '缺少问题参数'}), 400

    question = data['question']
    conversation_id = data.get('conversation_id', None)
    include_logs = data.get('include_logs', False)

    # Choose the handler based on the client's 'stream' flag.
    handler = ai_ask_stream if data.get('stream', False) else ai_ask_normal
    return handler(question, conversation_id, include_logs)

def ai_ask_normal(question, conversation_id=None, include_logs=False):
    """Answer a question synchronously.

    When *conversation_id* is given, both the question and the answer are
    persisted best-effort — persistence failures are logged, never raised.
    """
    try:
        logger.info(f"收到AI问题: {question}, 对话ID: {conversation_id}, 包含日志: {include_logs}")

        if conversation_id:
            # Best-effort: record the user's question.
            try:
                conversation_service.add_message(
                    session_id=conversation_id,
                    message_type="user",
                    content=question,
                    context={"include_logs": include_logs}
                )
            except Exception as e:
                logger.warning(f"保存用户问题失败: {e}")

        answer = ai_analyzer.ask_question(question, log_processor, conversation_id, include_logs)

        if conversation_id and 'answer' in answer:
            # Best-effort: record the AI's reply.
            try:
                conversation_service.add_message(
                    session_id=conversation_id,
                    message_type="ai",
                    content=answer['answer'],
                    context={"include_logs": include_logs}
                )
            except Exception as e:
                logger.warning(f"保存AI回答失败: {e}")

        logger.info(f"AI回答完成，回答长度: {len(answer.get('answer', ''))}")
        return jsonify(answer)
    except Exception as e:
        logger.error(f"AI问答失败: {e}")
        return jsonify({'error': f'AI问答失败: {str(e)}'}), 500

def ai_ask_stream(question, conversation_id=None, include_logs=False):
    """Stream an AI answer over Server-Sent Events.

    Emits a 'start' event, then 'chunk' events with incremental content,
    then an 'end' event; on failure a single 'error' event is sent. When
    *conversation_id* is given, the question and the fully assembled answer
    are persisted best-effort (failures are only logged).
    """
    def generate():
        # Generator feeding the SSE response body.
        try:
            logger.info(f"收到流式AI问题: {question}, 对话ID: {conversation_id}, 包含日志: {include_logs}")
            
            # Best-effort: persist the user's question before streaming.
            if conversation_id:
                try:
                    conversation_service.add_message(
                        session_id=conversation_id,
                        message_type="user",
                        content=question,
                        context={"include_logs": include_logs}
                    )
                except Exception as e:
                    logger.warning(f"保存用户问题失败: {e}")
            
            # Opening SSE event so the client knows streaming has begun.
            yield "data: {\"type\": \"start\"}\n\n"
            
            # Relay each chunk from the analyzer, accumulating the full text
            # so the complete answer can be persisted afterwards.
            response_stream = ai_analyzer.ask_question_stream(question, log_processor, conversation_id, include_logs)
            full_answer = ""
            for chunk in response_stream:
                full_answer += chunk
                data = {
                    "type": "chunk",
                    "content": chunk
                }
                yield f"data: {json.dumps(data, ensure_ascii=False)}\n\n"
            
            # Best-effort: persist the complete AI answer.
            if conversation_id and full_answer:
                try:
                    conversation_service.add_message(
                        session_id=conversation_id,
                        message_type="ai",
                        content=full_answer,
                        context={"include_logs": include_logs}
                    )
                except Exception as e:
                    logger.warning(f"保存AI回答失败: {e}")
            
            # Closing SSE event.
            yield "data: {\"type\": \"end\"}\n\n"
            
            logger.info("流式AI回答完成")
            
        except Exception as e:
            logger.error(f"流式AI问答失败: {e}")
            error_data = {
                "type": "error",
                "content": f"AI问答失败: {str(e)}"
            }
            yield f"data: {json.dumps(error_data, ensure_ascii=False)}\n\n"
    
    # CORS headers are repeated here because streaming responses bypass the
    # usual after-request hooks in some setups — NOTE(review): confirm.
    return app.response_class(
        generate(),
        mimetype='text/event-stream',
        headers={
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Headers': 'Cache-Control'
        }
    )

@app.route('/ai/conversation')
def get_conversation_history():
    """Fetch conversation history, scoped to one conversation when an id is given."""
    try:
        conversation_id = request.args.get('conversation_id')
        if conversation_id:
            history = ai_analyzer.get_conversation_history_by_id(conversation_id)
        else:
            # No id supplied: return the full history.
            history = ai_analyzer.get_conversation_history()
    except Exception as e:
        logger.error(f"获取对话历史失败: {e}")
        return jsonify({'error': f'获取对话历史失败: {str(e)}'}), 500
    return jsonify(history)

@app.route('/ai/conversation', methods=['DELETE'])
def clear_conversation_history():
    """Delete all stored conversation history."""
    try:
        ai_analyzer.clear_conversation_history()
    except Exception as e:
        logger.error(f"清空对话历史失败: {e}")
        return jsonify({'error': f'清空对话历史失败: {str(e)}'}), 500
    return jsonify({'message': '对话历史已清空'})

@app.route('/ai/config')
def get_ai_config():
    """Return the AI analyzer's current configuration."""
    try:
        return jsonify(ai_analyzer.get_ai_config())
    except Exception as e:
        logger.error(f"获取AI配置失败: {e}")
        return jsonify({'error': f'获取AI配置失败: {str(e)}'}), 500

@app.route('/ai/config', methods=['POST'])
def update_ai_config():
    """Apply AI configuration updates from the posted JSON document."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': '无效的请求数据'}), 400

    try:
        if ai_analyzer.update_ai_config(payload):
            return jsonify({'message': 'AI配置更新成功'})
        return jsonify({'error': 'AI配置更新失败'}), 500
    except Exception as e:
        logger.error(f"更新AI配置失败: {e}")
        return jsonify({'error': f'更新AI配置失败: {str(e)}'}), 500

@app.route('/ai/models')
def get_ai_models():
    """List every configured AI model."""
    try:
        return jsonify(ai_analyzer.get_available_models())
    except Exception as e:
        logger.error(f"获取AI模型列表失败: {e}")
        return jsonify({'error': f'获取AI模型列表失败: {str(e)}'}), 500

@app.route('/ai/models', methods=['POST'])
def add_ai_model():
    """Register a new AI model; the JSON body needs 'key' and 'config'."""
    payload = request.get_json()
    if not payload or 'key' not in payload or 'config' not in payload:
        return jsonify({'error': '缺少必要参数'}), 400

    try:
        if ai_analyzer.add_model(payload['key'], payload['config']):
            return jsonify({'message': 'AI模型添加成功'})
        return jsonify({'error': 'AI模型添加失败'}), 500
    except Exception as e:
        logger.error(f"添加AI模型失败: {e}")
        return jsonify({'error': f'添加AI模型失败: {str(e)}'}), 500

@app.route('/ai/models/<model_key>', methods=['PUT'])
def update_ai_model(model_key):
    """Update the configuration of the model identified by *model_key*."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': '无效的请求数据'}), 400

    try:
        if ai_analyzer.update_model(model_key, payload):
            return jsonify({'message': 'AI模型更新成功'})
        return jsonify({'error': 'AI模型更新失败'}), 500
    except Exception as e:
        logger.error(f"更新AI模型失败: {e}")
        return jsonify({'error': f'更新AI模型失败: {str(e)}'}), 500

@app.route('/ai/models/<model_key>', methods=['DELETE'])
def delete_ai_model(model_key):
    """Remove the AI model identified by *model_key*."""
    try:
        removed = ai_analyzer.remove_model(model_key)
    except Exception as e:
        logger.error(f"删除AI模型失败: {e}")
        return jsonify({'error': f'删除AI模型失败: {str(e)}'}), 500
    if removed:
        return jsonify({'message': 'AI模型删除成功'})
    return jsonify({'error': 'AI模型删除失败'}), 500

@app.route('/ai/models/current', methods=['POST'])
def set_current_model():
    """Select which configured model the analyzer should use."""
    payload = request.get_json()
    if not payload or 'model_key' not in payload:
        return jsonify({'error': '缺少模型标识'}), 400

    try:
        if ai_analyzer.set_current_model(payload['model_key']):
            return jsonify({'message': '当前模型设置成功'})
        return jsonify({'error': '当前模型设置失败'}), 500
    except Exception as e:
        logger.error(f"设置当前模型失败: {e}")
        return jsonify({'error': f'设置当前模型失败: {str(e)}'}), 500

# ==================== 工作区管理 API ====================

@app.route('/api/workspaces', methods=['GET', 'OPTIONS'])
def get_workspaces():
    """List all workspaces; answers CORS preflight with an empty 200."""
    if request.method == 'OPTIONS':
        return '', 200
    try:
        return jsonify(workspace_service.get_workspaces())
    except Exception as e:
        logger.error(f"获取工作区列表失败: {e}")
        return jsonify({'error': f'获取工作区列表失败: {str(e)}'}), 500

@app.route('/api/workspaces', methods=['POST', 'OPTIONS'])
def create_workspace():
    """Create a workspace from the JSON body; 'name' is required."""
    if request.method == 'OPTIONS':
        return '', 200

    payload = request.get_json()
    if not payload or 'name' not in payload:
        return jsonify({'error': '缺少工作区名称'}), 400

    try:
        created = workspace_service.create_workspace(
            name=payload['name'],
            description=payload.get('description'),
            settings=payload.get('settings', {})
        )
        return jsonify(created), 201
    except Exception as e:
        logger.error(f"创建工作区失败: {e}")
        return jsonify({'error': f'创建工作区失败: {str(e)}'}), 500

@app.route('/api/workspaces/<workspace_id>', methods=['GET'])
def get_workspace(workspace_id):
    """Fetch a single workspace by id; 404 when it does not exist."""
    try:
        workspace = workspace_service.get_workspace(workspace_id)
    except Exception as e:
        logger.error(f"获取工作区失败: {e}")
        return jsonify({'error': f'获取工作区失败: {str(e)}'}), 500
    if not workspace:
        return jsonify({'error': '工作区不存在'}), 404
    return jsonify(workspace)

@app.route('/api/workspaces/<workspace_id>', methods=['PUT'])
def update_workspace(workspace_id):
    """Update name/description/settings of an existing workspace."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': '无效的请求数据'}), 400

    try:
        updated = workspace_service.update_workspace(
            workspace_id=workspace_id,
            name=payload.get('name'),
            description=payload.get('description'),
            settings=payload.get('settings')
        )
        return jsonify(updated)
    except ValueError as e:
        # The service signals an unknown workspace id with ValueError.
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"更新工作区失败: {e}")
        return jsonify({'error': f'更新工作区失败: {str(e)}'}), 500

@app.route('/api/workspaces/<workspace_id>', methods=['DELETE'])
def delete_workspace(workspace_id):
    """Delete a workspace; unknown ids map to 404 via ValueError."""
    try:
        if workspace_service.delete_workspace(workspace_id):
            return jsonify({'message': '工作区删除成功'})
        return jsonify({'error': '删除工作区失败'}), 500
    except ValueError as e:
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"删除工作区失败: {e}")
        return jsonify({'error': f'删除工作区失败: {str(e)}'}), 500

# ==================== 分析会话管理 API ====================

@app.route('/api/sessions', methods=['GET'])
def get_analysis_sessions():
    """List analysis sessions, optionally scoped to a workspace.

    Query params: workspace_id (optional) and limit (default 50).
    Robustness fix: a non-numeric 'limit' previously raised an unhandled
    ValueError (HTTP 500); using args.get(..., type=int) falls back to the
    default instead.
    """
    workspace_id = request.args.get('workspace_id')
    limit = request.args.get('limit', 50, type=int)

    try:
        sessions = session_service.get_sessions(workspace_id=workspace_id, limit=limit)
        return jsonify(sessions)
    except Exception as e:
        logger.error(f"获取分析会话列表失败: {e}")
        return jsonify({'error': f'获取分析会话列表失败: {str(e)}'}), 500

@app.route('/api/sessions', methods=['POST'])
def create_analysis_session():
    """Create an analysis session; requires 'workspace_id' and 'name'."""
    payload = request.get_json()
    required = ('workspace_id', 'name')
    if not payload or any(key not in payload for key in required):
        return jsonify({'error': '缺少必需参数'}), 400

    try:
        created = session_service.create_session(
            workspace_id=payload['workspace_id'],
            name=payload['name'],
            description=payload.get('description')
        )
        return jsonify(created), 201
    except Exception as e:
        logger.error(f"创建分析会话失败: {e}")
        return jsonify({'error': f'创建分析会话失败: {str(e)}'}), 500

@app.route('/api/sessions/<session_id>', methods=['GET'])
def get_analysis_session(session_id):
    """Fetch one analysis session by id; 404 when missing."""
    try:
        found = session_service.get_session(session_id)
    except Exception as e:
        logger.error(f"获取分析会话失败: {e}")
        return jsonify({'error': f'获取分析会话失败: {str(e)}'}), 500
    if not found:
        return jsonify({'error': '分析会话不存在'}), 404
    return jsonify(found)

@app.route('/api/sessions/<session_id>', methods=['PUT'])
def update_analysis_session(session_id):
    """Patch arbitrary fields of an analysis session from the JSON body."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': '无效的请求数据'}), 400

    try:
        # Forward every posted field as a keyword argument to the service.
        updated = session_service.update_session(session_id, **payload)
        return jsonify(updated)
    except ValueError as e:
        # The service signals an unknown session id with ValueError.
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"更新分析会话失败: {e}")
        return jsonify({'error': f'更新分析会话失败: {str(e)}'}), 500

@app.route('/api/sessions/<session_id>', methods=['DELETE'])
def delete_analysis_session(session_id):
    """Delete an analysis session; unknown ids map to 404 via ValueError."""
    try:
        if session_service.delete_session(session_id):
            return jsonify({'message': '分析会话删除成功'})
        return jsonify({'error': '删除分析会话失败'}), 500
    except ValueError as e:
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"删除分析会话失败: {e}")
        return jsonify({'error': f'删除分析会话失败: {str(e)}'}), 500

# ==================== 会话恢复（将持久化数据加载回内存） ====================

@app.route('/api/sessions/<session_id>/load', methods=['POST'])
def load_analysis_session(session_id):
    """Restore a persisted session's logs and analysis results into memory.

    Re-extracts the stored archive into the shared log_processor; reuses the
    persisted analysis results when present, otherwise recomputes them and
    writes them back to the session (best-effort).
    """
    try:
        session = session_service.get_session(session_id)
        if not session:
            return jsonify({'error': '分析会话不存在'}), 404

        storage_path = session.get('storage_path') or ''
        if not storage_path or not os.path.exists(storage_path):
            return jsonify({'error': '未找到会话对应的存储文件'}), 404

        # Wipe the in-memory state and reload from the archive on disk.
        log_processor.clear_logs()
        log_processor.extract_from_tar_gz(storage_path)

        persisted = session.get('analysis_results')
        if persisted:
            log_processor.analysis_results = persisted
            logger.info(f"从持久化数据恢复分析结果: {len(log_processor.logs)} 条日志")
        else:
            # No persisted results: recompute from the freshly loaded logs.
            log_processor.analysis_results = log_processor.generate_summary()
            logger.info(f"重新计算分析结果: {len(log_processor.logs)} 条日志")

            # Best-effort write-back of the freshly computed results.
            try:
                session_service.update_session(
                    session_id,
                    analysis_results=log_processor.analysis_results
                )
                logger.info("已更新会话中的分析结果")
            except Exception as update_error:
                logger.warning(f"更新会话分析结果失败: {update_error}")

        return jsonify({
            'message': '会话已加载到内存',
            'session_id': session_id,
            'total_logs': len(log_processor.logs)
        })
    except Exception as e:
        logger.error(f"会话恢复失败: {e}")
        return jsonify({'error': f'会话恢复失败: {str(e)}'}), 500

# ==================== Flow管理 API ====================

@app.route('/api/flows', methods=['GET'])
def get_flows():
    """Return the list of all known flows."""
    try:
        flows = flow_service.get_flows()
    except Exception as e:
        logger.error(f"获取Flow列表失败: {e}")
        return jsonify({'error': f'获取Flow列表失败: {str(e)}'}), 500
    logger.info(f"获取Flow列表: {len(flows)} 个Flow")
    return jsonify(flows)

@app.route('/api/flows', methods=['POST'])
def create_flow():
    """Create a flow; the JSON body must contain 'id', 'file' and 'description'."""
    try:
        payload = request.get_json()
        required = ('id', 'file', 'description')
        if not payload or not all(key in payload for key in required):
            return jsonify({'error': '缺少必需参数'}), 400

        if not flow_service.create_flow(payload):
            return jsonify({'error': 'Flow创建失败'}), 500

        logger.info(f"创建Flow成功: {payload['id']}")
        return jsonify(payload), 201
    except Exception as e:
        logger.error(f"创建Flow失败: {e}")
        return jsonify({'error': f'创建Flow失败: {str(e)}'}), 500

@app.route('/api/flows/smart-generate', methods=['POST'])
def smart_generate_flow_route():
    """Generate a flow from a natural-language prompt via the flow service.

    Robustness fix: a missing/empty JSON body previously raised
    AttributeError on data.get(...) and surfaced as a 500; coercing None
    to {} makes it return the intended 400 instead.
    """
    try:
        data = request.get_json() or {}
        prompt = data.get('prompt')
        if not prompt:
            return jsonify({'error': '缺少prompt参数'}), 400

        new_flow = flow_service.smart_generate_flow(prompt)
        return jsonify(new_flow), 201
    except Exception as e:
        logger.error(f"智能生成Flow失败: {e}")
        return jsonify({'error': f'智能生成Flow失败: {e}'}), 500

@app.route('/api/flows/<flow_id>/content', methods=['GET'])
def get_flow_content(flow_id):
    """Return the stored content of one flow.

    Responds 200 with the content, 404 when the flow does not exist,
    500 on service failure.
    """
    try:
        flow_body = flow_service.get_flow_content(flow_id)
    except Exception as e:
        logger.error(f"获取Flow内容失败: {e}")
        return jsonify({'error': f'获取Flow内容失败: {str(e)}'}), 500

    if flow_body is None:
        return jsonify({'error': 'Flow not found'}), 404

    logger.info(f"获取Flow内容: {flow_id}")
    return jsonify(flow_body)

@app.route('/api/flows/<flow_id>', methods=['DELETE'])
def delete_flow(flow_id):
    """Delete one flow by id.

    Responds 200 on success, 404 when the flow does not exist,
    500 on service failure.
    """
    try:
        if flow_service.delete_flow(flow_id):
            logger.info(f"删除Flow成功: {flow_id}")
            return jsonify({'message': 'Flow删除成功'})
        return jsonify({'error': 'Flow not found'}), 404
    except Exception as e:
        logger.error(f"删除Flow失败: {e}")
        return jsonify({'error': f'删除Flow失败: {str(e)}'}), 500

@app.route('/api/flows/generate', methods=['POST'])
def generate_flow_api():
    """使用AI生成flow内容 — generate flow DSL content with the AI assistant.

    Expects JSON {'description': ...}. Builds a prompt from the bundled DSL
    template, asks the AI analyzer, validates the answer as YAML and returns
    {'content': <yaml>} on success; 400 for bad input/invalid YAML, 500 on
    AI or template failures.
    """
    try:
        data = request.get_json()
        if not data or 'description' not in data:
            return jsonify({'error': '缺少必需参数'}), 400

        # 读取prompt模板 — the DSL prompt template shipped next to this module
        prompt_file = os.path.join(os.path.dirname(__file__), 'DSL', 'prompt.txt')
        with open(prompt_file, 'r', encoding='utf-8') as f:
            prompt_template = f.read()

        # 构建提示词 — template first, then the user's description
        prompt = f"{prompt_template}\n\n请根据以下描述生成一个符合上述规范的Flow DSL：\n\n{data['description']}"

        # 调用AI助手生成内容 (logs deliberately excluded from the prompt)
        try:
            response = ai_analyzer.ask_question(prompt, log_processor=None, conversation_id=None, include_logs=False)
            if not response or 'answer' not in response:
                return jsonify({'error': 'AI生成失败'}), 500

            # BUGFIX: `yaml` was referenced below but never imported at module
            # level, so validation always raised NameError and surfaced as a
            # generic 500. Import PyYAML locally; if it is missing the outer
            # handler still reports a clear error instead of crashing the app.
            import yaml

            # 验证生成的内容是否是有效的YAML
            try:
                yaml.safe_load(response['answer'])
            except yaml.YAMLError as e:
                return jsonify({'error': f'生成的内容不是有效的YAML: {str(e)}'}), 400

            return jsonify({'content': response['answer']})
        except Exception as e:
            logger.error(f"AI生成失败: {e}")
            return jsonify({'error': f'AI生成失败: {str(e)}'}), 500

    except Exception as e:
        logger.error(f"生成Flow失败: {e}")
        return jsonify({'error': f'生成Flow失败: {str(e)}'}), 500

@app.route('/api/flows/<flow_id>/content', methods=['PUT'])
def update_flow_content(flow_id):
    """Replace the stored content of one flow.

    Expects JSON {'content': ...}. Responds 200 on success, 400 when the
    body is missing, 404 when the flow does not exist, 500 on failure.
    """
    try:
        payload = request.get_json()
        if not payload or 'content' not in payload:
            return jsonify({'error': '缺少必需参数'}), 400

        if not flow_service.update_flow_content(flow_id, payload['content']):
            return jsonify({'error': 'Flow not found'}), 404

        logger.info(f"更新Flow内容: {flow_id}")
        return jsonify({'message': 'Flow内容更新成功'})
    except Exception as e:
        logger.error(f"更新Flow内容失败: {e}")
        return jsonify({'error': f'更新Flow内容失败: {str(e)}'}), 500

# ==================== 对话历史管理 API ====================

@app.route('/api/sessions/<session_id>/conversations', methods=['GET'])
def get_session_conversations(session_id):
    """获取会话对话历史 — return conversation history for one session.

    Optional query param `limit` (default 100) caps the number of messages.
    Responds 400 for a non-integer limit, 500 on service failure.
    """
    # BUGFIX: a non-numeric ?limit= previously raised an uncaught ValueError
    # (HTTP 500); treat it as a client error instead.
    try:
        limit = int(request.args.get('limit', 100))
    except (TypeError, ValueError):
        return jsonify({'error': 'limit参数必须是整数'}), 400

    try:
        conversations = conversation_service.get_conversation_history(session_id, limit=limit)
        return jsonify(conversations)
    except Exception as e:
        logger.error(f"获取对话历史失败: {e}")
        return jsonify({'error': f'获取对话历史失败: {str(e)}'}), 500

@app.route('/api/sessions/<session_id>/conversations', methods=['POST'])
def add_conversation_message(session_id):
    """Append one message to a session's conversation history.

    The payload must carry 'message_type' and 'content'; 'context' is
    optional (defaults to {}). Responds 201 with the stored message.
    """
    payload = request.get_json()

    required = ('message_type', 'content')
    if not payload or any(field not in payload for field in required):
        return jsonify({'error': '缺少必需参数'}), 400

    try:
        saved = conversation_service.add_message(
            session_id=session_id,
            message_type=payload['message_type'],
            content=payload['content'],
            context=payload.get('context', {})
        )
        return jsonify(saved), 201
    except Exception as e:
        logger.error(f"添加对话消息失败: {e}")
        return jsonify({'error': f'添加对话消息失败: {str(e)}'}), 500

@app.route('/api/sessions/<session_id>/conversations', methods=['DELETE'])
def clear_session_conversations(session_id):
    """Wipe the conversation history of one session.

    Responds 200 on success, 500 when the service reports failure or raises.
    """
    try:
        if conversation_service.clear_conversation_history(session_id):
            return jsonify({'message': '对话历史清空成功'})
        return jsonify({'error': '清空对话历史失败'}), 500
    except Exception as e:
        logger.error(f"清空对话历史失败: {e}")
        return jsonify({'error': f'清空对话历史失败: {str(e)}'}), 500



if __name__ == '__main__':
    # Development entry point: run the built-in Werkzeug server on port 8080.
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # interactive debugger to the whole network — confirm this is never used
    # in production (use a WSGI server such as gunicorn there).
    app.run(debug=True, host='0.0.0.0', port=8080)
