var express = require('express');
var router = express.Router();
const axios = require('axios');
const config = require('../config');
const { AIChatSession } = require('../db/aiChatModel');

// 从配置文件获取配置
const OLLAMA_CONFIG = config.OLLAMA;
const AI_CONFIG = config.AI;

/**
 * Detect the dominant language of a text snippet using Unicode
 * character-range heuristics.
 *
 * @param {string} text - Text to classify.
 * @returns {{language: string, languageName?: string, confidence: number}}
 *   Language key (e.g. 'chinese'), display name, and heuristic confidence
 *   in [0, 1]; `language` is 'unknown' when nothing matches.
 */
function detectLanguage(text) {
  // Guard: null/undefined or non-string input cannot be classified.
  if (!text || typeof text !== 'string') {
    return { language: 'unknown', confidence: 0 };
  }
  
  // Simple per-language detection rules based on characteristic characters.
  const patterns = {
    chinese: {
      pattern: /[\u4e00-\u9fff]/,
      name: '中文',
      confidence: 0.9
    },
    english: {
      pattern: /^[a-zA-Z\s.,!?;:'"()-]+$/,
      name: 'English',
      confidence: 0.8
    },
    japanese: {
      pattern: /[\u3040-\u309f\u30a0-\u30ff]/,
      name: '日本語',
      confidence: 0.9
    },
    korean: {
      pattern: /[\uac00-\ud7af]/,
      name: '한국어',
      confidence: 0.9
    },
    french: {
      pattern: /[àâäéèêëïîôöùûüÿç]/,
      name: 'Français',
      confidence: 0.7
    },
    german: {
      pattern: /[äöüß]/,
      name: 'Deutsch',
      confidence: 0.7
    },
    spanish: {
      pattern: /[ñáéíóúü]/,
      name: 'Español',
      confidence: 0.7
    },
    russian: {
      pattern: /[\u0400-\u04ff]/,
      name: 'Русский',
      confidence: 0.9
    }
  };
  
  // Check Japanese/Korean first: Japanese kanji also fall in the CJK range
  // used for Chinese, so kana must be tested before the Chinese rule.
  const priorityLanguages = ['japanese', 'korean', 'chinese', 'english', 'french', 'german', 'spanish', 'russian'];
  
  for (const lang of priorityLanguages) {
    // FIX: was named `config`, which shadowed the module-level config import.
    const langDef = patterns[lang];
    if (langDef && langDef.pattern.test(text)) {
      return {
        language: lang,
        languageName: langDef.name,
        confidence: langDef.confidence
      };
    }
  }
  
  // No rule matched; fall back to a ratio test for mostly-English text
  // (e.g. English mixed with digits, which the strict regex rejects).
  const englishWords = text.toLowerCase().match(/[a-z]+/g) || [];
  const totalWords = text.split(/\s+/).filter(word => word.length > 0).length;
  
  if (totalWords > 0 && englishWords.length / totalWords > 0.7) {
    return {
      language: 'english',
      languageName: 'English',
      confidence: 0.6
    };
  }
  
  return {
    language: 'unknown',
    languageName: '未知',
    confidence: 0
  };
}

/**
 * Build a system prompt that instructs the model to answer in the
 * language detected from the user's input.
 *
 * @param {string} userMessage - Original user message (kept for interface
 *   compatibility; not used by the current implementation).
 * @param {{language: string}} detectedLanguage - Result of detectLanguage().
 * @returns {string} System prompt; falls back to a generic assistant
 *   prompt when the language has no dedicated instruction.
 */
function generateLanguagePrompt(userMessage, detectedLanguage) {
  const promptByLanguage = {
    chinese: '请用中文回复。',
    english: 'Please respond in English.',
    japanese: '日本語で返信してください。',
    korean: '한국어로 답변해 주세요.',
    french: 'Veuillez répondre en français.',
    german: 'Bitte antworten Sie auf Deutsch.',
    spanish: 'Por favor, responda en español.',
    russian: 'Пожалуйста, ответьте на русском языке.'
  };

  const basePrompt = '你是一个有用的AI助手。';
  const taskPrompt = '请根据用户的问题提供准确、有帮助的回答。';
  const languageHint = promptByLanguage[detectedLanguage.language] || '';

  return languageHint
    ? `${basePrompt}${languageHint} ${taskPrompt}`
    : `${basePrompt}${taskPrompt}`;
}

// Router info endpoint: confirms the router is mounted and exposes the
// configured default model settings. (The old "GET users listing" comment
// was an express-generator leftover.)
router.get('/', (req, res) => {
  res.json({
    success: true,
    message: 'zjx 路由正常工作',
    defaultModel: AI_CONFIG.DEFAULT_MODEL,
    enableModelSelection: AI_CONFIG.ENABLE_MODEL_SELECTION
  });
});

// Liveness probe: returns a fixed message plus the current server time.
router.get('/test', (req, res) => {
  const payload = {
    success: true,
    message: 'zjx 路由正常工作',
    timestamp: new Date().toISOString()
  };
  res.json(payload);
});

// Configuration inspection endpoint: echoes the active Ollama/AI settings
// so the frontend can verify what the server was started with.
router.get('/config', (req, res) => {
  const payload = {
    success: true,
    ollamaModel: OLLAMA_CONFIG.MODEL,
    defaultModel: AI_CONFIG.DEFAULT_MODEL,
    enableModelSelection: AI_CONFIG.ENABLE_MODEL_SELECTION,
    message: '配置检查完成'
  };
  res.json(payload);
});

// Health check for the Ollama backend: queries /api/tags to verify the
// service is reachable and that the configured model is installed.
router.get('/ollama-status', async function(req, res) {
  try {
    const response = await axios.get(`${OLLAMA_CONFIG.BASE_URL}/api/tags`, {
      timeout: 5000
    });
    
    if (response.data && response.data.models) {
      const installedModels = response.data.models.map(m => m.name);
      const configuredModel = OLLAMA_CONFIG.MODEL;
      const modelExists = installedModels.includes(configuredModel);
      
      res.json({
        success: true,
        message: modelExists ? 'Ollama 服务正常运行' : 'Ollama 服务正常，但配置的模型未找到',
        models: installedModels,
        modelConfigured: configuredModel,
        modelExists: modelExists,
        // Only surface the model families this app is known to use.
        availableModels: installedModels.filter(m => m.includes('deepseek') || m.includes('llama') || m.includes('mistral'))
      });
    } else {
      res.json({
        success: false,
        error: 'Ollama 服务响应异常',
        message: '无法获取模型列表'
      });
    }
  } catch (error) {
    console.error('Ollama 服务检查失败:', error.message);
    res.json({
      success: false,
      error: 'Ollama 服务连接失败',
      // FIX: report the configured base URL instead of a hard-coded
      // localhost address, so the hint stays correct if config changes.
      message: `请确保 Ollama 服务正在运行，并且可以通过 ${OLLAMA_CONFIG.BASE_URL} 访问`
    });
  }
});



// Create a new chat session document and return its generated sessionId.
router.post('/sessions', async function(req, res) {
    try {
        const { userId = 'anonymous', title = '新对话' } = req.body || {};
        
        // FIX: String.prototype.substr is deprecated — use slice instead.
        // The id combines a timestamp with a short random suffix; it is not
        // cryptographically secure, which is acceptable for session naming.
        const sessionId = `session_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
        
        const newSession = new AIChatSession({
            sessionId,
            userId,
            title,
            messages: []
        });
        
        await newSession.save();
        
        res.json({
            success: true,
            sessionId,
            message: '对话会话创建成功'
        });
        
    } catch (error) {
        console.error('创建会话失败:', error.message);
        res.json({
            success: false,
            error: '创建会话失败',
            message: error.message
        });
    }
});

// List a user's active chat sessions (newest first, capped at 50),
// returning summaries only — message contents are not included.
router.get('/sessions', async (req, res) => {
    try {
        const { userId = 'anonymous' } = req.query;

        const sessions = await AIChatSession
            .find({ userId, isActive: true })
            .select('sessionId title createdAt updatedAt messages')
            .sort({ updatedAt: -1 })
            .limit(50);

        const summaries = sessions.map(({ sessionId, title, createdAt, updatedAt, messages }) => ({
            sessionId,
            title,
            createdAt,
            updatedAt,
            messageCount: messages.length
        }));

        res.json({ success: true, sessions: summaries });

    } catch (error) {
        console.error('获取会话列表失败:', error.message);
        res.json({
            success: false,
            error: '获取会话列表失败',
            message: error.message
        });
    }
});

// Fetch the full detail of one active session, including its messages
// and the model/prompt configuration it was last used with.
router.get('/sessions/:sessionId', async (req, res) => {
    try {
        const { sessionId } = req.params;

        const session = await AIChatSession.findOne({ sessionId, isActive: true });

        // Soft-deleted or unknown sessions are reported as missing.
        if (!session) {
            return res.json({
                success: false,
                error: '会话不存在',
                message: '找不到指定的对话会话'
            });
        }

        const detail = {
            sessionId: session.sessionId,
            title: session.title,
            messages: session.messages,
            selectedModel: session.selectedModel,
            systemPrompt: session.systemPrompt,
            temperature: session.temperature,
            maxTokens: session.maxTokens,
            createdAt: session.createdAt,
            updatedAt: session.updatedAt
        };

        res.json({ success: true, session: detail });

    } catch (error) {
        console.error('获取会话详情失败:', error.message);
        res.json({
            success: false,
            error: '获取会话详情失败',
            message: error.message
        });
    }
});

// Soft-delete a session by flipping its isActive flag (history is kept).
router.delete('/sessions/:sessionId', async function(req, res) {
    try {
        const { sessionId } = req.params;
        
        const result = await AIChatSession.updateOne(
            { sessionId },
            { isActive: false }
        );
        
        // FIX: check matchedCount, not modifiedCount — deleting a session
        // that is already inactive matches but modifies nothing, and the old
        // check wrongly reported it as "not found". Delete is now idempotent.
        if (result.matchedCount === 0) {
            return res.json({
                success: false,
                error: '会话不存在',
                message: '找不到指定的对话会话'
            });
        }
        
        res.json({
            success: true,
            message: '会话删除成功'
        });
        
    } catch (error) {
        console.error('删除会话失败:', error.message);
        res.json({
            success: false,
            error: '删除会话失败',
            message: error.message
        });
    }
});

// Export one session's full transcript as a downloadable JSON file.
router.get('/sessions/:sessionId/export', async (req, res) => {
    try {
        const { sessionId } = req.params;

        const session = await AIChatSession.findOne({ sessionId, isActive: true });

        if (!session) {
            return res.json({
                success: false,
                error: '会话不存在',
                message: '找不到指定的对话会话'
            });
        }

        // Flatten each message to a plain serializable record.
        const exportedMessages = session.messages.map((msg) => ({
            role: msg.role,
            content: msg.content,
            image: msg.image,
            model: msg.model,
            modelName: msg.modelName,
            isError: msg.isError,
            timestamp: msg.timestamp
        }));

        const exportData = {
            sessionId: session.sessionId,
            title: session.title,
            createdAt: session.createdAt,
            updatedAt: session.updatedAt,
            selectedModel: session.selectedModel,
            systemPrompt: session.systemPrompt,
            temperature: session.temperature,
            maxTokens: session.maxTokens,
            messages: exportedMessages
        };

        // Attachment headers make the browser download the JSON instead of
        // rendering it; the filename embeds the session id and today's date.
        const datePart = new Date().toISOString().split('T')[0];
        res.setHeader('Content-Type', 'application/json');
        res.setHeader('Content-Disposition', `attachment; filename="chat-session-${sessionId}-${datePart}.json"`);

        res.json(exportData);

    } catch (error) {
        console.error('导出聊天记录失败:', error.message);
        res.json({
            success: false,
            error: '导出聊天记录失败',
            message: error.message
        });
    }
});

// Aggregate per-user chat statistics: session/message totals, sessions
// created in the last 7 days, and model usage counts.
router.get('/chat-stats', async function(req, res) {
    try {
        const { userId = 'anonymous' } = req.query;
        
        // Window start for "recent sessions": 7 days ago. The count is based
        // on creation time and intentionally includes soft-deleted sessions.
        const sevenDaysAgo = new Date();
        sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
        
        // FIX: the four queries are independent — run them in parallel
        // instead of awaiting each one sequentially.
        const [totalSessions, sessions, recentSessions, modelStats] = await Promise.all([
            AIChatSession.countDocuments({ userId, isActive: true }),
            // NOTE(review): this loads every message just to count them;
            // an aggregation with $size would avoid the transfer — verify
            // before changing.
            AIChatSession.find({ userId, isActive: true }).select('messages'),
            AIChatSession.countDocuments({
                userId,
                createdAt: { $gte: sevenDaysAgo }
            }),
            AIChatSession.aggregate([
                { $match: { userId, isActive: true } },
                { $group: { _id: '$selectedModel', count: { $sum: 1 } } },
                { $sort: { count: -1 } }
            ])
        ]);
        
        const totalMessages = sessions.reduce((sum, session) => sum + session.messages.length, 0);
        
        res.json({
            success: true,
            stats: {
                totalSessions,
                totalMessages,
                recentSessions,
                modelStats: modelStats.map(stat => ({
                    model: stat._id,
                    count: stat.count
                }))
            }
        });
        
    } catch (error) {
        console.error('获取聊天统计失败:', error.message);
        res.json({
            success: false,
            error: '获取聊天统计失败',
            message: error.message
        });
    }
});

// Unified chat endpoint: validates input, dispatches to the selected
// backend (currently only Ollama), optionally persists the exchange,
// then relays the AI response to the client.
router.post('/chat', async (req, res) => {
    try {
        const {
            message,
            image,
            sessionId,
            model = AI_CONFIG.DEFAULT_MODEL,
            systemPrompt,
            temperature,
            maxTokens
        } = req.body || {};

        // A request must carry at least a text message or an image.
        const hasContent = Boolean(message) || Boolean(image);
        if (!hasContent) {
            return res.json({
                success: false,
                error: '消息内容或图片不能为空'
            });
        }

        // Only the Ollama backend is supported; reject anything else early.
        if (model !== 'ollama') {
            return res.json({
                success: false,
                error: '不支持的模型类型',
                message: '支持的模型: ollama'
            });
        }

        const aiResponse = await handleOllamaRequest(req, res, message, image, systemPrompt, temperature, maxTokens);

        // Persist the exchange only on success and when a session is given;
        // a storage failure must not hide a good AI response from the user.
        if (aiResponse && aiResponse.success && sessionId) {
            try {
                await saveChatToDatabase(sessionId, message, image, aiResponse, model, systemPrompt, temperature, maxTokens);
            } catch (dbError) {
                console.error('保存对话到数据库失败:', dbError.message);
            }
        }

        res.json(aiResponse);

    } catch (error) {
        console.error('聊天接口错误:', error.message);
        res.json({
            success: false,
            error: '请求失败',
            message: error.message
        });
    }
});

/**
 * Persist one user/AI exchange into the session document, creating the
 * session on first use and updating its model configuration otherwise.
 *
 * @param {string} sessionId - Client-supplied session identifier.
 * @param {string|undefined} userMessage - User text; may be absent for image-only requests.
 * @param {string|undefined} userImage - Optional image payload attached to the message.
 * @param {{response: string, modelName?: string}} aiResponse - Successful AI reply.
 * @param {string} model - Backend identifier (e.g. 'ollama').
 * @param {string|undefined} systemPrompt
 * @param {number|undefined} temperature
 * @param {number|undefined} maxTokens
 * @throws Rethrows any database error after logging it.
 */
async function saveChatToDatabase(sessionId, userMessage, userImage, aiResponse, model, systemPrompt, temperature, maxTokens) {
    try {
        // FIX: userMessage may be undefined (the /chat endpoint accepts
        // image-only requests), so guard before calling string methods.
        const messageText = userMessage || '';

        // Find the active session, or create one lazily if the client sent
        // a sessionId that was never registered.
        let session = await AIChatSession.findOne({ sessionId, isActive: true });
        
        if (!session) {
            session = new AIChatSession({
                sessionId,
                userId: 'anonymous',
                // Derive the title from the first message, truncated to 50 chars.
                title: messageText.substring(0, 50) + (messageText.length > 50 ? '...' : ''),
                messages: [],
                selectedModel: model,
                systemPrompt: systemPrompt,
                temperature: temperature,
                maxTokens: maxTokens
            });
        } else {
            // Keep the stored configuration in sync with the latest request.
            session.selectedModel = model;
            session.systemPrompt = systemPrompt;
            session.temperature = temperature;
            session.maxTokens = maxTokens;
        }

        // Append the user's message...
        session.messages.push({
            role: 'user',
            content: userMessage,
            image: userImage,
            model: model,
            timestamp: new Date()
        });

        // ...followed by the AI reply.
        session.messages.push({
            role: 'ai',
            content: aiResponse.response,
            model: model,
            modelName: aiResponse.modelName || model,
            isError: false,
            timestamp: new Date()
        });

        await session.save();
        
        console.log(`对话已保存到数据库，会话ID: ${sessionId}`);
        
    } catch (error) {
        console.error('保存对话到数据库失败:', error);
        throw error;
    }
}

/**
 * Send a chat request to the Ollama /api/generate endpoint and normalize
 * the result into a { success, ... } object (never throws to the caller).
 *
 * @param {object} req - Express request (unused; kept for interface compatibility).
 * @param {object} res - Express response (unused; kept for interface compatibility).
 * @param {string|undefined} message - User text; may be absent for image-only requests.
 * @param {string|undefined} image - Optional image payload.
 * @param {string|undefined} systemPrompt - Overrides the auto-generated language prompt.
 * @param {number|undefined} temperature - Sampling temperature; 0 is a valid value.
 * @param {number|undefined} maxTokens - Generation cap (num_predict).
 * @returns {Promise<object>} Success object with the model response, or a
 *   failure object describing the error.
 */
async function handleOllamaRequest(req, res, message, image, systemPrompt, temperature, maxTokens) {
    try {
        // Pre-flight: confirm the configured model is actually installed.
        try {
            const modelsResponse = await axios.get(`${OLLAMA_CONFIG.BASE_URL}/api/tags`, {
                timeout: 5000
            });
            
            if (modelsResponse.data && modelsResponse.data.models) {
                const installedModels = modelsResponse.data.models.map(m => m.name);
                const configuredModel = OLLAMA_CONFIG.MODEL;
                
                if (!installedModels.includes(configuredModel)) {
                    return {
                        success: false,
                        error: '模型未找到',
                        message: `模型 ${configuredModel} 未在 Ollama 中找到。可用模型: ${installedModels.slice(0, 5).join(', ')}...`,
                        availableModels: installedModels.slice(0, 10)
                    };
                }
            }
        } catch (modelCheckError) {
            console.error('模型检查失败:', modelCheckError.message);
            return {
                success: false,
                error: 'Ollama 服务连接失败',
                message: '无法连接到 Ollama 服务，请确保服务正在运行'
            };
        }

        // Detect the user's language so the model can be asked to reply in kind.
        const detectedLanguage = detectLanguage(message);
        console.log('检测到的语言:', detectedLanguage);
        
        // Explicit systemPrompt wins over the auto-generated language prompt.
        const languagePrompt = generateLanguagePrompt(message, detectedLanguage);
        const finalSystemPrompt = systemPrompt || languagePrompt;

        // Build the prompt string sent to Ollama.
        let prompt = message;
        if (image) {
            // NOTE(review): the image payload is concatenated into the text
            // prompt; Ollama's generate API expects images in a separate
            // `images` base64 array for multimodal models — verify this path
            // actually works with the configured model before relying on it.
            const imagePrompt = detectedLanguage.language === 'chinese' ? 
                '请分析这张图片：' : 
                'Please analyze this image:';
            prompt = `${imagePrompt}${image}\n\n${detectedLanguage.language === 'chinese' ? '用户问题：' : 'User question:'} ${message || (detectedLanguage.language === 'chinese' ? '请描述这张图片的内容' : 'Please describe the content of this image')}`;
        } else {
            prompt = `${finalSystemPrompt}\n\n${message}`;
        }
        
        const requestData = {
            model: OLLAMA_CONFIG.MODEL,
            prompt: prompt,
            stream: false,
            options: {
                // FIX: use ?? instead of || so an explicit temperature of 0
                // (or maxTokens of 0) is not silently replaced by the default.
                temperature: temperature ?? OLLAMA_CONFIG.TEMPERATURE,
                top_p: OLLAMA_CONFIG.TOP_P,
                num_predict: maxTokens ?? OLLAMA_CONFIG.MAX_TOKENS
            }
        };

        console.log('发送请求到 Ollama:', {
            model: OLLAMA_CONFIG.MODEL,
            // FIX: message may be undefined for image-only requests; the old
            // message.length threw here and turned the request into a failure.
            messageLength: message ? message.length : 0,
            temperature: requestData.options.temperature
        });

        const response = await axios.post(
            `${OLLAMA_CONFIG.BASE_URL}/api/generate`,
            requestData,
            {
                headers: {
                    'Content-Type': 'application/json'
                },
                timeout: OLLAMA_CONFIG.TIMEOUT
            }
        );

        console.log('Ollama API 响应成功');
        
        return {
            success: true,
            response: response.data.response,
            model: 'ollama',
            modelName: OLLAMA_CONFIG.MODEL,
            usage: response.data.usage || null,
            detectedLanguage: detectedLanguage
        };
        
    } catch (error) {
        console.error('Ollama API 错误:', error.message);
        console.error('错误详情:', error.response?.data);
        
        // Map transport/HTTP failures to user-facing failure objects.
        if (error.response) {
            const status = error.response.status;
            const data = error.response.data;
            
            if (status === 404) {
                return {
                    success: false,
                    error: '模型未找到',
                    message: `模型 ${OLLAMA_CONFIG.MODEL} 未在 Ollama 中找到，请确保已正确下载该模型`
                };
            } else if (status === 400) {
                return {
                    success: false,
                    error: `Ollama API 错误 (${status})`,
                    message: data.error || '请求参数错误，请检查模型名称和参数'
                };
            } else {
                return {
                    success: false,
                    error: `Ollama API 错误 (${status})`,
                    message: data.error || '请求失败'
                };
            }
        } else if (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND') {
            return {
                success: false,
                error: '网络连接失败',
                message: '无法连接到 Ollama 服务，请确保 Ollama 正在运行在 http://localhost:11434'
            };
        } else if (error.code === 'ECONNABORTED') {
            return {
                success: false,
                error: '请求超时',
                message: '模型响应时间过长，请稍后重试或减少输入内容'
            };
        } else {
            return {
                success: false,
                error: '请求失败',
                message: error.message
            };
        }
    }
}



module.exports = router;
