const { ipcMain } = require('electron');
const https = require('https');
const http = require('http');

/**
 * AIService - Handles interaction with LLM providers (DeepSeek, Ollama, etc.)
 * Uses standard OpenAI Chat Completion API format.
 */
class AIService {
    /**
     * Creates the service with DeepSeek-compatible defaults.
     * Values are refreshed from the database on every request via loadConfig().
     */
    constructor() {
        this.config = {
            baseUrl: 'https://api.deepseek.com/v1',
            apiKey: '',
            model: 'deepseek-chat', // or user configured model
            temperature: 0.7
        };
    }

    /**
     * Send a streaming chat-completion request (OpenAI SSE wire format) and
     * relay content deltas to the renderer over IPC.
     *
     * IPC channels emitted:
     *   - 'ai:stream'      one content delta per event
     *   - 'ai:stream-end'  emitted exactly once when the stream finishes
     *   - 'ai:error'       configuration, HTTP, or transport failure
     *
     * @param {Array<{role: string, content: string}>} messages - Chat history.
     * @param {Electron.IpcMainEvent} event - IPC event used to reach the sender.
     */
    async chatStream(messages, event) {
        // 1. Load config from DB (on every request to ensure freshness)
        await this.loadConfig();

        // Local providers (e.g. Ollama on localhost) do not need an API key.
        if (!this.config.apiKey && !this.config.baseUrl.includes('localhost')) {
            event.sender.send('ai:error', '请先配置 API Key 或 使用本地模型');
            return;
        }

        const requestBody = JSON.stringify({
            model: this.config.model,
            messages,
            temperature: this.config.temperature,
            stream: true,
            max_tokens: 2000 // safety cap to bound cost/latency
        });

        const url = new URL(this.config.baseUrl + '/chat/completions');
        const options = {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${this.config.apiKey}`
            }
        };

        const client = url.protocol === 'https:' ? https : http;

        console.log(`[AI] Sending request to ${url.toString()} model=${this.config.model}`);

        const req = client.request(url, options, (res) => {
            // Non-2xx responses carry an error payload, not an SSE stream —
            // collect the body and surface it as an error instead of parsing it.
            if (res.statusCode < 200 || res.statusCode >= 300) {
                let errorBody = '';
                res.on('data', (chunk) => { errorBody += chunk; });
                res.on('end', () => {
                    event.sender.send('ai:error', `HTTP ${res.statusCode}: ${errorBody.slice(0, 500)}`);
                });
                return;
            }

            // Carry partial SSE lines across chunk boundaries. TCP chunks can
            // split a `data: {...}` line in half; without buffering those
            // deltas were silently dropped by the JSON.parse catch.
            let buffer = '';
            let ended = false; // guard so 'ai:stream-end' fires exactly once
            const finish = () => {
                if (!ended) {
                    ended = true;
                    event.sender.send('ai:stream-end');
                }
            };

            res.on('data', (chunk) => {
                buffer += chunk.toString();
                const lines = buffer.split('\n');
                buffer = lines.pop(); // keep the trailing (possibly partial) line
                for (const line of lines) {
                    const trimmed = line.trim();
                    if (trimmed === '') continue;
                    if (trimmed === 'data: [DONE]') {
                        finish();
                        continue;
                    }
                    if (trimmed.startsWith('data: ')) {
                        try {
                            const data = JSON.parse(trimmed.slice(6));
                            const delta = data.choices?.[0]?.delta?.content;
                            if (delta) {
                                event.sender.send('ai:stream', delta);
                            }
                        } catch (e) {
                            // With buffering above, a failure here means a
                            // genuinely malformed server payload; log and skip.
                            console.warn('[AI] Failed to parse SSE line:', trimmed.slice(0, 200));
                        }
                    }
                }
            });

            res.on('end', finish);
        });

        req.on('error', (e) => {
            console.error('[AI] Request Failed:', e);
            event.sender.send('ai:error', e.message);
        });

        req.write(requestBody);
        req.end();
    }

    /**
     * Refresh this.config from the `global_configs` table.
     * Missing keys keep their current values; ANY failure (including a
     * missing/broken database module) falls back to current values instead
     * of rejecting, so a config problem never kills a chat request.
     */
    async loadConfig() {
        try {
            // Required lazily and INSIDE the try: a broken DB layer degrades
            // to defaults rather than throwing out of chatStream.
            const { getMainKnex } = require('../database/knex');
            const knex = getMainKnex();

            const rows = await knex('global_configs').whereIn('key',
                ['ai_base_url', 'ai_api_key', 'ai_model', 'ai_temperature']);
            const configMap = Object.fromEntries(rows.map((r) => [r.key, r.value]));

            if (configMap.ai_base_url) this.config.baseUrl = configMap.ai_base_url;
            if (configMap.ai_api_key) this.config.apiKey = configMap.ai_api_key;
            if (configMap.ai_model) this.config.model = configMap.ai_model;
            if (configMap.ai_temperature) {
                const t = Number.parseFloat(configMap.ai_temperature);
                // Reject NaN/Infinity so a corrupt row can't poison requests.
                if (Number.isFinite(t)) this.config.temperature = t;
            }
        } catch (e) {
            console.warn('[AI] Failed to load config, using defaults', e);
        }
    }
}

// Module-level singleton: every IPC handler below shares this one instance,
// so config loaded by one request is visible to the next.
const aiService = new AIService();

/**
 * Register all AI-related IPC handlers on ipcMain.
 * Must be called once from the main process during app startup.
 */
function registerAIHandlers() {
    ipcMain.on('ai:chat-start', (event, messages) => {
        // chatStream reports expected failures over 'ai:error' itself; this
        // catch exists so an unexpected rejection cannot become an unhandled
        // promise rejection in the main process.
        aiService.chatStream(messages, event).catch((e) => {
            console.error('[AI] Unexpected chatStream failure:', e);
            event.sender.send('ai:error', e.message);
        });
    });

    // Returns the freshly-loaded config (including apiKey) to the settings UI.
    ipcMain.handle('ai:get-config', async () => {
        await aiService.loadConfig();
        return aiService.config;
    });

    /**
     * Persist the four AI settings as key/value rows in `global_configs`.
     * @returns {Promise<boolean>} true on success (rejection propagates to renderer).
     */
    ipcMain.handle('ai:save-config', async (event, newConfig) => {
        const { getMainKnex } = require('../database/knex');
        const knex = getMainKnex();

        const updates = [
            { key: 'ai_base_url', value: newConfig.baseUrl },
            { key: 'ai_api_key', value: newConfig.apiKey },
            { key: 'ai_model', value: newConfig.model },
            { key: 'ai_temperature', value: String(newConfig.temperature) }
        ];

        // Upsert all keys inside one transaction so a mid-write failure
        // cannot leave the configuration half-saved.
        await knex.transaction(async (trx) => {
            for (const item of updates) {
                await trx('global_configs')
                    .insert(item)
                    .onConflict('key')
                    .merge();
            }
        });

        return true;
    });
}

// Only the registration entry point is exported; the service instance stays private.
module.exports = { registerAIHandlers };
