const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');
const ExcelJS = require('exceljs');
const crypto = require('crypto');
const DataSchema = require('./DataSchema');
const { inferCleanerType, getCleanerForType } = require('./cleaners');

/**
 * DataImporter - Handles both stream and array based data import
 */
class DataImporter {
    /**
     * Import data from a file using streams (CSV or XLSX).
     *
     * Performs file-level deduplication via an MD5 hash recorded in
     * `import_logs`, then stream-processes the rows inside one transaction.
     *
     * @param {Object} caseKnex - Knex instance bound to the case database.
     * @param {Object} options
     * @param {string} options.sourceFile - Path of the file to import.
     * @param {string} options.dataType - Logical data type (drives table name and row hashing).
     * @param {string|number} options.caseId - Case the data belongs to.
     * @param {boolean} [options.skipFileCheck] - Skip file-level dedup check and logging.
     * @param {boolean} [options.forceImport] - Import even if the file hash was seen before.
     * @returns {Promise<Object>} Import stats, or a `{ needConfirm: true }` payload
     *   when the file was already imported and user confirmation is required.
     */
    static async importFromFile(caseKnex, options) {
        const { sourceFile, dataType, caseId, skipFileCheck, forceImport } = options;

        console.log(`[DataImporter] Starting stream import for ${sourceFile}`);

        // 1. Ensure the destination table exists before opening the transaction.
        const tableName = DataSchema.getTableName(dataType);
        await DataSchema.ensureTableExists(caseKnex, tableName, dataType);

        return await caseKnex.transaction(async (trx) => {
            // 2. File-level deduplication: hash once, reuse for both check and log.
            let fileHash = null;
            if (sourceFile) {
                fileHash = await this.calculateFileHash(sourceFile);
            }

            if (sourceFile && !skipFileCheck && !forceImport) {
                const existingLog = await trx('import_logs')
                    .where({ file_hash: fileHash, case_id: caseId, data_type: dataType })
                    .first();

                if (existingLog) {
                    console.log(`[DataImporter] File already imported: ${sourceFile}`);
                    // Ask the caller to confirm a re-import instead of failing outright.
                    return {
                        success: false,
                        needConfirm: true,
                        message: `文件已导入过 (${existingLog.import_time})，是否重新导入？重新导入将跳过已存在的数据行。`,
                        fileHash
                    };
                }
            }

            // 3. Record the file import. Idempotent: on a forced re-import the
            //    log row may already exist, so insert only when it is missing.
            if (sourceFile && !skipFileCheck) {
                const logExists = await trx('import_logs').where({ file_hash: fileHash, case_id: caseId, data_type: dataType }).first();
                if (!logExists) {
                    await trx('import_logs').insert({
                        case_id: caseId,
                        file_name: path.basename(sourceFile),
                        file_hash: fileHash,
                        data_type: dataType,
                        import_time: new Date().toISOString()
                    });
                }
            }

            // 4. Stream-process the rows within the same transaction.
            return await this.processFileStream(trx, tableName, { ...options, fileHash });
        });
    }

    /**
     * Import data from an in-memory array of row objects.
     * Intended for small datasets or legacy call sites.
     *
     * @param {Object} caseKnex - Knex instance bound to the case database.
     * @param {Object} options
     * @param {string} options.dataType
     * @param {string|number} options.caseId
     * @param {Object[]} options.data - Rows to import (mutated: `row_hash` is added in place).
     * @param {string} [options.sourceFile] - Optional origin file for dedup logging.
     * @param {boolean} [options.skipFileCheck=false]
     * @param {boolean} [options.forceImport=false]
     * @returns {Promise<Object>} Import stats or a needConfirm payload.
     */
    static async importFromArray(caseKnex, options) {
        const { dataType, caseId, data, sourceFile, skipFileCheck = false, forceImport = false } = options;

        console.log(`[DataImporter] Starting array import for case ${caseId}, rows: ${data?.length}`);

        if (!data || data.length === 0) {
            return { success: true, message: '没有数据需要导入', importedCount: 0, duplicateCount: 0, totalCount: 0 };
        }

        const tableName = DataSchema.getTableName(dataType);
        await DataSchema.ensureTableExists(caseKnex, tableName, dataType);

        return await caseKnex.transaction(async (trx) => {
            // 1. File hash check (when a source file accompanies the array).
            if (sourceFile && !skipFileCheck && !forceImport) {
                // Stream-based hashing (replaces the former readFileSync):
                // identical MD5 result without buffering the whole file.
                const fileHash = await this.calculateFileHash(sourceFile);

                const existing = await trx('import_logs')
                    .where({ file_hash: fileHash, case_id: caseId, data_type: dataType })
                    .first();

                if (existing) {
                    return {
                        success: false,
                        needConfirm: true,
                        message: `文件已导入过 (${existing.import_time})，是否重新导入？`,
                        fileHash
                    };
                }

                // Log the import (idempotent).
                const logExists = await trx('import_logs').where({ file_hash: fileHash, case_id: caseId, data_type: dataType }).first();
                if (!logExists) {
                    await trx('import_logs').insert({
                        case_id: caseId,
                        file_name: path.basename(sourceFile),
                        file_hash: fileHash,
                        data_type: dataType,
                        import_time: new Date().toISOString()
                    });
                }
            }

            // 2. Batch deduplication: hash every row, then drop in-batch dupes.
            data.forEach(row => {
                if (!row.row_hash) row.row_hash = DataSchema.generateRowHash(row, dataType);
            });

            const { unique, duplicateCount: batchDuplicateCount } = this.deduplicateInBatch(data);

            if (unique.length === 0) {
                return { success: true, message: '所有数据在批次内重复', importedCount: 0, duplicateCount: data.length, totalCount: data.length };
            }

            // 3. Insert in chunks, delegating DB dedup + FTS indexing to
            //    insertBatch so both import paths share one code path.
            const chunkSize = 500;
            let importedCount = 0;
            let dbDuplicateCount = 0;

            for (let i = 0; i < unique.length; i += chunkSize) {
                const chunk = unique.slice(i, i + chunkSize);
                // `unique` is already globally deduplicated and every row has a
                // row_hash, so insertBatch's in-batch pass is a no-op here;
                // only its DB-existence check applies.
                const { inserted, duplicates } = await this.insertBatch(trx, tableName, chunk, dataType);
                importedCount += inserted;
                dbDuplicateCount += duplicates;
            }

            return {
                success: true,
                importedCount,
                duplicateCount: batchDuplicateCount + dbDuplicateCount,
                totalCount: data.length,
                message: `成功导入 ${importedCount} 条数据`
            };
        });
    }

    /**
     * Compute a file's MD5 hex digest with a read stream (constant memory).
     *
     * @param {string} filePath - Path of the file to hash.
     * @returns {Promise<string>} Lowercase hex MD5 digest.
     * @throws Propagates stream errors (e.g. ENOENT) as a rejected promise.
     */
    static async calculateFileHash(filePath) {
        return new Promise((resolve, reject) => {
            const hash = crypto.createHash('md5');
            const stream = fs.createReadStream(filePath);
            stream.on('error', err => reject(err));
            stream.on('data', chunk => hash.update(chunk));
            stream.on('end', () => resolve(hash.digest('hex')));
        });
    }

    /**
     * Dispatch to the stream processor matching the file extension.
     *
     * @throws {Error} For unsupported extensions.
     */
    static async processFileStream(trx, tableName, options) {
        const { sourceFile } = options;
        const ext = path.extname(sourceFile).toLowerCase();

        if (ext === '.csv') {
            return this.processCSV(trx, tableName, options);
        } else if (ext === '.xlsx' || ext === '.xls') {
            // NOTE(review): ExcelJS's streaming WorkbookReader parses the XLSX
            // (zip/XML) format only; legacy binary .xls will likely fail at
            // parse time — confirm whether .xls needs a separate path.
            return this.processExcel(trx, tableName, options);
        } else {
            throw new Error(`Unsupported file type: ${ext}`);
        }
    }

    /** Stream-import a CSV file through csv-parser. */
    static async processCSV(trx, tableName, options) {
        const stream = fs.createReadStream(options.sourceFile).pipe(csv());
        return this.processAsyncStream(stream, trx, tableName, options);
    }

    /**
     * Stream-import an XLSX workbook row by row.
     * Row 1 of each worksheet is treated as the header row; subsequent rows
     * are mapped via options.mapping and inserted in batches of 500.
     */
    static async processExcel(trx, tableName, options) {
        const stream = fs.createReadStream(options.sourceFile);
        const workbookReader = new ExcelJS.stream.xlsx.WorkbookReader(stream, {
            entries: 'emit',
            sharedStrings: 'cache',
            styles: 'ignore'
        });

        let headers = null;
        let batch = [];
        let totalImported = 0;
        let totalDuplicates = 0;
        let totalRows = 0;

        for await (const worksheetReader of workbookReader) {
            for await (const row of worksheetReader) {
                if (row.number === 1) {
                    // Header row: map column number -> trimmed header text.
                    headers = {};
                    row.eachCell((cell, colNumber) => {
                        headers[colNumber] = cell.value ? cell.value.toString().trim() : '';
                    });
                    continue;
                }

                const rowObj = {};
                let hasData = false;
                row.eachCell((cell, colNumber) => {
                    const header = headers[colNumber];
                    if (header) {
                        let value = cell.value;
                        if (value !== null && value !== undefined) {
                            if (typeof value === 'object') {
                                if (value instanceof Date) {
                                    try {
                                        // Time-only Excel cells surface as dates in
                                        // 1899; keep just the clock part for those.
                                        const y = value.getFullYear();
                                        if (y === 1899) {
                                            value = value.toTimeString().split(' ')[0];
                                        } else {
                                            value = value.toISOString().replace('T', ' ').split('.')[0];
                                        }
                                    } catch (e) { value = String(value); }
                                } else if (value.text) value = value.text; // hyperlink cell
                                else if (value.result !== undefined) value = value.result; // formula cell
                                else if (value.richText) value = value.richText.map(t => t.text).join('');
                            }
                            rowObj[header] = String(value).trim();
                            hasData = true;
                        }
                    }
                });

                if (!hasData) continue;

                const mapped = this.mapRow(rowObj, options.mapping, options.customValues, options.dataType);
                if (mapped) {
                    batch.push(mapped);
                    totalRows++;
                }

                if (batch.length >= 500) {
                    const { inserted, duplicates } = await this.insertBatch(trx, tableName, batch, options.dataType);
                    totalImported += inserted;
                    totalDuplicates += duplicates;
                    batch = [];
                }
            }
        }

        // Flush the trailing partial batch. Fix: dataType was previously not
        // forwarded here, so FTS indexing of the final batch ran with
        // dataType === undefined.
        if (batch.length > 0) {
            const { inserted, duplicates } = await this.insertBatch(trx, tableName, batch, options.dataType);
            totalImported += inserted;
            totalDuplicates += duplicates;
        }

        return {
            success: true,
            importedCount: totalImported,
            duplicateCount: totalDuplicates,
            totalCount: totalRows
        };
    }

    /**
     * Consume any async-iterable of raw row objects (e.g. csv-parser output),
     * map each row and insert in batches of 500.
     */
    static async processAsyncStream(iterator, trx, tableName, options) {
        let batch = [];
        let totalImported = 0;
        let totalDuplicates = 0;
        let totalRows = 0;

        for await (const row of iterator) {
            const mapped = this.mapRow(row, options.mapping, options.customValues, options.dataType);
            if (mapped) {
                batch.push(mapped);
                totalRows++;
            }

            if (batch.length >= 500) {
                const { inserted, duplicates } = await this.insertBatch(trx, tableName, batch, options.dataType);
                totalImported += inserted;
                totalDuplicates += duplicates;
                batch = [];
            }
        }

        // Flush the trailing partial batch. Fix: dataType was previously not
        // forwarded here (FTS indexing ran with dataType === undefined).
        if (batch.length > 0) {
            const { inserted, duplicates } = await this.insertBatch(trx, tableName, batch, options.dataType);
            totalImported += inserted;
            totalDuplicates += duplicates;
        }

        return {
            success: true,
            importedCount: totalImported,
            duplicateCount: totalDuplicates,
            totalCount: totalRows
        };
    }

    /**
     * Map a raw file row to a system row using the column mapping.
     *
     * Mapping entries: sysField -> fileHeader (string) or [headers] (merged
     * with spaces). `_custom_*` fields go into `extra_data` (JSON); `_ignore_`
     * fields are dropped. Mapped values are run through field-name-inferred
     * cleaners, and a `row_hash` is generated for deduplication.
     *
     * @param {Object} row - Raw row keyed by file header.
     * @param {Object} mapping - sysField -> header(s).
     * @param {Object} [customValues] - Constant values to stamp on every row.
     * @param {string} dataType - Drives row-hash generation.
     * @returns {Object} The mapped row (never null in the current implementation).
     */
    static mapRow(row, mapping, customValues, dataType) {
        const mappedRow = {};
        const extraData = {};

        for (const [sysField, fileHeader] of Object.entries(mapping)) {
            if (Array.isArray(fileHeader)) {
                // Multi-column mapping: merge the non-empty source values.
                const values = fileHeader.map(h => row[h]).filter(v => v !== undefined && v !== null && v !== '');
                const merged = values.join(' ').trim();
                if (sysField.startsWith('_custom_')) {
                    if (merged) extraData[fileHeader.join('+')] = merged;
                } else if (sysField !== '_ignore_') {
                    mappedRow[sysField] = merged;
                }
            } else {
                // Single-column mapping.
                const val = row[fileHeader];
                if (sysField.startsWith('_custom_')) {
                    if (val !== undefined && val !== null && val !== '') {
                        extraData[fileHeader] = val;
                    }
                } else if (sysField !== '_ignore_') {
                    mappedRow[sysField] = val;
                }
            }
        }

        // Stamp caller-supplied constant values (override mapped fields).
        if (customValues) {
            for (const [key, val] of Object.entries(customValues)) {
                mappedRow[key] = val;
            }
        }

        // Auto-clean each value with a cleaner inferred from the field name;
        // keep the original value when cleaning yields null.
        for (const [fieldName, value] of Object.entries(mappedRow)) {
            if (value === undefined || value === null || value === '') continue;

            const cleanerType = inferCleanerType(fieldName);
            if (cleanerType) {
                const cleaner = getCleanerForType(cleanerType);
                if (cleaner) {
                    const cleaned = cleaner.clean(value);
                    if (cleaned !== null) {
                        mappedRow[fieldName] = cleaned;
                    }
                }
            }
        }

        if (Object.keys(extraData).length > 0) {
            mappedRow.extra_data = JSON.stringify(extraData);
        }

        // Row-level dedup hash; may be absent if generation yields a falsy value.
        const hash = DataSchema.generateRowHash(mappedRow, dataType);
        if (hash) mappedRow.row_hash = hash;

        return mappedRow;
    }

    /**
     * Drop rows whose row_hash repeats within the batch.
     * Rows with a missing/empty row_hash are counted as duplicates.
     *
     * @param {Object[]} data - Rows carrying a `row_hash` property.
     * @returns {{unique: Object[], duplicateCount: number}}
     */
    static deduplicateInBatch(data) {
        const seen = new Set();
        const unique = [];
        let duplicateCount = 0;

        for (const row of data) {
            const hash = row.row_hash;
            if (hash && !seen.has(hash)) {
                seen.add(hash);
                unique.push(row);
            } else {
                duplicateCount++;
            }
        }
        return { unique, duplicateCount };
    }

    /**
     * Insert a batch of mapped rows: in-batch dedup, DB-existence dedup by
     * row_hash, insert, then best-effort FTS indexing.
     *
     * @param {Object} trx - Active knex transaction.
     * @param {string} tableName - Destination table.
     * @param {Object[]} batch - Mapped rows with row_hash.
     * @param {string} dataType - Forwarded to the FTS indexer.
     * @returns {Promise<{inserted: number, duplicates: number}>}
     */
    static async insertBatch(trx, tableName, batch, dataType) {
        // Drop in-batch duplicates first.
        const { unique, duplicateCount } = this.deduplicateInBatch(batch);

        if (unique.length === 0) return { inserted: 0, duplicates: duplicateCount };

        // Then drop rows whose hash already exists in the table.
        const hashes = unique.map(r => r.row_hash);
        const existing = await trx(tableName).whereIn('row_hash', hashes).select('row_hash');
        const existingSet = new Set(existing.map(r => r.row_hash));

        const newRows = unique.filter(r => !existingSet.has(r.row_hash));

        if (newRows.length > 0) {
            await trx(tableName).insert(newRows);

            // FTS indexing. Required lazily — presumably to avoid a circular
            // dependency with SearchService; TODO confirm before hoisting.
            const SearchService = require('./SearchService');
            try {
                await SearchService.indexBatch(trx, tableName, newRows, dataType);
            } catch (err) {
                console.warn('[DataImporter] Failed to index batch for FTS:', err.message);
                // Don't fail the whole import if search indexing fails
            }
        }

        return {
            inserted: newRows.length,
            duplicates: duplicateCount + (unique.length - newRows.length)
        };
    }
}

// Export the class itself; all members are static, so no instantiation is needed.
module.exports = DataImporter;
