-- =====================================================
-- EnPlatform Assistant PostgreSQL数据库初始化脚本
-- 版本: 1.0.0
-- 创建日期: 2025-06-29
-- 描述: 用于生产环境的PostgreSQL数据库初始化
-- 支持: PostgreSQL 12+
-- =====================================================

-- 创建数据库（如果不存在）
-- CREATE DATABASE enplatform_assistant 
--     WITH ENCODING 'UTF8' 
--     LC_COLLATE='zh_CN.UTF-8' 
--     LC_CTYPE='zh_CN.UTF-8' 
--     TEMPLATE=template0;

-- 连接到数据库
-- \c enplatform_assistant;

-- Create required extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";  -- provides uuid_generate_v4()
CREATE EXTENSION IF NOT EXISTS "pg_trgm";    -- trigram support for the file-name search index

-- Drop existing objects so the script can be re-run from scratch
DROP TABLE IF EXISTS import_records CASCADE;

-- =====================================================
-- 1. Import records table (import_records)
-- Tracks the status and statistics of Excel file imports
-- =====================================================

-- Create the status enum.
-- CREATE TYPE has no IF NOT EXISTS clause, so without this DROP a re-run of
-- the script aborts with "type import_status already exists".
DROP TYPE IF EXISTS import_status CASCADE;
CREATE TYPE import_status AS ENUM ('PROCESSING', 'SUCCESS', 'FAILED', 'PARTIAL_SUCCESS', 'CANCELLED');

CREATE TABLE import_records (
    -- Primary key, UUID (requires the uuid-ossp extension)
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),

    -- Basic file information
    file_name VARCHAR(255) NOT NULL,
    file_size BIGINT CHECK (file_size >= 0),  -- size in bytes
    file_hash VARCHAR(32),                    -- MD5 hex digest (exactly 32 chars), used for dedup

    -- Import status
    status import_status NOT NULL DEFAULT 'PROCESSING',

    -- Record counts
    total_records INTEGER CHECK (total_records >= 0),
    valid_records INTEGER CHECK (valid_records >= 0),
    success_records INTEGER CHECK (success_records >= 0),
    failed_records INTEGER CHECK (failed_records >= 0),

    -- Timing information
    start_time TIMESTAMP WITH TIME ZONE,
    end_time TIMESTAMP WITH TIME ZONE,
    processing_time_ms BIGINT CHECK (processing_time_ms >= 0),  -- elapsed processing time in milliseconds
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,

    -- Error / debug information
    error_message TEXT,

    -- Vector-store related information
    vector_collection VARCHAR(100),
    vector_dimension INTEGER CHECK (vector_dimension > 0),

    -- User / audit information
    created_by VARCHAR(100),

    -- Extension data (JSON)
    metadata JSONB,
    -- NOTE(review): seed data stores a JSON array here; presumably this could
    -- be JSONB like metadata — confirm with the application before changing.
    processing_steps TEXT,

    -- Cross-column consistency checks (NULL-tolerant: a missing count never blocks a row)
    CONSTRAINT chk_success_records_valid CHECK (
        success_records IS NULL OR valid_records IS NULL OR success_records <= valid_records
    ),
    CONSTRAINT chk_valid_records_total CHECK (
        valid_records IS NULL OR total_records IS NULL OR valid_records <= total_records
    ),
    -- An import can never finish before it started
    CONSTRAINT chk_end_time_after_start CHECK (
        start_time IS NULL OR end_time IS NULL OR end_time >= start_time
    )
);

-- Table and column comments stored in the system catalog.
-- The COMMENT text is runtime data surfaced by admin tools (\d+, pgAdmin),
-- kept in Chinese as authored for the application's operators.
COMMENT ON TABLE import_records IS 'Excel导入记录表，用于跟踪文件导入的状态和统计信息';
COMMENT ON COLUMN import_records.id IS '主键ID，UUID格式';
COMMENT ON COLUMN import_records.file_name IS '原始文件名';
COMMENT ON COLUMN import_records.file_size IS '文件大小（字节）';
COMMENT ON COLUMN import_records.file_hash IS '文件MD5哈希值，用于去重';
COMMENT ON COLUMN import_records.status IS '导入状态';
COMMENT ON COLUMN import_records.total_records IS '总记录数';
COMMENT ON COLUMN import_records.valid_records IS '有效记录数（通过验证的记录）';
COMMENT ON COLUMN import_records.success_records IS '成功处理的记录数（成功生成向量并存储）';
COMMENT ON COLUMN import_records.failed_records IS '失败记录数';
COMMENT ON COLUMN import_records.start_time IS '开始处理时间';
COMMENT ON COLUMN import_records.end_time IS '结束处理时间';
COMMENT ON COLUMN import_records.processing_time_ms IS '处理耗时（毫秒）';
COMMENT ON COLUMN import_records.created_at IS '创建时间';
COMMENT ON COLUMN import_records.updated_at IS '更新时间';
COMMENT ON COLUMN import_records.error_message IS '错误信息';
COMMENT ON COLUMN import_records.vector_collection IS '向量集合名称';
COMMENT ON COLUMN import_records.vector_dimension IS '向量维度';
COMMENT ON COLUMN import_records.created_by IS '创建者';
COMMENT ON COLUMN import_records.metadata IS '扩展元数据，存储JSON格式的额外信息';
COMMENT ON COLUMN import_records.processing_steps IS '处理步骤记录，存储JSON格式的处理过程';

-- =====================================================
-- 2. Indexes
-- =====================================================
-- IF NOT EXISTS keeps this section re-runnable on its own, matching the
-- CREATE OR REPLACE style used for the functions below.

-- File-name lookups (exact match)
CREATE INDEX IF NOT EXISTS idx_import_records_file_name ON import_records(file_name);

-- Filtering by import status
CREATE INDEX IF NOT EXISTS idx_import_records_status ON import_records(status);

-- Time-range queries
CREATE INDEX IF NOT EXISTS idx_import_records_created_at ON import_records(created_at);

-- Per-user import lookups
CREATE INDEX IF NOT EXISTS idx_import_records_created_by ON import_records(created_by);

-- Duplicate-file detection via hash
CREATE INDEX IF NOT EXISTS idx_import_records_file_hash ON import_records(file_hash);

-- Per-vector-collection lookups
CREATE INDEX IF NOT EXISTS idx_import_records_vector_collection ON import_records(vector_collection);

-- Composite: status + created_at (status statistics ordered by time)
CREATE INDEX IF NOT EXISTS idx_import_records_status_created_at ON import_records(status, created_at);

-- Composite: created_by + created_at (user import history)
CREATE INDEX IF NOT EXISTS idx_import_records_created_by_created_at ON import_records(created_by, created_at);

-- GIN index for JSONB metadata containment queries
CREATE INDEX IF NOT EXISTS idx_import_records_metadata ON import_records USING GIN(metadata);

-- Trigram GIN index for fuzzy file-name search (requires pg_trgm)
CREATE INDEX IF NOT EXISTS idx_import_records_file_name_trgm ON import_records USING GIN(file_name gin_trgm_ops);

-- =====================================================
-- 3. Seed / test data
-- =====================================================

-- Sample import records.
-- file_hash values are exactly 32 hex characters: the original sample hashes
-- were 33-35 characters long, overflowed the VARCHAR(32) column, and made the
-- whole script abort on this INSERT ("value too long for type character varying(32)").
INSERT INTO import_records (
    id, file_name, file_size, file_hash, status,
    total_records, valid_records, success_records, failed_records,
    start_time, end_time, processing_time_ms,
    vector_collection, vector_dimension, created_by,
    metadata, processing_steps
) VALUES
(
    uuid_generate_v4(),
    '元模型数据202506181726.xlsx',
    15360,
    'a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4',
    'SUCCESS',
    100, 95, 90, 5,
    CURRENT_TIMESTAMP - INTERVAL '1 hour',
    CURRENT_TIMESTAMP - INTERVAL '50 minutes',
    600000,
    'knowledge_base',
    384,
    'system',
    '{"source": "test", "version": "1.0", "tags": ["demo", "test"]}'::jsonb,
    '["文件上传完成", "Excel解析完成，共解析100条记录", "数据验证完成，有效记录95条", "向量生成完成", "向量存储完成，成功90条"]'
),
(
    uuid_generate_v4(),
    '设备属性定义.xlsx',
    8192,
    'b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5',
    'PROCESSING',
    50, 48, 0, 0,
    CURRENT_TIMESTAMP - INTERVAL '10 minutes',
    NULL,
    NULL,
    'knowledge_base',
    384,
    'admin',
    '{"source": "manual", "priority": "high"}'::jsonb,
    '["文件上传完成", "Excel解析完成，共解析50条记录", "数据验证完成，有效记录48条", "正在生成向量..."]'
),
(
    uuid_generate_v4(),
    '错误测试文件.xlsx',
    4096,
    'c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6',
    'FAILED',
    20, 0, 0, 20,
    CURRENT_TIMESTAMP - INTERVAL '2 hours',
    CURRENT_TIMESTAMP - INTERVAL '2 hours' + INTERVAL '5 minutes',
    300000,
    NULL,
    NULL,
    'test_user',
    '{"source": "test", "error_type": "validation"}'::jsonb,
    '["文件上传完成", "Excel解析失败：文件格式错误"]'
),
(
    uuid_generate_v4(),
    '大型数据集.xlsx',
    102400,
    'd4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1',
    'PARTIAL_SUCCESS',
    1000, 950, 900, 50,
    CURRENT_TIMESTAMP - INTERVAL '3 hours',
    CURRENT_TIMESTAMP - INTERVAL '2 hours' + INTERVAL '30 minutes',
    5400000,
    'knowledge_base',
    384,
    'data_analyst',
    '{"source": "batch", "batch_id": "B001", "priority": "normal"}'::jsonb,
    '["文件上传完成", "Excel解析完成，共解析1000条记录", "数据验证完成，有效记录950条", "向量生成完成", "向量存储完成，成功900条，失败50条"]'
);

-- =====================================================
-- 4. Views
-- =====================================================

-- Overall import statistics.
-- CREATE OR REPLACE keeps the section re-runnable. The FILTER clause is the
-- standard aggregate form and, unlike SUM(CASE ...), yields 0 instead of NULL
-- when the table is empty.
CREATE OR REPLACE VIEW import_statistics AS
SELECT
    COUNT(*) as total_imports,
    COUNT(*) FILTER (WHERE status = 'SUCCESS') as successful_imports,
    COUNT(*) FILTER (WHERE status = 'FAILED') as failed_imports,
    COUNT(*) FILTER (WHERE status = 'PROCESSING') as processing_imports,
    COUNT(*) FILTER (WHERE status = 'PARTIAL_SUCCESS') as partial_success_imports,
    COUNT(*) FILTER (WHERE status = 'CANCELLED') as cancelled_imports,
    SUM(COALESCE(total_records, 0)) as total_records_processed,
    SUM(COALESCE(success_records, 0)) as total_successful_records,
    -- Share of fully successful imports, as a percentage with 2 decimals
    ROUND(
        CASE
            WHEN COUNT(*) > 0 THEN
                (COUNT(*) FILTER (WHERE status = 'SUCCESS') * 100.0 / COUNT(*))
            ELSE 0
        END, 2
    ) as success_rate_percentage,
    -- NOTE(review): COALESCE(...,0) counts rows without timing data as 0 ms,
    -- which pulls the average down — confirm this is the intended semantics.
    ROUND(AVG(COALESCE(processing_time_ms, 0)), 2) as avg_processing_time_ms,
    MIN(created_at) as first_import_time,
    MAX(created_at) as last_import_time
FROM import_records;

-- Today's import statistics.
-- Half-open range on created_at (instead of DATE(created_at) = CURRENT_DATE,
-- which wraps the column in a function) lets the planner use
-- idx_import_records_created_at.
CREATE OR REPLACE VIEW today_import_statistics AS
SELECT
    COUNT(*) as today_imports,
    COUNT(*) FILTER (WHERE status = 'SUCCESS') as today_successful,
    COUNT(*) FILTER (WHERE status = 'FAILED') as today_failed,
    COUNT(*) FILTER (WHERE status = 'PROCESSING') as today_processing,
    COUNT(*) FILTER (WHERE status = 'PARTIAL_SUCCESS') as today_partial_success,
    SUM(COALESCE(total_records, 0)) as today_total_records,
    SUM(COALESCE(success_records, 0)) as today_success_records,
    ROUND(AVG(COALESCE(processing_time_ms, 0)), 2) as today_avg_processing_time
FROM import_records
WHERE created_at >= CURRENT_DATE
  AND created_at < CURRENT_DATE + INTERVAL '1 day';

-- Per-user import statistics (users with created_by set only).
CREATE OR REPLACE VIEW user_import_statistics AS
SELECT
    created_by,
    COUNT(*) as user_total_imports,
    COUNT(*) FILTER (WHERE status = 'SUCCESS') as user_successful_imports,
    COUNT(*) FILTER (WHERE status = 'FAILED') as user_failed_imports,
    SUM(COALESCE(total_records, 0)) as user_total_records,
    SUM(COALESCE(success_records, 0)) as user_success_records,
    -- Share of fully successful imports per user, percentage with 2 decimals.
    -- COUNT(*) > 0 always holds inside a group but is kept for symmetry with
    -- import_statistics.
    ROUND(
        CASE
            WHEN COUNT(*) > 0 THEN
                (COUNT(*) FILTER (WHERE status = 'SUCCESS') * 100.0 / COUNT(*))
            ELSE 0
        END, 2
    ) as user_success_rate,
    MIN(created_at) as first_import,
    MAX(created_at) as last_import
FROM import_records
WHERE created_by IS NOT NULL
GROUP BY created_by;

-- =====================================================
-- 5. Functions and stored procedures
-- =====================================================

-- Success rate as a percentage (0.00 - 100.00, two decimals).
-- Returns 0.00 for NULL/zero totals and treats a NULL success count as 0;
-- the original returned NULL when p_success_records was NULL, which leaked
-- NULLs into callers' arithmetic.
CREATE OR REPLACE FUNCTION calculate_success_rate(
    p_total_records INTEGER,
    p_success_records INTEGER
) RETURNS DECIMAL(5,2) AS $$
BEGIN
    -- No records means no meaningful rate
    IF p_total_records IS NULL OR p_total_records = 0 THEN
        RETURN 0.00;
    END IF;
    RETURN ROUND((COALESCE(p_success_records, 0) * 100.0 / p_total_records), 2);
END;
$$ LANGUAGE plpgsql IMMUTABLE;  -- pure arithmetic: safe to mark IMMUTABLE

-- Import statistics over an optional date range and/or creator.
-- NULL parameters mean "no filter"; p_end_date is inclusive (the predicate
-- uses a half-open range up to the following midnight).
CREATE OR REPLACE FUNCTION get_import_statistics(
    p_start_date DATE DEFAULT NULL,
    p_end_date DATE DEFAULT NULL,
    p_created_by VARCHAR(100) DEFAULT NULL
) RETURNS TABLE (
    total_imports BIGINT,
    successful_imports BIGINT,
    failed_imports BIGINT,
    processing_imports BIGINT,
    total_records_processed BIGINT,
    total_successful_records BIGINT,
    avg_processing_time_ms NUMERIC
) AS $$
BEGIN
    RETURN QUERY
    SELECT
        COUNT(*) as total_imports,
        SUM(CASE WHEN ir.status = 'SUCCESS' THEN 1 ELSE 0 END) as successful_imports,
        SUM(CASE WHEN ir.status = 'FAILED' THEN 1 ELSE 0 END) as failed_imports,
        SUM(CASE WHEN ir.status = 'PROCESSING' THEN 1 ELSE 0 END) as processing_imports,
        SUM(COALESCE(ir.total_records, 0)) as total_records_processed,
        SUM(COALESCE(ir.success_records, 0)) as total_successful_records,
        ROUND(AVG(COALESCE(ir.processing_time_ms, 0)), 2) as avg_processing_time_ms
    FROM import_records ir
    WHERE
        -- Range predicates on the bare column are index-friendly; the original
        -- DATE(ir.created_at) wrapped the column in a function and defeated
        -- idx_import_records_created_at.
        (p_start_date IS NULL OR ir.created_at >= p_start_date)
        AND (p_end_date IS NULL OR ir.created_at < p_end_date + INTERVAL '1 day')
        AND (p_created_by IS NULL OR ir.created_by = p_created_by);
END;
$$ LANGUAGE plpgsql STABLE;  -- read-only: safe to mark STABLE

-- Purge finished import records older than the retention window.
-- Only terminal states are deleted; PROCESSING (and PARTIAL_SUCCESS) rows are
-- always kept. Returns a human-readable summary of how many rows were removed.
CREATE OR REPLACE FUNCTION cleanup_old_records(
    p_days_to_keep INTEGER
) RETURNS TEXT AS $$
DECLARE
    v_cutoff_date TIMESTAMP WITH TIME ZONE;
    v_deleted_count INTEGER;
BEGIN
    -- Guard: a NULL or negative retention window would place the cutoff in
    -- the future (or make it NULL) and silently delete every completed record.
    IF p_days_to_keep IS NULL OR p_days_to_keep < 0 THEN
        RAISE EXCEPTION 'p_days_to_keep must be a non-negative integer, got %', p_days_to_keep;
    END IF;

    v_cutoff_date := CURRENT_TIMESTAMP - INTERVAL '1 day' * p_days_to_keep;

    DELETE FROM import_records
    WHERE created_at < v_cutoff_date
      AND status IN ('SUCCESS', 'FAILED', 'CANCELLED');

    GET DIAGNOSTICS v_deleted_count = ROW_COUNT;

    RETURN '已删除 ' || v_deleted_count || ' 条记录';
END;
$$ LANGUAGE plpgsql;

-- Trigger function: stamp updated_at with the current transaction timestamp
-- before a row update is applied.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- =====================================================
-- 6. Triggers
-- =====================================================

-- Keep updated_at current on every row update.
-- DROP first so this section is re-runnable on PostgreSQL 12
-- (CREATE OR REPLACE TRIGGER requires PostgreSQL 14+).
DROP TRIGGER IF EXISTS tr_import_records_update_time ON import_records;
CREATE TRIGGER tr_import_records_update_time
    BEFORE UPDATE ON import_records
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- Trigger function: enforce record-count invariants on INSERT/UPDATE.
-- Overlaps with the table's CHECK constraints, but raises dedicated,
-- localized error messages.
CREATE OR REPLACE FUNCTION check_records_consistency()
RETURNS TRIGGER AS $$
BEGIN
    -- Successful records can never outnumber validated records
    IF NEW.success_records IS NOT NULL
       AND NEW.valid_records IS NOT NULL
       AND NEW.success_records > NEW.valid_records THEN
        RAISE EXCEPTION '成功记录数不能超过有效记录数';
    END IF;

    -- Validated records can never outnumber total records
    IF NEW.valid_records IS NOT NULL
       AND NEW.total_records IS NOT NULL
       AND NEW.valid_records > NEW.total_records THEN
        RAISE EXCEPTION '有效记录数不能超过总记录数';
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Record-count consistency check on every insert/update.
-- DROP first so this statement is re-runnable on PostgreSQL 12
-- (CREATE OR REPLACE TRIGGER requires PostgreSQL 14+).
DROP TRIGGER IF EXISTS tr_import_records_consistency_check ON import_records;
CREATE TRIGGER tr_import_records_consistency_check
    BEFORE INSERT OR UPDATE ON import_records
    FOR EACH ROW
    EXECUTE FUNCTION check_records_consistency();

-- =====================================================
-- 7. 用户和权限设置
-- =====================================================

-- 创建应用专用用户（生产环境使用）
-- CREATE USER enplatform_app WITH PASSWORD 'your_secure_password_here';
-- GRANT CONNECT ON DATABASE enplatform_assistant TO enplatform_app;
-- GRANT USAGE ON SCHEMA public TO enplatform_app;
-- GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO enplatform_app;
-- GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO enplatform_app;

-- 创建只读用户（用于报表和监控）
-- CREATE USER enplatform_readonly WITH PASSWORD 'your_readonly_password_here';
-- GRANT CONNECT ON DATABASE enplatform_assistant TO enplatform_readonly;
-- GRANT USAGE ON SCHEMA public TO enplatform_readonly;
-- GRANT SELECT ON ALL TABLES IN SCHEMA public TO enplatform_readonly;

-- =====================================================
-- 8. Initialization verification
-- =====================================================

-- Confirm the table exists and count the seeded rows
SELECT 'PostgreSQL数据库初始化完成' as message, COUNT(*) as test_records_count FROM import_records;

-- Show table structure.
-- NOTE: backslash commands are psql meta-commands — they only work when this
-- script runs through psql, and their arguments must NOT end with a semicolon
-- (the semicolon becomes part of the object-name pattern and the lookup fails).
\d import_records

-- Show index information.
-- The indexes are named idx_import_records_*, so the pattern must include the
-- idx_ prefix (the original pattern "import_records*" matched nothing).
\di idx_import_records*

-- Show view data
SELECT 'import_statistics视图数据:' as info;
SELECT * FROM import_statistics;

SELECT 'today_import_statistics视图数据:' as info;
SELECT * FROM today_import_statistics;

SELECT 'user_import_statistics视图数据:' as info;
SELECT * FROM user_import_statistics;

-- Exercise the statistics function with default (unfiltered) arguments
SELECT * FROM get_import_statistics();

-- =====================================================
-- 脚本执行完成
-- =====================================================
