-- ClickHouse initialization SQL script
-- Business purpose: creates the ClickHouse schema used to store high-volume
-- log data and support fast analytical queries.

-- Create the database (idempotent: safe to re-run)
CREATE DATABASE IF NOT EXISTS data_warehouse;

USE data_warehouse;

-- 1. API call log table (ClickHouse)
-- Business purpose: stores high-volume API call logs and supports
-- high-performance queries and aggregate analysis.
CREATE TABLE IF NOT EXISTS ods_api_call_log_ch
(
    id             UInt64,
    resource_id    UInt64,
    api_url        String,
    call_time      DateTime,
    response_time  UInt32,   -- presumably latency in ms — confirm with the producer
    status_code    UInt16,
    ip_address     String,
    user_id        String,
    department     String,
    request_params String,
    response_data  String
)
ENGINE = MergeTree()
PARTITION BY toYYYYMM(call_time)   -- monthly partitions for cheap retention drops
ORDER BY (resource_id, call_time)
SETTINGS index_granularity = 8192;

-- 2. Download log table (ClickHouse)
-- Business purpose: stores high-volume download logs with time-partitioned,
-- high-performance queries.
CREATE TABLE IF NOT EXISTS ods_download_log_ch
(
    id              UInt64,
    resource_id     UInt64,
    file_name       String,
    download_time   DateTime,
    ip_address      String,
    user_id         String,
    file_size       UInt64,   -- presumably bytes — confirm with the producer
    department      String,
    download_source String,   -- origin channel of the download (e.g. web / api)
    user_agent      String
)
ENGINE = MergeTree()
PARTITION BY toYYYYMM(download_time)   -- monthly partitions for cheap retention drops
ORDER BY (resource_id, download_time)
SETTINGS index_granularity = 8192;

-- 3. Daily resource summary table (ClickHouse)
-- Business purpose: per-day resource usage rollups for fast aggregate queries.
-- FIX: the original used bare SummingMergeTree(), which sums EVERY numeric
-- non-key column during background merges -- including `id`,
-- `avg_response_time` and `success_rate` -- producing meaningless values.
-- The additive columns are now listed explicitly. Columns not in the list keep
-- an arbitrary value from one of the merged rows, so averages/rates must be
-- recomputed from source data rather than trusted after merges.
CREATE TABLE IF NOT EXISTS dws_resource_daily_ch (
    id UInt64,
    resource_id UInt64,
    stat_date Date,
    resource_type String,
    department String,
    call_count UInt32,
    download_count UInt32,
    -- NOTE(review): summing unique-user counts across partial rows for the same
    -- day over-counts users seen in more than one batch; treat as an upper bound.
    unique_user_count UInt32,
    avg_response_time Float32,
    success_rate Float32,
    total_data_size UInt64
) ENGINE = SummingMergeTree((call_count, download_count, unique_user_count, total_data_size))
PARTITION BY toYYYYMM(stat_date)
ORDER BY (resource_id, stat_date)
SETTINGS index_granularity = 8192;

-- 4. Real-time monitoring metrics table (ClickHouse)
-- Business purpose: stores real-time monitoring metrics; supports
-- high-frequency writes and reads.
-- ReplacingMergeTree(update_time) keeps, after merges, only the row with the
-- greatest update_time per (metric_name, department, resource_type) key.
-- Deduplication happens asynchronously; use FINAL or argMax at query time
-- when an exactly-deduplicated read is required.
CREATE TABLE IF NOT EXISTS ads_realtime_monitor_ch
(
    id            UInt64,
    metric_name   String,
    metric_value  Float64,
    metric_unit   String,
    department    String,
    resource_type String,
    update_time   DateTime
)
ENGINE = ReplacingMergeTree(update_time)
PARTITION BY toYYYYMM(update_time)
ORDER BY (metric_name, department, resource_type)
SETTINGS index_granularity = 8192;

-- 5. Alert info table (ClickHouse)
-- Business purpose: stores system alerts; monthly partitions give efficient
-- time-ranged queries.
-- FIX: resolve_time was a non-nullable DateTime, so still-open alerts would be
-- stored with the 1970-01-01 epoch default and be indistinguishable from real
-- resolutions. It is now Nullable: NULL means "not resolved yet".
CREATE TABLE IF NOT EXISTS ads_alert_info_ch (
    id UInt64,
    alert_type String,
    alert_level String,
    alert_title String,
    alert_content String,
    alert_source String,
    department String,
    resource_id UInt64,
    alert_time DateTime,
    status String,
    resolve_time Nullable(DateTime),  -- NULL while the alert is unresolved
    resolve_user String               -- empty string while unresolved
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(alert_time)
ORDER BY (alert_type, alert_time)
SETTINGS index_granularity = 8192;

-- Materialized view: per-minute API call statistics
-- Business purpose: automatically aggregates call volume per minute for
-- real-time monitoring.
-- FIX: the original stored avg(response_time) in a SummingMergeTree, but
-- averages are not additive -- background merges SUM the averages of partial
-- rows, yielding garbage. The additive sum is stored instead; compute the
-- average at query time as:
--   sum(total_response_time) / sum(call_count)
CREATE MATERIALIZED VIEW IF NOT EXISTS mv_realtime_call_stats
ENGINE = SummingMergeTree()
PARTITION BY toYYYYMM(call_time)
ORDER BY (resource_id, call_time)
AS
SELECT
    resource_id,
    toStartOfMinute(call_time) AS call_time,
    count() AS call_count,
    sum(response_time) AS total_response_time,
    countIf(status_code = 200) AS success_count,
    countIf(status_code != 200) AS failure_count
FROM ods_api_call_log_ch
GROUP BY
    resource_id,
    toStartOfMinute(call_time);

-- Materialized view: per-minute download statistics
-- Business purpose: automatically aggregates download volume per minute for
-- real-time monitoring.
CREATE MATERIALIZED VIEW IF NOT EXISTS mv_realtime_download_stats
ENGINE = SummingMergeTree()
PARTITION BY toYYYYMM(download_time)
ORDER BY (resource_id, download_time)
AS
SELECT
    resource_id,
    toStartOfMinute(download_time) AS download_time,
    count() AS download_count,
    sum(file_size) AS total_size,
    -- NOTE(review): SummingMergeTree sums this column across merged rows, so
    -- unique_users becomes an upper bound whenever one minute spans several
    -- inserts — confirm this approximation is acceptable for monitoring.
    uniq(user_id) AS unique_users
FROM ods_download_log_ch
GROUP BY
    resource_id,
    toStartOfMinute(download_time);

-- Materialized view: abnormal download behavior detection
-- Business purpose: hourly per-IP download counts, used to spot anomalies such
-- as a single IP downloading very frequently.
-- FIX: the original had `HAVING count() > 20` inside the view. A materialized
-- view only processes each inserted block, so the threshold was applied per
-- INSERT, not per hour: an IP whose downloads arrive in several small batches
-- would never be recorded at all. All hourly counts are stored now; apply the
-- threshold at query time, after re-aggregating the SummingMergeTree parts:
--   SELECT ip_address, download_time, sum(download_count) AS cnt
--   FROM mv_abnormal_download_detection
--   GROUP BY ip_address, download_time
--   HAVING cnt > 20;   -- more than 20 downloads within one hour
CREATE MATERIALIZED VIEW IF NOT EXISTS mv_abnormal_download_detection
ENGINE = SummingMergeTree()
PARTITION BY toYYYYMM(download_time)
ORDER BY (ip_address, download_time)
AS
SELECT
    ip_address,
    toStartOfHour(download_time) AS download_time,
    count() AS download_count,
    -- NOTE(review): uniq() results are summed by SummingMergeTree across
    -- merges, so unique_resources / unique_users are upper bounds, not exact.
    uniq(resource_id) AS unique_resources,
    uniq(user_id) AS unique_users
FROM ods_download_log_ch
GROUP BY
    ip_address,
    toStartOfHour(download_time);

-- Insert demo data
-- Business purpose: a small demo dataset for learning ClickHouse queries.

-- API call log demo rows
-- FIX: the original INSERT had no column list, which breaks silently if the
-- table schema ever changes; the columns are now listed explicitly.
INSERT INTO ods_api_call_log_ch
    (id, resource_id, api_url, call_time, response_time, status_code,
     ip_address, user_id, department, request_params, response_data)
VALUES
    (1, 2, 'http://api.example.com/enterprise/query', '2024-01-15 10:30:00',
     150, 200, '192.168.1.100', 'user001', '企业用户',
     '{"query":"企业名称"}', '{"result":"success"}'),
    (2, 4, 'http://api.example.com/traffic/realtime', '2024-01-15 11:15:00',
     200, 200, '192.168.1.101', 'user002', '交通部门',
     '{"location":"广州市"}', '{"traffic":"normal"}'),
    (3, 7, 'http://api.example.com/environment/monitor', '2024-01-15 12:00:00',
     300, 200, '192.168.1.102', 'user003', '环保部门',
     '{"monitor":"空气质量"}', '{"aqi":"good"}'),
    (4, 10, 'http://api.example.com/finance/statistics', '2024-01-15 13:45:00',
     180, 200, '192.168.1.103', 'user004', '金融部门',
     '{"period":"2024"}', '{"gdp":"增长"}'),
    (5, 2, 'http://api.example.com/enterprise/query', '2024-01-15 14:20:00',
     120, 200, '192.168.1.104', 'user005', '企业用户',
     '{"query":"注册号"}', '{"result":"found"}');

-- Download log demo rows
-- FIX: the original INSERT had no column list, which breaks silently if the
-- table schema ever changes; the columns are now listed explicitly.
INSERT INTO ods_download_log_ch
    (id, resource_id, file_name, download_time, ip_address,
     user_id, file_size, department, download_source, user_agent)
VALUES
    (1, 1, '广东省GDP统计数据.xlsx', '2024-01-15 09:30:00', '192.168.1.200',
     'user101', 2048000, '科研机构', 'web', 'Mozilla/5.0'),
    (2, 5, '教育统计数据.xlsx', '2024-01-15 10:45:00', '192.168.1.201',
     'user102', 1536000, '教育部门', 'web', 'Mozilla/5.0'),
    (3, 8, '就业统计信息.xlsx', '2024-01-15 11:30:00', '192.168.1.202',
     'user103', 1024000, '人社部门', 'api', 'Python-requests'),
    (4, 1, '广东省GDP统计数据.xlsx', '2024-01-15 14:15:00', '192.168.1.203',
     'user104', 2048000, '企业用户', 'web', 'Mozilla/5.0'),
    (5, 5, '教育统计数据.xlsx', '2024-01-15 15:00:00', '192.168.1.204',
     'user105', 1536000, '科研机构', 'web', 'Mozilla/5.0');

-- Query helper: current monitoring metrics
-- Business purpose: standardized entry point returning headline real-time
-- figures (total resources, today's API calls, today's downloads).
-- FIX: the original used PostgreSQL syntax (`CREATE OR REPLACE FUNCTION ...
-- RETURNS TABLE ... AS $$ ... $$`), which ClickHouse rejects -- ClickHouse
-- UDFs are scalar lambdas and cannot return result sets. Rewritten as a
-- regular view; query it with:
--   SELECT * FROM get_realtime_metrics;
CREATE OR REPLACE VIEW get_realtime_metrics AS
SELECT
    'total_resources' AS metric_name,
    toFloat64(count()) AS metric_value,  -- cast keeps the column Float64 across the UNION
    '个' AS metric_unit,
    now() AS update_time
-- NOTE(review): ods_resource_metadata is not created by this script; it must
-- already exist elsewhere or this branch of the view fails at query time.
FROM ods_resource_metadata
UNION ALL
SELECT
    'today_api_calls' AS metric_name,
    toFloat64(count()) AS metric_value,
    '次' AS metric_unit,
    now() AS update_time
FROM ods_api_call_log_ch
WHERE toDate(call_time) = today()
UNION ALL
SELECT
    'today_downloads' AS metric_name,
    toFloat64(count()) AS metric_value,
    '次' AS metric_unit,
    now() AS update_time
FROM ods_download_log_ch
WHERE toDate(download_time) = today();

-- Query helper: historical trend analysis
-- Business purpose: per-day API call and file download counts over a caller-
-- supplied date range.
-- FIX: the original used PostgreSQL function syntax, which ClickHouse does not
-- support. Rewritten as a ClickHouse parameterized view; query it with:
--   SELECT * FROM get_trend_analysis(start_date = '2024-01-01', end_date = '2024-01-31');
-- The UNION ALL is wrapped in a subquery because in ClickHouse an ORDER BY
-- after a UNION would otherwise bind to the last SELECT only.
CREATE OR REPLACE VIEW get_trend_analysis AS
SELECT
    stat_date,
    resource_type,
    call_count,
    download_count,
    unique_users
FROM
(
    SELECT
        toDate(call_time) AS stat_date,
        'API' AS resource_type,
        count() AS call_count,
        0 AS download_count,
        uniq(user_id) AS unique_users
    FROM ods_api_call_log_ch
    -- closed BETWEEN is intentional here: both endpoints are calendar dates
    WHERE toDate(call_time) BETWEEN {start_date:Date} AND {end_date:Date}
    GROUP BY toDate(call_time)
    UNION ALL
    SELECT
        toDate(download_time) AS stat_date,
        'FILE' AS resource_type,
        0 AS call_count,
        count() AS download_count,
        uniq(user_id) AS unique_users
    FROM ods_download_log_ch
    WHERE toDate(download_time) BETWEEN {start_date:Date} AND {end_date:Date}
    GROUP BY toDate(download_time)
)
ORDER BY stat_date, resource_type;