// FlinkPetFeatureProcessor.java
package com.lhd.app.processor;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

/**
 * 第二层：Flink SQL计算和标签处理（修复版）
 */
public class FlinkPetFeatureProcessor {

    private static final String KAFKA_BOOTSTRAP_SERVERS = "hadoop102:9092";
    private static final String TOPIC_NAME = "pet_behavior_topic";
    private static final String OUTPUT_CSV_PATH = "data/宠物特征标签数据";
    /** Reference point for time-decay scoring; previously hard-coded 5 times inside the SQL. */
    private static final String REFERENCE_TIME = "2025-05-01 00:00:00";
    /** Minimum weighted score for a pet-type tag to be assigned. */
    private static final int TAG_THRESHOLD = 10;

    /**
     * Entry point: reads pet-behavior events from Kafka, computes time-decayed
     * behavior scores per user and pet type, derives pet-type/age tags, and
     * writes the result as CSV files via the filesystem sink.
     *
     * @param args unused
     * @throws Exception if SQL submission fails
     */
    public static void main(String[] args) throws Exception {
        EnvironmentSettings settings = EnvironmentSettings
            .newInstance()
            .inStreamingMode()
            .build();

        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // BUGFIX: the previous version created a StreamExecutionEnvironment and
        // called setParallelism(1) on it, but TableEnvironment.create(settings)
        // never used it, so the setting had no effect. Configure the table
        // environment directly instead.
        tableEnv.getConfig().getConfiguration().setString("parallelism.default", "1");
        // BUGFIX: the streaming filesystem sink only commits (finalizes) files on
        // checkpoints; without checkpointing the CSV output would stay as
        // in-progress files forever.
        tableEnv.getConfig().getConfiguration().setString("execution.checkpointing.interval", "1 min");

        tableEnv.executeSql(kafkaSourceDdl());
        tableEnv.executeSql(featureViewSql());
        tableEnv.executeSql(petTypeAggSql());
        tableEnv.executeSql(resultSinkDdl());

        System.out.println("开始执行Flink计算任务...");
        System.out.println("从Kafka主题读取: " + TOPIC_NAME);
        System.out.println("输出到: " + OUTPUT_CSV_PATH);

        TableResult result = tableEnv.executeSql(insertSql());

        // NOTE: with an unbounded Kafka source this job runs indefinitely;
        // await() only returns on failure or external cancellation.
        try {
            result.await();
            System.out.println("Flink计算任务完成！");
        } catch (Exception e) {
            System.err.println("任务执行异常: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /** DDL for the Kafka JSON source table of raw behavior events. */
    private static String kafkaSourceDdl() {
        return String.format(
            "CREATE TABLE pet_behavior_source (\n" +
            "  record_id STRING,\n" +
            "  user_id STRING,\n" +
            "  behavior_type STRING,\n" +
            "  behavior_time STRING,\n" +
            "  product_category STRING,\n" +
            "  product_title STRING,\n" +
            "  base_behavior_weight INT,\n" +
            "  pet_type STRING,\n" +
            "  is_common_category BOOLEAN,\n" +
            "  age_stage STRING\n" +
            ") WITH (\n" +
            "  'connector' = 'kafka',\n" +
            "  'topic' = '%s',\n" +
            "  'properties.bootstrap.servers' = '%s',\n" +
            "  'properties.group.id' = 'pet-feature-group',\n" +
            "  'format' = 'json',\n" +
            "  'scan.startup.mode' = 'earliest-offset',\n" +
            "  'json.fail-on-missing-field' = 'false',\n" +
            "  'json.ignore-parse-errors' = 'true'\n" +
            ")", TOPIC_NAME, KAFKA_BOOTSTRAP_SERVERS);
    }

    /**
     * View adding a time-decay factor and a per-behavior weight to each event.
     *
     * <p>BUGFIX: the previous version used {@code DATEDIFF}, which is not a
     * Flink SQL built-in function (it is Hive/MySQL syntax) and would fail SQL
     * validation. Flink's equivalent is {@code TIMESTAMPDIFF(DAY, from, to)}.
     * The day difference is now also computed once in a subquery instead of
     * being repeated in every CASE branch.
     */
    private static String featureViewSql() {
        return String.format(
            "CREATE TEMPORARY VIEW pet_feature_view AS \n" +
            "SELECT \n" +
            "  user_id,\n" +
            "  behavior_type,\n" +
            "  behavior_time,\n" +
            "  product_category,\n" +
            "  product_title,\n" +
            "  base_behavior_weight,\n" +
            "  pet_type,\n" +
            "  is_common_category,\n" +
            "  age_stage,\n" +
            "  -- time decay: ~10%% per 30 days relative to the reference date\n" +
            "  CASE \n" +
            "    WHEN days_ago <= 30 THEN 1.0\n" +
            "    WHEN days_ago <= 60 THEN 0.9\n" +
            "    WHEN days_ago <= 90 THEN 0.81\n" +
            "    WHEN days_ago <= 120 THEN 0.73\n" +
            "    WHEN days_ago <= 150 THEN 0.66\n" +
            "    ELSE 0.59\n" +
            "  END AS time_decay_factor,\n" +
            "  -- behavior-type weight mapping\n" +
            "  CASE behavior_type\n" +
            "    WHEN '购买' THEN 5\n" +
            "    WHEN '加购' THEN 3\n" +
            "    WHEN '收藏' THEN 2\n" +
            "    WHEN '浏览' THEN 1\n" +
            "    ELSE 0\n" +
            "  END AS behavior_weight\n" +
            "FROM (\n" +
            "  SELECT s.*,\n" +
            "    TIMESTAMPDIFF(DAY, CAST(behavior_time AS TIMESTAMP), TIMESTAMP '%s') AS days_ago\n" +
            "  FROM pet_behavior_source s\n" +
            ")", REFERENCE_TIME);
    }

    /** Per-user aggregation of decayed behavior scores by pet type. */
    private static String petTypeAggSql() {
        return
            "CREATE TEMPORARY VIEW pet_type_tags AS \n" +
            "SELECT \n" +
            "  user_id,\n" +
            "  -- NOTE(review): the 0.5 factor applies to pet-specific rows as well\n" +
            "  -- as generic ('通用') rows; if only generic rows were meant to be\n" +
            "  -- half-weighted, the CASE needs separate branches — confirm intent.\n" +
            "  CAST(SUM(CASE \n" +
            "    WHEN (pet_type = '狗' OR (is_common_category = true AND pet_type = '通用')) THEN behavior_weight * time_decay_factor * 0.5\n" +
            "    ELSE 0 \n" +
            "  END) AS DOUBLE) AS dog_score,\n" +
            "  CAST(SUM(CASE \n" +
            "    WHEN (pet_type = '猫' OR (is_common_category = true AND pet_type = '通用')) THEN behavior_weight * time_decay_factor * 0.5\n" +
            "    ELSE 0 \n" +
            "  END) AS DOUBLE) AS cat_score,\n" +
            "  CAST(SUM(CASE \n" +
            "    WHEN pet_type = '仓鼠' THEN behavior_weight * time_decay_factor\n" +
            "    ELSE 0 \n" +
            "  END) AS DOUBLE) AS hamster_score,\n" +
            "  CAST(SUM(CASE \n" +
            "    WHEN pet_type = '鸟类' THEN behavior_weight * time_decay_factor\n" +
            "    ELSE 0 \n" +
            "  END) AS DOUBLE) AS bird_score,\n" +
            "  COUNT(*) as total_behaviors\n" +
            "FROM pet_feature_view \n" +
            "GROUP BY user_id";
    }

    /** DDL for the CSV filesystem sink holding the final per-user tags. */
    private static String resultSinkDdl() {
        return String.format(
            "CREATE TABLE pet_feature_result (\n" +
            "  user_id STRING,\n" +
            "  dog_score DOUBLE,\n" +
            "  cat_score DOUBLE,\n" +
            "  hamster_score DOUBLE,\n" +
            "  bird_score DOUBLE,\n" +
            "  total_behaviors BIGINT,\n" +
            "  pet_type_tags STRING,\n" +
            "  pet_age_tags STRING,\n" +
            "  processing_time TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "  'connector' = 'filesystem',\n" +
            "  'path' = '%s',\n" +
            "  'format' = 'csv',\n" +
            "  'sink.rolling-policy.file-size' = '1MB',\n" +
            "  'sink.rolling-policy.rollover-interval' = '10 min',\n" +
            "  'sink.rolling-policy.check-interval' = '1 min'\n" +
            ")", OUTPUT_CSV_PATH);
    }

    /**
     * INSERT deriving tags from the aggregated scores.
     *
     * <p>BUGFIX: the previous CONCAT left a dangling comma whenever the last
     * matching tag was not '鸟类' (e.g. {@code "狗,"}); TRIM(TRAILING ...) now
     * strips it, and every branch appends a comma uniformly.
     *
     * <p>BUGFIX: {@code CURRENT_TIMESTAMP} is {@code TIMESTAMP_LTZ(3)} in
     * modern Flink and does not implicitly cast to the declared
     * {@code TIMESTAMP(3)} column; {@code LOCALTIMESTAMP} has the right type.
     */
    private static String insertSql() {
        return String.format(
            "INSERT INTO pet_feature_result \n" +
            "SELECT \n" +
            "  user_id,\n" +
            "  dog_score,\n" +
            "  cat_score,\n" +
            "  hamster_score,\n" +
            "  bird_score,\n" +
            "  total_behaviors,\n" +
            "  -- pet-type tags: any type whose score reaches the threshold\n" +
            "  TRIM(TRAILING ',' FROM CONCAT(\n" +
            "    CASE WHEN dog_score >= %1$d THEN '狗,' ELSE '' END,\n" +
            "    CASE WHEN cat_score >= %1$d THEN '猫,' ELSE '' END,\n" +
            "    CASE WHEN hamster_score >= %1$d THEN '仓鼠,' ELSE '' END,\n" +
            "    CASE WHEN bird_score >= %1$d THEN '鸟类,' ELSE '' END\n" +
            "  )) AS pet_type_tags,\n" +
            "  -- pet-age tags (simplified placeholder implementation)\n" +
            "  CASE \n" +
            "    WHEN dog_score >= %1$d AND cat_score >= %1$d THEN '狗-成年,猫-成年'\n" +
            "    WHEN dog_score >= %1$d THEN '狗-成年'\n" +
            "    WHEN cat_score >= %1$d THEN '猫-成年'\n" +
            "    ELSE '未知'\n" +
            "  END AS pet_age_tags,\n" +
            "  LOCALTIMESTAMP AS processing_time\n" +
            "FROM pet_type_tags", TAG_THRESHOLD);
    }
}