package net.bwie.gd.dwd;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/**
 * Flink SQL streaming job: reads DWD user-behavior logs and item dimension
 * tables from Kafka, classifies each behavior into a pet type / pet age
 * bucket with a weighted score, aggregates the score per
 * (user, pet type, pet age), and continuously upserts the result into Doris.
 */
public class KafkaJob {

    public static void main(String[] args) throws Exception {

        // 1 - table execution environment
        TableEnvironment tabEnv = getTableEnv();

        // 2 - input table: mapped to the Kafka topic carrying DWD user behavior logs
        createInputTable(tabEnv);

        // 3 - processing: join dimensions, classify pet type/age, aggregate scores
        handle(tabEnv);

        // 4 - output table: mapped to the Doris result table
        createOutputTable(tabEnv);

        // 5 - stream the aggregated result into Doris
        saveToDoris(tabEnv);
    }

    /**
     * Submits the continuous INSERT that streams the aggregated view
     * {@code user_pet_type_age_sum} (registered in {@link #handle}) into the
     * Doris sink table (registered in {@link #createOutputTable}).
     */
    private static void saveToDoris(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                "INSERT INTO user_pet_type_age_sum_doris_sink\n" +
                        "select\n" +
                        "    user_id, pet_type, pet_age,  pet_type_score\n" +
                        "from user_pet_type_age_sum"
        );
    }

    /**
     * Registers the Doris sink table. Column order/types must match the
     * physical table {@code db_hxl.user_pet_type_age_sum} in Doris.
     * NOTE(review): credentials are hard-coded; move them to configuration
     * or environment variables before this leaves development.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                "CREATE TABLE user_pet_type_age_sum_doris_sink(\n" +
                        "    `user_id` string,\n" +
                        "    `pet_type` STRING,\n" +
                        "    `pet_age` STRING,\n" +
                        "    `pet_type_score` bigint\n" +
                        ") WITH (\n" +
                        "    'connector' = 'doris',\n" +
                        "    'fenodes' = 'node102:8030',\n" +
                        "    'table.identifier' = 'db_hxl.user_pet_type_age_sum',\n" +
                        "    'username' = 'root',\n" +
                        "    'password' = '123456',\n" +
                        "    'sink.batch.interval' = '10s',\n" +
                        "    'sink.max-retries' = '3',\n" +
                        "    'sink.batch.size' = '1000'\n" +
                        ")"
        );
    }

    /**
     * Registers the dimension source tables, joins them onto the behavior
     * log, classifies each record into pet type / pet age / behavior score,
     * and registers the per-(user, type, age) score aggregation as the
     * temporary view {@code user_pet_type_age_sum}.
     */
    private static void handle(TableEnvironment tabEnv) {

        // Item properties dimension (item_id -> title / keywords / level-1 category id).
        tabEnv.executeSql(
                "CREATE TABLE `dim_item_properties_kafka_source`(\n" +
                        "    `item_id` BIGINT,\n" +
                        "    `category_level1_id` BIGINT,\n" +
                        "    `item_title` STRING,\n" +
                        "    `item_keywords` string \n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dim_item_properties',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid-dim-item_properties',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );

        // Level-1 category dimension (category_level1_id -> name).
        tabEnv.executeSql(
                "CREATE TABLE `dim_item_category_level1_kafka_source`(\n" +
                        "    `category_level1_id` BIGINT,\n" +
                        "    `category_level1_name` STRING\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dim_item_category_level1',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid-dim-item_category_level1',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );

        // NOTE(review): the original job also registered and LEFT JOINed
        // dim_item_category_level2_kafka_source (alias d4), but selected no
        // column from it. Joining on category_level1_id fanned out one row per
        // level-2 category only for DISTINCT to collapse them again, so the
        // join (and the now-unused DDL) were removed: the result set is
        // identical and the streaming join keeps far less state.

        // Enrich each behavior record with item title/keywords and the
        // level-1 category name; DISTINCT dedups repeated log events.
        Table enriched = tabEnv.sqlQuery(
                "select\n" +
                        "    distinct d1.user_id, d1.behavior_type, d1.item_id,\n" +
                        "    d2.item_title, d2.item_keywords,\n" +
                        "    d3.category_level1_name,\n" +
                        "    d1.behavior_time\n" +
                        "from dwd_user_behavior_log_kafka_source d1\n" +
                        "left join dim_item_properties_kafka_source d2\n" +
                        "on d1.item_id = d2.item_id\n" +
                        "left join dim_item_category_level1_kafka_source d3\n" +
                        "on d2.category_level1_id = d3.category_level1_id"
        );
        tabEnv.createTemporaryView("dwd_user_pet_log", enriched);

        // Classify each record: pet type from the item title, pet age (young/
        // adult/senior) from the keywords, and a behavior weight
        // (view=1, favorite=2, add-to-cart=3, purchase=5). Unmatched CASEs
        // yield NULL and are filtered out downstream.
        // NOTE(review): behavior_time is a STRING, so BETWEEN compares
        // lexicographically — correct only for zero-padded 'yyyy-MM-dd...'
        // values, and timestamps ON '2025-06-09' itself (e.g.
        // '2025-06-09 10:00') sort AFTER the upper bound and are excluded.
        // Confirm the intended inclusivity of the end date.
        Table classified = tabEnv.sqlQuery(
                " SELECT user_id, \n" +
                        "               CASE \n" +
                        "                   WHEN item_title LIKE '%犬%' THEN '狗' \n" +
                        "                   WHEN item_title LIKE '%猫%' THEN '猫' \n" +
                        "                   WHEN item_title LIKE '%兔%' THEN '兔' \n" +
                        "                   WHEN item_title LIKE '%鸟%' THEN '鸟' \n" +
                        "               END AS pet_type, \n" +
                        "               CASE \n" +
                        "                   WHEN item_keywords LIKE '%幼%' THEN '幼年' \n" +
                        "                   WHEN item_keywords LIKE '%成%' THEN '成年' \n" +
                        "                   WHEN item_keywords LIKE '%老%' THEN '老年' \n" +
                        "               END AS pet_age, \n" +
                        "               CASE \n" +
                        "                   WHEN behavior_type LIKE '%浏览%' THEN 1 \n" +
                        "                   WHEN behavior_type LIKE '%收藏%' THEN 2 \n" +
                        "                   WHEN behavior_type LIKE '%加购%' THEN 3 \n" +
                        "                   WHEN behavior_type LIKE '%购买%' THEN 5 \n" +
                        "               END AS pet_type_score \n" +
                        "        FROM dwd_user_pet_log \n" +
                        "        WHERE behavior_time BETWEEN '2024-12-11' AND '2025-06-09' "
        );
        tabEnv.createTemporaryView("user_pet_type_age", classified);

        // Total score per (user, pet type, pet age); rows whose type or age
        // could not be classified are dropped.
        Table aggregated = tabEnv.sqlQuery(
                "select\n" +
                        "    user_id, cast(pet_type as varchar) as pet_type, cast(pet_age as varchar) as pet_age, SUM(pet_type_score) AS pet_type_score\n" +
                        "from user_pet_type_age\n" +
                        "where pet_type is not null and pet_age is not null\n" +
                        "GROUP BY user_id, pet_type, pet_age"
        );
        tabEnv.createTemporaryView("user_pet_type_age_sum", aggregated);
    }

    /**
     * Registers the main input table: the DWD user-behavior log topic.
     * Malformed JSON records are silently skipped
     * ('json.ignore-parse-errors' = 'true').
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                "CREATE TABLE `dwd_user_behavior_log_kafka_source`(\n" +
                        "    `user_id` STRING,\n" +
                        "    `behavior_type` STRING,\n" +
                        "    `item_id` BIGINT,\n" +
                        "    `behavior_time` STRING\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd_user_behavior_log',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid-dwd-user-log',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );
    }

    /**
     * Builds the streaming {@link TableEnvironment} with job-level settings:
     * Shanghai local time zone, default parallelism 1, and a 5 s state TTL.
     * NOTE(review): with a 5 s {@code table.exec.state.ttl}, join state for
     * dimension rows expires after 5 seconds of inactivity, so late-arriving
     * facts may fail to match a dimension row — confirm this TTL is intended
     * for the stream-stream joins in {@link #handle}.
     */
    private static TableEnvironment getTableEnv() {
        // 1 - environment settings (streaming mode, Blink planner)
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .useBlinkPlanner()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);

        // 2 - job configuration
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        configuration.setString("table.exec.state.ttl", "5 s");

        // 3 - return the configured environment
        return tabEnv;
    }
}
