package com.intct.flink.study;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author gufg
 * @since 2025-10-13 17:30
 */
/**
 * Flink SQL example: reads page-view events from a Kafka topic, computes a
 * continuously-updated per-user aggregate (pv = total views, uv = distinct
 * users), and writes the changelog to an upsert-kafka sink keyed by
 * {@code user_region}.
 */
public class SQLTest08 {

    public static void main(String[] args) throws Exception {
        // Pin the REST port so the local Flink web UI is reachable at :8081.
        Configuration conf = new Configuration();
        conf.set(RestOptions.BIND_PORT, "8081");

        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Parallelism 1 keeps the local example output deterministic and easy to follow.
        env.setParallelism(1);
        // Checkpointing (left disabled for this demo; enable for exactly-once in production):
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // Table/SQL environment layered on top of the DataStream environment.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table: raw page views from Kafka, with an event-time watermark
        // on viewtime (zero-second lateness tolerance).
        tenv.executeSql("CREATE TABLE pageviews (" +
                "  user_id BIGINT," +
                "  page_id BIGINT," +
                "  viewtime timestamp(3), " +
                "  watermark for viewtime as viewtime - interval '0' second " +
                ") WITH (" +
                " 'connector' = 'kafka'," +
                " 'topic' = 'pageviews'," +
                " 'properties.bootstrap.servers' = 'cdh-node:9092'," +
                " 'properties.group.id' = 'pageviews_a'," +
                " 'scan.startup.mode' = 'earliest-offset'," +
                " 'format' = 'json'" +
                ")");

        // Sink table: upsert-kafka, keyed by user_region so each aggregate row
        // is updated in place rather than appended.
        tenv.executeSql("CREATE TABLE pageviews_per_region (" +
                "  user_region BIGINT," +
                "  pv BIGINT," +
                "  uv BIGINT," +
                "  PRIMARY KEY (user_region) NOT ENFORCED" +
                ") WITH (" +
                "  'connector' = 'upsert-kafka'," +
                "  'topic' = 'pageviews_per_region'," +
                "  'properties.bootstrap.servers' = 'cdh-node:9092'," +
                "  'key.format' = 'json'," +
                "  'value.format' = 'json'" +
                ")");

        // Continuous aggregation: per-user pv/uv written to the upsert sink.
        // NOTE(review): user_id is written into the user_region column — the source
        // schema has no region attribute; confirm this mapping is intentional.
        // await() blocks the client until the streaming job terminates; without it,
        // main() returns right after submission and the local job may be torn down.
        tenv.executeSql("insert into pageviews_per_region" +
                " SELECT user_id, count(*), count(DISTINCT user_id) FROM pageviews GROUP BY user_id")
                .await();
    }
}
