package cn._51doit.flink.day12;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Counts newly-added users along multiple dimensions at once using SQL
 * {@code GROUPING SETS}: per (province, brand) and per (province) subtotals.
 * Reads CSV user events from a Kafka topic and writes the aggregates to a
 * print sink for demo/debugging purposes.
 */
public class GroupingSetDemo {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Sample new-user events (CSV records on the Kafka topic):
        //u001,辽宁省,huawei
        //u002,辽宁省,apple
        //u003,辽宁省,oppo
        //u004,山东省,oppo
        //u005,山东省,huawei

        // Kafka source table: uid, province, brand — parsed from CSV,
        // consuming the topic from the earliest offset.
        tEnv.executeSql(
                "CREATE TABLE tb_user_event (\n" +
                        "  `uid`      STRING ,\n" +
                        "  `province` STRING ,\n" +
                        "  `brand`    STRING  \n" +
                        ") WITH (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = 'user-event2',\n" +
                        "  'properties.bootstrap.servers' = 'node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092',\n" +
                        "  'properties.group.id' = 'testGroup',\n" +
                        "  'scan.startup.mode' = 'earliest-offset',\n" +
                        "  'format' = 'csv'\n" +
                        ")"
        );

        // Print sink. `brand` is NULL for rows produced by the (province)
        // grouping set, which the nullable STRING column accommodates.
        tEnv.executeSql(
                "CREATE TABLE tb_print_table (\n" +
                        "  `province` STRING ,\n" +
                        "  `brand`    STRING ,\n" +
                        "  `counts`    BIGINT  \n" +
                        ") WITH (\n" +
                        "  'connector' = 'print'\n" +
                        ")"
        );

        // executeSql() submits the INSERT job asynchronously and returns
        // immediately; without await() the JVM may exit (in a local/IDE run)
        // before the streaming job emits anything. await() blocks until the
        // job terminates.
        tEnv.executeSql("INSERT INTO tb_print_table " +
                "SELECT province, brand, count(*) counts FROM tb_user_event GROUP BY GROUPING SETS ((province, brand), (province))")
                .await();

    }
}
