package com.bw.app.dws;


import com.bw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: counts distinct ordering users per 2-second tumbling window,
 * reading pre-processed order records from the Kafka topic {@code dwd_order_pre}.
 *
 * <p>Pure Table-API pipeline: the job is submitted by {@code executeSql(...)} /
 * {@code TableResult.print()}; no {@code env.execute()} is needed (and calling it
 * would fail with "No operators defined in streaming topology", because no
 * DataStream operators are ever registered on the environment).
 */
public class Dws_user_cout {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        String topic = "dwd_order_pre";
        String groupId = "user_count_2114";

        // Kafka-backed source table. row_op_ts arrives as "yyyy-MM-dd HH:mm:ss";
        // it is converted to an event-time column (time_ltz) with a 2-second
        // watermark delay for out-of-order records.
        tableEnv.executeSql("CREATE TABLE user_count (\n" +
                "  `sku_id` STRING,\n" +
                "  `user_id` STRING,\n" +
                "  `is_ordered` STRING,\n" +
                "  `row_op_ts` STRING,\n" +
                "  `coun` STRING,\n" +
                "  `time_ltz` AS TO_TIMESTAMP_LTZ(\n" +
                "    UNIX_TIMESTAMP(REPLACE(row_op_ts, ' ', 'T'), 'yyyy-MM-dd''T''HH:mm:ss') * 1000,\n" +
                "    3\n" +  // millisecond precision
                "  ),\n" +
                "  WATERMARK FOR time_ltz AS time_ltz - INTERVAL '2' seconds\n" +
                ")  " + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // 2-second tumbling-window aggregation over ordered records only.
        // NOTE(review): grouping by user_id while computing COUNT(DISTINCT user_id)
        // makes `coun` always 1 — to count distinct users per sku/window, drop
        // user_id from SELECT and GROUP BY. Left as-is to preserve the output schema;
        // confirm intent with the pipeline owner.
        tableEnv.executeSql("SELECT window_start, window_end, sku_id, user_id, is_ordered, count(distinct user_id) coun\n" +
                "  FROM TABLE(\n" +
                "    TUMBLE(TABLE user_count, DESCRIPTOR(time_ltz), INTERVAL '2' seconds))\n" +
                "  WHERE is_ordered = '1'\n" +
                "  GROUP BY window_start, window_end, is_ordered, sku_id, user_id").print();
        // Removed env.execute(): with no DataStream operators defined it throws at
        // runtime; the SQL job above is already submitted and print() blocks on it.
    }
}
