package com.bw.ads;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Test4_弃用 {
    /**
     * Flink SQL streaming job: reads order-detail events from Kafka, then
     * (1) computes per-province order counts per create_time and ranks the
     * top-5 provinces per create_time, and (2) aggregates daily order counts
     * per receiver province over tumbling event-time windows.
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Map the Kafka topic onto a dynamic table. `times` is computed from the
        // epoch-millis column `create_ts` and declared as the event-time attribute
        // with a 5-second out-of-orderness watermark.
        tEnv.executeSql("CREATE TABLE dwd_order_detail (\n" +
                "  `id` BIGINT,\n" +
                "  `receiver_province_id` STRING,\n" +
                "  `sender_province_id` STRING,\n" +
                "  `create_time` STRING,\n" +
                "  `create_ts` BIGINT,\n" +
                "  `times` as to_timestamp_ltz(create_ts,3),\n" +
                "   WATERMARK FOR times AS times - INTERVAL '5' SECOND\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'dwd-order-detail-yk5-1',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'testGroup10',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // 1) Per-province counts keyed by create_time.
        //    FIX: the aggregate is aliased as `ct` and `create_time` is selected,
        //    so that the Top-N query over view t1 below can reference both columns
        //    (the original selected neither, making the downstream query fail).
        Table table = tEnv.sqlQuery("select\n" +
                "  receiver_province_id,\n" +
                "  create_time,\n" +
                "  count(*) as ct\n" +
                "from dwd_order_detail group by receiver_province_id,create_time");

        tEnv.createTemporaryView("t1", table);

        // Top-5 provinces per create_time.
        //    FIX: Flink streaming SQL supports ranking only via the documented
        //    Top-N pattern — ROW_NUMBER() in a subquery with an outer filter on
        //    the row number. A bare rank() OVER (... ORDER BY a non-time column)
        //    in the SELECT list is rejected in stream mode.
        tEnv.sqlQuery("select\n" +
                "  receiver_province_id,\n" +
                "  create_time,\n" +
                "  ct\n" +
                "from (\n" +
                "  select receiver_province_id, create_time, ct,\n" +
                "    row_number() over (partition by create_time order by ct desc) as rn\n" +
                "  from t1\n" +
                ") where rn <= 5").execute().print();

        // 2) Daily tumbling event-time window: order count per receiver province.
        tEnv.sqlQuery("SELECT\n" +
                "  TUMBLE_START(times, INTERVAL '1' day) AS wStart,\n" +
                "  TUMBLE_END(times, INTERVAL '1' day) AS wEnd,\n" +
                "  receiver_province_id,\n" +
                "  count(*) \n" +
                "FROM dwd_order_detail\n" +
                "GROUP BY\n" +
                "TUMBLE(times, INTERVAL '1' day),receiver_province_id").execute().print();
    }
}
