package com.bw.yk05;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * Flink SQL job over the {@code dwd-order-detail} Kafka topic.
 *
 * <p>Computes three daily (tumbling 1-day event-time window) statistics:
 * <ol>
 *   <li>Top-5 receiving provinces by parcel count per day.</li>
 *   <li>Top-5 sending provinces by parcel count per day.</li>
 *   <li>Top-2 peak hours of received parcels per city per day.</li>
 * </ol>
 *
 * <p>NOTE: each {@code execute().print()} blocks on an unbounded Kafka stream,
 * so in practice only the first query keeps printing; kept sequential to match
 * the original program's structure.
 */
public class Mn5TM4_1 {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Kafka-backed source table. `times` is an event-time attribute derived from
        // the create_time string (expected format "yyyy-MM-dd HH:mm:ss"); the 0-second
        // watermark means no tolerance for out-of-order events.
        // FIX: declared receiver_city_id, which the peak-hour query selects but the
        // original DDL omitted.
        tEnv.executeSql("create table dwd_order_detail (\n" +
                "    id int,\n" +
                "    receiver_province_id int,\n" +
                "    sender_province_id int,\n" +
                "    receiver_city_id int,\n" +
                "    create_time string,\n" +
                "    times as to_timestamp(create_time),\n" +
                "    WATERMARK FOR times AS times - INTERVAL '0' SECOND\n" +
                ") with (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd-order-detail',\n" +
                "    'properties.bootstrap.servers' = 'hadoop-single:9092',\n" +
                "    'properties.group.id' = 'group1',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json'\n" +
                ")");

        // Top-5 provinces per day by received and by sent parcel count.
        // FIX: the original queries mixed RANK() OVER with a GROUP BY window and
        // filtered on the alias in the same query's WHERE clause — invalid SQL
        // (WHERE runs before window functions) and also had missing/stray commas.
        // The supported Flink Top-N pattern aggregates first, ranks in an outer
        // query, then filters on the row number.
        printDailyTopN(tEnv, "receiver_province_id", 5);
        printDailyTopN(tEnv, "sender_province_id", 5);

        // Top-2 peak hours of received parcels per city per day.
        printPeakReceivingHours(tEnv);
    }

    /**
     * Prints the daily Top-N values of {@code dimColumn} ranked by parcel count.
     *
     * @param tEnv      table environment holding the dwd_order_detail table
     * @param dimColumn grouping column (trusted, compile-time constant — not user input)
     * @param n         number of top rows to keep per daily window
     */
    private static void printDailyTopN(StreamTableEnvironment tEnv, String dimColumn, int n) {
        // `rn` instead of `rank`: RANK is a reserved word in Flink/Calcite SQL.
        tEnv.sqlQuery(
                "select wsStart, wsEnd, " + dimColumn + ", province_ct, rn from (\n" +
                "  select *,\n" +
                "         ROW_NUMBER() OVER (PARTITION BY wsStart ORDER BY province_ct DESC) as rn\n" +
                "  from (\n" +
                "    select TUMBLE_START(times, INTERVAL '1' DAY) as wsStart,\n" +
                "           TUMBLE_END(times, INTERVAL '1' DAY) as wsEnd,\n" +
                "           " + dimColumn + ",\n" +
                "           count(*) as province_ct\n" +
                "    from dwd_order_detail\n" +
                "    group by TUMBLE(times, INTERVAL '1' DAY), " + dimColumn + "\n" +
                "  )\n" +
                ") where rn <= " + n
        ).execute().print();
    }

    /**
     * Prints, per day and per receiving city, the two hours with the highest
     * received-parcel counts.
     *
     * <p>FIX vs. original: uses HOUR(times) (the TIMESTAMP column) rather than
     * HOUR on the raw create_time string, partitions the ranking by window AND
     * city so the top-2 is per city, and avoids the reserved alias {@code hour}.
     */
    private static void printPeakReceivingHours(StreamTableEnvironment tEnv) {
        tEnv.sqlQuery(
                "select wsStart, wsEnd, receiver_city_id, hour_of_day, city_ct, rn from (\n" +
                "  select *,\n" +
                "         ROW_NUMBER() OVER (PARTITION BY wsStart, receiver_city_id ORDER BY city_ct DESC) as rn\n" +
                "  from (\n" +
                "    select TUMBLE_START(times, INTERVAL '1' DAY) as wsStart,\n" +
                "           TUMBLE_END(times, INTERVAL '1' DAY) as wsEnd,\n" +
                "           receiver_city_id,\n" +
                "           HOUR(times) as hour_of_day,\n" +
                "           count(*) as city_ct\n" +
                "    from dwd_order_detail\n" +
                "    group by TUMBLE(times, INTERVAL '1' DAY), receiver_city_id, HOUR(times)\n" +
                "  )\n" +
                ") where rn <= 2"
        ).execute().print();
    }

}














