package com.bw.ads;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * Flink SQL job: reads a wide order record from Kafka topic {@code mn5tm4topic3},
 * aggregates it in daily tumbling event-time windows per receiver city, and prints
 * the rows whose {@code zzs} ratio is more than three times the {@code pss} ratio.
 *
 * <p>NOTE(review): the original in-line comment described an "hourly peak period"
 * statistic, but the SQL below windows by day and computes sum ratios — the comment
 * did not match the query; the query's actual behavior is documented here instead.
 */
public class Mn5TM4_6 {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the printed output in one ordered stream.
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source table over Kafka: event time derived from create_time with a
        // zero-second watermark delay (assumes strictly ordered input — confirm).
        String sourceDdl =
                "create table mn5TM4Wide (\n" +
                "    receiver_city_id bigint,\n" +
                "    receiver_city_name string,\n" +
                "    ls double,\n" +
                "    ys double,\n" +
                "    zz double,\n" +
                "    ps double,\n" +
                "    zong double,\n" +
                "    create_time string,\n" +
                "    times as to_timestamp(create_time),\n" +
                "    WATERMARK FOR times AS times - INTERVAL '0' SECOND\n" +
                ")with(\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'mn5tm4topic3',\n" +
                "    'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "    'properties.group.id' = 'group1',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json'\n" +
                ")";
        tEnv.executeSql(sourceDdl);

        // Per city and per 1-day tumbling window: each category's share of the
        // total (ls/ys/zz/ps divided by zong), plus the window bounds.
        String dailyRatioSql =
                "" +
                "select " +
                " TUMBLE_START(times,INTERVAL '1' day) as wsStart," +
                " TUMBLE_END(times,INTERVAL '1' day) as wsEnd," +
                " receiver_city_name," +
                " sum(ls)/sum(zong) lss,sum(ys)/sum(zong) yss,sum(zz)/sum(zong) zzs,sum(ps)/sum(zong) pss from mn5TM4Wide " +
                "GROUP BY TUMBLE(times,INTERVAL '1' day),receiver_city_name";
        Table dailyRatios = tEnv.sqlQuery(dailyRatioSql);

        // Register the aggregate as a view so the filter below can query it by name.
        tEnv.createTemporaryView("tmp", dailyRatios);

        // Keep only windows where the zz share exceeds three times the ps share.
        tEnv.sqlQuery("select * from tmp where zzs > pss*3").execute().print();

        // table-to-stream conversion (original note; not implemented here)
    }

}














