package com.bw.ads;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

/*
Reference DDL for the Kafka source table — kept in sync with the DDL executed
in main() below:

create table mn5TM4Wide (
    start_org_id bigint,
    end_org_id bigint,
    start_org_name string,
    end_org_name string,
    start_province_name string,
    end_province_name string,
    create_time string,
    times as to_timestamp(create_time),
    WATERMARK FOR times AS times - INTERVAL '0' SECOND
)with(
    'connector' = 'kafka',
    'topic' = 'mn5tm4topic1',
    'properties.bootstrap.servers' = 'hadoop102:9092',
    'properties.group.id' = 'group1',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'json'
)
 */
/**
 * Flink SQL streaming job over logistics transfer events.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read wide records from the Kafka topic {@code mn5tm4topic1}; derive an
 *       event-time attribute {@code times} from {@code create_time} with a
 *       zero-delay watermark.</li>
 *   <li>Fan each record out via a UDTF into two rows: one for the sending
 *       station ({@code scnt=1, rcnt=0}) and one for the receiving station
 *       ({@code scnt=0, rcnt=1}).</li>
 *   <li>Aggregate send/receive counts per (province, station) over 30-second
 *       tumbling event-time windows.</li>
 *   <li>Rank stations within each (window, province) by send count and print
 *       the top 5.</li>
 * </ol>
 */
public class Mn5TM4_2 {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed demo output in a single ordered stream.
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Kafka source table. `times` is a computed event-time column with a
        // zero-tolerance watermark (no out-of-orderness allowed).
        tEnv.executeSql("create table mn5TM4Wide (\n" +
                "    start_org_id bigint,\n" +
                "    end_org_id bigint,\n" +
                "    start_org_name string,\n" +
                "    end_org_name string,\n" +
                "    start_province_name string,\n" +
                "    end_province_name string,\n" +
                "    create_time string,\n" +
                "    times as to_timestamp(create_time),\n" +
                "    WATERMARK FOR times AS times - INTERVAL '0' SECOND\n" +
                ")with(\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'mn5tm4topic1',\n" +
                "    'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "    'properties.group.id' = 'group1',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json'\n" +
                ")");

        // Explode each wide record via the UDTF into per-station rows, e.g.:
        //   province  station                        send  recv
        //   Hunan     Changsha Yuhua transfer stn      1     0
        //   Hunan     Changsha transfer center         0     1
        tEnv.createTemporaryFunction("myudtf", SplitFunction.class);
        Table explodedTable = tEnv.sqlQuery("select province,orgname,scnt,rcnt,times  " +
                " from mn5TM4Wide,LATERAL TABLE(myudtf(start_org_name,end_org_name,start_province_name,end_province_name))");
        tEnv.createTemporaryView("tmp1", explodedTable);

        // 30-second tumbling event-time window: total sends/receives per
        // (province, station).
        Table windowedTable = tEnv.sqlQuery("" +
                "select " +
                " TUMBLE_START(times,INTERVAL '30' second) as wsStart," +
                " TUMBLE_END(times,INTERVAL '30' second) as wsEnd," +
                " province,orgname,sum(scnt) scnts,sum(rcnt) rcnts from tmp1 " +
                "GROUP BY TUMBLE(times,INTERVAL '30' second),province,orgname");
        tEnv.createTemporaryView("tmp2", windowedTable);

        // Top-5 stations per (window, province), ranked by send count.
        // (For a top-5 by receive count, order by rcnts desc instead.)
        // execute().print() triggers the streaming job and blocks, emitting
        // one result row per window, e.g.:
        // | op |   wsStart |     wsEnd | province | orgname          | scnts | rcnts |
        // | +I | ...T16:00 | ...T17:00 | Hunan    | Changsha center  |    30 |    30 |
        tEnv.sqlQuery("" +
                "select * from (" +
                "select * ,row_number() over(partition by wsStart,wsEnd,province order by scnts desc) rk " +
                " from tmp2) where rk<=5").execute().print();
    }

    /**
     * UDTF that splits one transfer record into two rows: the sending station
     * (counted in {@code scnt}) and the receiving station (counted in
     * {@code rcnt}), each tagged with its province.
     */
    @FunctionHint(output = @DataTypeHint("ROW<province STRING, orgname STRING,scnt INT,rcnt Int>"))
    public static class SplitFunction extends TableFunction<Row> {
        public void eval(String start_org_name, String end_org_name, String start_province_name, String end_province_name) {
            // One row for the sender side, one for the receiver side.
            collect(Row.of(start_province_name, start_org_name, 1, 0));
            collect(Row.of(end_province_name, end_org_name, 0, 1));
        }
    }

}
