package com.lsx143.realtime.app.dws;

import com.lsx143.realtime.app.BaseSQLApp;
import com.lsx143.realtime.bean.ProvinceStats;
import com.lsx143.realtime.common.Constants;
import com.lsx143.realtime.util.WriteUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer job: reads the order-wide stream from Kafka, aggregates order
 * amount and distinct order count per province over 5-second tumbling
 * event-time windows, and sinks the result to ClickHouse.
 */
public class DWSProvinceStatsSQLApp extends BaseSQLApp {
    /** Job name; also reused as the Kafka consumer group id in the source DDL. */
    private static final String APP_NAME = "DWSProvinceStatsSQLApp";

    public static void main(String[] args) {
        new DWSProvinceStatsSQLApp().init(
                30004,    // port passed to BaseSQLApp.init — presumably the local web UI port; confirm against BaseSQLApp
                APP_NAME,
                1);       // parallelism
    }

    /**
     * Business logic.
     *
     * @param tEnv the Flink stream table environment supplied by {@link BaseSQLApp}
     */
    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // 1. Source table: read the order-wide records from the DWM Kafka topic.
        //    Event time (et) is derived from create_time with a 3-second watermark delay.
        tEnv.executeSql("create table order_wide(" +
                "   province_id bigint, " +
                "   province_name string, " +
                "   province_area_code string, " +
                "   province_iso_code string, " +
                "   province_3166_2_code string, " +
                "   split_total_amount decimal(20, 2), " +
                "   order_id bigint, " +
                "   create_time string, " +
                "   et as to_timestamp(create_time), " +
                "   watermark for et as et - interval '3' second " +
                ")with(" +
                "   'connector' = 'kafka'," +
                "   'properties.bootstrap.servers' = '" + Constants.KAFKA_BROKERS + "'," +
                "   'properties.group.id' = '" + APP_NAME + "'," +
                "   'topic' = '" + Constants.TOPIC_DWM_ORDER_WIDE + "'," +
                "   'scan.startup.mode' = 'latest-offset', " +
                "   'format' = 'json' " +
                ")"
        );

        // 2. Windowed aggregation: 5-second tumbling windows keyed by province,
        //    producing total order amount and distinct order count per window.
        //    ts is a processing-time millisecond timestamp stamped at emission.
        Table result = tEnv.sqlQuery("select " +
                " date_format(tumble_start(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(tumble_end(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
                " province_id, " +
                " province_name, " +
                " province_area_code area_code, " +
                " province_iso_code iso_code, " +
                " province_3166_2_code iso_3166_2, " +
                " sum(split_total_amount) order_amount, " +
                " count(distinct(order_id)) order_count, " +
                " unix_timestamp() * 1000 ts " +
                "from order_wide " +
                "group by " +
                "   province_id, " +
                "   province_name, " +
                "   province_area_code, " +
                "   province_iso_code, " +
                "   province_3166_2_code, " +
                "   tumble(et, interval '5' second)"
        );

        // 3. Sink the result to ClickHouse.
        // 3.1 Convert the retract stream to a POJO stream: toRetractStream emits
        //     Tuple2<Boolean, T> where f0 == true marks an insert/accumulate row;
        //     retraction rows (f0 == false) are dropped before writing.
        SingleOutputStreamOperator<ProvinceStats> psStream = tEnv
                .toRetractStream(result, ProvinceStats.class)
                .filter(t -> t.f0)
                .map(t -> t.f1);
        // 3.2 Write to ClickHouse.
        WriteUtil.writeToClickHouse(
                psStream,
                Constants.CLICKHOUSE_TABLE_PROVINCE_STATS,
                ProvinceStats.class);
    }
}
