package app.dws;

import app.dwd.BaseAppSQL;
import bean.ProvinceStats;
import common.Constant;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import util.GmallSinkUtil;

/**
 * DWS-layer job: per-province order statistics.
 *
 * <p>Reads wide order records from the Kafka topic {@code dwm_order_wide},
 * aggregates order amount and distinct order count per province over 5-second
 * tumbling event-time windows, and writes the result rows to ClickHouse
 * through a custom JDBC sink.
 */
public class DWSProvinceStatsApp extends BaseAppSQL {

    public static void main(String[] args) {
        // Port 4236, parallelism 1, job name "DWSProvinceStatsApp" (see BaseAppSQL.init).
        new DWSProvinceStatsApp().init(4236, 1, "DWSProvinceStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // 1. Register the Kafka-backed source table over the DWM order-wide topic.
        createOrderWideSource(tEnv);

        // 2. Continuous query: windowed aggregation per province.
        Table table = aggregateByProvince(tEnv);

        // 3. Sink the retract stream to ClickHouse.
        //    A SQL "insert into" via a ClickHouse table connector would also work,
        //    but here we use a custom sink built from the ProvinceStats POJO:
        //    reflection on generic Row cannot recover field types, so a POJO is required.
        writeToClickHouse(tEnv, table);
    }

    /**
     * Creates the {@code order_wide} source table on the Kafka topic
     * {@code dwm_order_wide}, with an event-time attribute derived from
     * {@code create_time} and a 5-second watermark delay.
     */
    private void createOrderWideSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table order_wide(" +
                            "province_id bigint," +
                            "province_name string," +
                            "province_area_code string," +
                            "province_iso_code string," +
                            "province_3166_2_code string," +
                            "split_total_amount decimal(20,2)," +
                            "order_id bigint," +
                            "create_time string," +
                            // Event time: parsed from the create_time string column.
                            "et as to_timestamp(create_time) ," +
                            // Tolerate up to 5 seconds of out-of-orderness.
                            "watermark for et as et - interval '5' second" +
                            ")with(" +
                            "  'connector' = 'kafka'," +
                            "  'topic' = 'dwm_order_wide'," +
                            "  'properties.bootstrap.servers' = 'hadoop162:9092'," +
                            "  'properties.group.id' = 'DWSProvinceStatsApp'," +
                            "  'scan.startup.mode' = 'latest-offset'," +
                            "  'format' = 'json' " +
                            ")");
    }

    /**
     * Aggregates order amount and distinct order count per province over
     * 5-second tumbling event-time windows.
     *
     * @return a dynamic table with window bounds (stt/edt), province
     *         dimensions, order_amount, order_count and a processing
     *         timestamp ts (epoch millis)
     */
    private Table aggregateByProvince(StreamTableEnvironment tEnv) {
        // NOTE: fixed year pattern 'yyyy' (was 'yyy', a typo that only worked
        // because SimpleDateFormat pads the year to its actual width).
        return tEnv.sqlQuery("select" +
                                 " date_format(tumble_start(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') stt, " +
                                 " date_format(tumble_end(et,interval '5' second) ,'yyyy-MM-dd HH:mm:ss') edt, " +
                                 " province_id, " +
                                 " province_name, " +
                                 " province_area_code area_code, " +
                                 " province_iso_code iso_code, " +
                                 " province_3166_2_code iso_3166_2, " +
                                 " sum(split_total_amount) order_amount, " +
                                 " count(distinct order_id) order_count, " +
                                 " unix_timestamp() * 1000 ts " +
                                 " from order_wide " +
                                 " group by " +
                                 " tumble(et,interval '5' second), " +
                                 " province_id, " +
                                 " province_name, " +
                                 " province_area_code, " +
                                 " province_iso_code, " +
                                 " province_3166_2_code ");
    }

    /**
     * Converts the aggregated table to a retract stream of {@link ProvinceStats}
     * and sinks the insert (add) records to ClickHouse.
     */
    private void writeToClickHouse(StreamTableEnvironment tEnv, Table table) {
        tEnv
            // Windowed group-by with count(distinct ...) produces retractions;
            // map to a POJO because reflection cannot resolve generic Row fields.
            .toRetractStream(table, ProvinceStats.class)
            // Keep only add records (f0 == true); drop retract records.
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(GmallSinkUtil.getClickHouseSink(
                Constant.CLICKHOUSE_DATABASE,
                Constant.CLICKHOUSE_PROVINCE_STATS_2021,
                ProvinceStats.class));
    }
}
