package com.atguigu.realtime.app.dws;


import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.bean.ProvinceStats;
import com.atguigu.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static com.atguigu.realtime.common.Constant.KAFKA_BROKERS;
import static com.atguigu.realtime.common.Constant.TOPIC_DWM_ORDER_WIDE;

/**
 * DWS-layer job: aggregates the DWM order-wide Kafka topic into per-province
 * order statistics over 5-second tumbling event-time windows and sinks the
 * result into ClickHouse.
 */
public class DwsProvinceStatsApp extends BaseSqlApp {

    public static void main(String[] args) {
        // port 4003, parallelism 1, job/group name "DwsProvinceStatsApp"
        new DwsProvinceStatsApp().init(4003, 1, "DwsProvinceStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // Step 1: declare a dynamic table backed by the order-wide Kafka topic.
        // Event time is derived from create_time with a 3-second watermark delay.
        String orderWideDdl =
                "create table ow(" +
                " province_id bigint, " +
                " province_name string, " +
                " province_area_code string, " +
                " province_iso_code string, " +
                " province_3166_2_code string," +
                " split_total_amount decimal(20,2)," +
                " order_id bigint," +
                " create_time string," +
                " et as to_timestamp(create_time)," +
                " watermark for et as et - interval '3' second " +
                ")with(" +
                " 'connector'='kafka'," +
                " 'properties.bootstrap.servers'='" + KAFKA_BROKERS + "'," +
                " 'properties.group.id'='DwsProvinceStatsApp'," +
                " 'topic'='" + TOPIC_DWM_ORDER_WIDE + "'," +
                " 'format'='json'," +
                " 'scan.startup.mode'='earliest-offset' " +
                ")";
        tEnv.executeSql(orderWideDdl);

        // Step 2: tumbling-window aggregation per province.
        // order_amount sums the split amounts; order_count counts distinct orders
        // (an order may span several rows in the wide table); ts is a processing
        // timestamp in milliseconds used downstream as a version marker.
        String statsSql =
                "select " +
                " province_id, " +
                " province_name, " +
                " province_area_code area_code, " +
                " province_iso_code iso_code, " +
                " province_3166_2_code iso_3166_2, " +
                " date_format(tumble_start(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(tumble_end(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') edt, " +
                " sum(split_total_amount) order_amount, " +
                " count(distinct(order_id)) order_count, " +
                " unix_timestamp() *1000 ts " +
                " from ow " +
                " group by " +
                " tumble(et,interval '5' second), " +
                " province_id, " +
                " province_name, " +
                " province_area_code, " +
                " province_iso_code, " +
                " province_3166_2_code ";
        Table provinceStats = tEnv.sqlQuery(statsSql);

        // Step 3: write the result to ClickHouse.
        // A pure-SQL sink is not an option here: the ClickHouse connector jar is
        // only published up to Flink 1.2, so the table is converted to a
        // DataStream and written through our own ClickHouse sink instead.
        //
        // The query aggregates, so updates arrive as retractions; keep only the
        // insert records (f0 == true) and unwrap the POJO payload.
        tEnv.toRetractStream(provinceStats, ProvinceStats.class)
                .filter(record -> record.f0)
                .map(record -> record.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("gmall2021", "province_stats_2021", ProvinceStats.class));
    }
}
