package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseTenvApp;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Consumes the order wide table (topic {@code dwm_order_wide}) from Kafka,
 * aggregates order amount and order count per province over tumbling
 * event-time windows, and writes the statistics to ClickHouse.
 *
 * @author kele
 * @since 2021/4/27 10:49
 */
public class DWSProvinceStatsSqlApp extends BaseTenvApp {

    public static void main(String[] args){

        // init(port, parallelism, jobName): jobName doubles as the Kafka
        // consumer group id used in the source DDL below.
        new DWSProvinceStatsSqlApp().init(4003,
                2,
                "DWSProvinceStatsSqlApp");
    }

    /**
     * Job pipeline: register the Kafka source and the ClickHouse sink,
     * then run the windowed per-province aggregation and insert the result.
     *
     * @param tenv table environment provided by {@link BaseTenvApp}
     */
    @Override
    protected void run(StreamTableEnvironment tenv) {
        createOrderWideSource(tenv);
        createProvinceStatsSink(tenv);
        aggregateByProvince(tenv).executeInsert("province_stats_2021");
    }

    /**
     * 1. Registers the Kafka source table over the order wide stream.
     * An event-time attribute {@code rowtime} is derived from the
     * {@code create_time} string, with a 5-second out-of-orderness watermark.
     */
    private void createOrderWideSource(StreamTableEnvironment tenv) {
        tenv.executeSql("create table order_wide(\n" +
                " province_id BIGINT,\n" +
                " province_name STRING,\n" +
                " province_area_code STRING,\n" +
                " province_iso_code STRING,\n" +
                " province_3166_2_code STRING,\n" +
                " order_id STRING,\n" +
                // FIX: was BIGINT — a monetary amount read as BIGINT silently
                // drops the fractional part; the sink column is DECIMAL(20, 2).
                " split_total_amount DECIMAL(16, 2),\n" +
                " create_time STRING,\n" +
                " rowtime AS to_timestamp(create_time),\n" +
                " WATERMARK FOR  rowtime  AS rowtime - interval '5' second\n" +
                ")with(\n" +
                " 'connector' = 'kafka',\n" +
                " 'topic'='dwm_order_wide',\n" +
                " 'properties.bootstrap.servers'='hadoop162:9092,hadoop163:9092,hadoop164:9092',\n" +
                " 'properties.group.id'='DWSProvinceStatsSqlApp',\n" +
                " 'scan.startup.mode' = 'earliest-offset',\n" +
                " 'format' = 'json'\n" +
                ")");
    }

    /**
     * 2. Registers the ClickHouse sink table.
     * The primary key (stt, edt, province_id) is declared NOT ENFORCED —
     * ClickHouse deduplication is handled by the table engine, not Flink.
     * Connector options follow the Aliyun ClickHouse connector:
     * https://help.aliyun.com/document_detail/185696.html
     */
    private void createProvinceStatsSink(StreamTableEnvironment tenv) {
        tenv.executeSql("create table province_stats_2021(" +
                "   stt string," +
                "   edt string," +
                "   province_id bigint," +
                "   province_name string," +
                "   area_code string," +
                "   iso_code string," +
                "   iso_3166_2 string," +
                "   order_amount decimal(20, 2)," +
                "   order_count bigint, " +
                "   ts bigint, " +
                "   primary key(stt,edt,province_id) not enforced" +
                ")with(" +
                "   'connector' = 'clickhouse', " +
                "   'url' = 'clickhouse://hadoop162:8123', " +
                "   'database-name' = 'gmall2021', " +
                "   'table-name' = 'province_stats_2021', " +
                "   'sink.batch-size' = '10', " +
                "   'sink.flush-interval' = '1000', " +
                "   'sink.max-retries' = '6' " +
                ")");
    }

    /**
     * 3. Aggregates the order wide stream per province over 5-second
     * tumbling event-time windows: total order amount and distinct order
     * count, plus a processing-time {@code ts} in milliseconds.
     *
     * @return the aggregated table, column order matching the sink schema
     */
    private Table aggregateByProvince(StreamTableEnvironment tenv) {
        return tenv.sqlQuery("select \n" +
                // FIX: alias was 'sst' — renamed to 'stt' to match the sink
                // column / primary-key name (insert is positional, but the
                // mismatched name was a typo and misleading).
                "  date_format( TUMBLE_START(rowtime, INTERVAL '5' second),'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "  date_format( TUMBLE_END(rowtime, INTERVAL '5' second),'yyyy-MM-dd HH:mm:ss') edt,\n" +
                "  province_id ,\n" +
                "  province_name,\n" +
                "  province_area_code area_code,\n" +
                "  province_iso_code iso_code,\n" +
                "  province_3166_2_code iso_3166_2,\n" +
                "  sum(split_total_amount) order_amount,\n" +
                "  count(distinct order_id) order_count,\n" +
                "  unix_timestamp() * 1000 ts\n" +
                "from order_wide\n" +
                "group by \n" +
                "  TUMBLE(rowtime, INTERVAL '5' second),\n" +
                "  province_id,\n" +
                "  province_name,\n" +
                "  province_iso_code,\n" +
                "  province_3166_2_code," +
                "  province_area_code");
    }
}
