package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.bean.ProvinceStats;
import com.atguigu.gmall.realtime.utils.ClickHouseUtil;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
 * FlinkSQL implementation of the province/region-topic wide-table aggregation:
 * reads the dwm_order_wide Kafka topic, aggregates order count and amount per
 * province over tumbling event-time windows, and writes results to ClickHouse.
 */
public class ProvinceStatsSqlApp {
    public static void main(String[] args) throws Exception {

        //TODO 0 Prepare the basic stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        // Checkpoint settings
        // NOTE(review): checkpointing is not configured here — on failure the job
        // restarts from the committed Kafka offsets without exactly-once state
        // guarantees. Configure env.enableCheckpointing(...) and a state backend
        // before running this in production.

        //TODO 1 Create the Table/SQL environment on top of the DataStream environment
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        //TODO 2 Register the Kafka source topic as a dynamic table
        String groupId = "province_stats";
        String orderWideTopic = "dwm_order_wide";

        // rowtime is computed from the create_time string; the watermark
        // "WATERMARK FOR rowtime AS rowtime" is a strictly-ascending watermark,
        // i.e. it assumes event times never go backwards.
        tableEnv.executeSql("CREATE TABLE ORDER_WIDE(province_id BIGINT, "
                + "province_name STRING,province_area_code STRING, "
                + "province_iso_code STRING, province_3166_2_code STRING,order_id STRING, "
                + "split_total_amount DOUBLE, create_time STRING, rowtime AS TO_TIMESTAMP(create_time) ,"
                + " WATERMARK FOR rowtime AS rowtime) "
                + "WITH (" + MyKafkaUtil.getKafkaDDL(orderWideTopic, groupId) + ")");

        //TODO 3 Aggregate per province over 10-second tumbling event-time windows:
        //  - order_count de-duplicates by order_id (one order may span several rows)
        //  - ts is the processing-time timestamp of emission, in milliseconds
        Table provinceStatsTable = tableEnv.sqlQuery("select " +
                "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') stt, " +
                "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') edt , " +
                " province_id,province_name,province_area_code area_code," +
                "province_iso_code iso_code ,province_3166_2_code iso_3166_2 ," +
                "COUNT( DISTINCT  order_id) order_count, sum(split_total_amount) order_amount," +
                "UNIX_TIMESTAMP()*1000 ts "+
                " from  ORDER_WIDE group by  TUMBLE(rowtime, INTERVAL '10' SECOND )," +
                " province_id,province_name,province_area_code,province_iso_code,province_3166_2_code ");

        //TODO 4 Convert the dynamic table to an append-only DataStream.
        // A tumbling-window aggregation emits each window result exactly once,
        // so an append stream is sufficient (no retract stream required).
        DataStream<ProvinceStats> provinceStatsDS =
                tableEnv.toAppendStream(provinceStatsTable, ProvinceStats.class);

        provinceStatsDS.print(">>>>");

        //TODO 5 Sink the aggregated rows into ClickHouse
        provinceStatsDS.addSink(
                ClickHouseUtil.getJdbcSink(
                        "insert into  province_stats_2021  values(?,?,?,?,?,?,?,?,?,?)"
                )
        );

        // Name the job so it is identifiable in the Flink web UI / REST API.
        env.execute("ProvinceStatsSqlApp");
    }
}
