package com.raylu.realtime.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.raylu.realtime.app.func.ConvertTimeStampFunction;
import com.raylu.realtime.app.func.ConvertTimeStrFunction;
import com.raylu.realtime.app.template.DimAsyncFunction;
import com.raylu.realtime.bean.ProvinceStats;
import com.raylu.realtime.utils.ClickHouseUtil;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Description: DWS-layer job that aggregates page-view, unique-visitor and order
 * streams into per-province statistics over 3-second tumbling windows, enriches
 * them with province dimension data via async lookup, and writes the results to
 * the ClickHouse table {@code gmall.dws_province_stats}.
 * <p>
 * Create by lucienoz on 2022/1/9.
 * Copyright © 2022 lucienoz. All rights reserved.
 */
public class ProvinceStatsApp {

    /**
     * Insert statement for the ClickHouse target table; shared by both
     * dimension-enriched sink streams (previously duplicated inline).
     */
    private static final String PROVINCE_STATS_INSERT_SQL =
            "INSERT INTO gmall.dws_province_stats (stt, edt, province_id, province_name, region_id, area_code, iso_code, iso_3166_2, pv_cnt, uv_cnt, order_cnt, order_amount) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";

    public static void main(String[] args) throws Exception {
        Properties load = PropertiesUtil.load("config.properties");

        //TODO 1. 准备运行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        //TODO 2. 配置检查点 (checkpointing intentionally disabled for now; re-enable before production)
//        env.enableCheckpointing(5000);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setStateBackend(new FsStateBackend(load.getProperty("province.stats.app.fsstatebackend.url")));
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, org.apache.flink.api.common.time.Time.seconds(10L), org.apache.flink.api.common.time.Time.hours(10L)));

        //TODO 3. 准备Flink表运行环境
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        //TODO 4. 创建flinkSql表
        //TODO 4.0 注册自定义函数 (UDFs used by the DDL and queries below)
        tableEnvironment.createTemporarySystemFunction("to_timestamp", new ConvertTimeStampFunction());
        tableEnvironment.createTemporarySystemFunction("to_timestampstr", new ConvertTimeStrFunction());

        String groupId = load.getProperty("province.stats.app.kafka.group-id");

        //TODO 4.1 / 4.2 PAGE_LOG and UV_LOG share an identical schema; register both via one helper
        createLogSourceTable(tableEnvironment, "PAGE_LOG",
                load.getProperty("province.stats.app.kafka.source-topic1"), groupId);
        createLogSourceTable(tableEnvironment, "UV_LOG",
                load.getProperty("province.stats.app.kafka.source-topic2"), groupId);

        //TODO 4.3 创建Flink表order_info (event time derived from create_time)
        tableEnvironment.executeSql("create table order_info(\n" +
                "    id STRING,\n" +
                "    total_amount DECIMAL(18,6),\n" +
                "    order_status STRING,\n" +
                "    user_id STRING,\n" +
                "    create_time STRING,\n" +
                "    operate_time STRING,\n" +
                "    expire_time STRING,\n" +
                "    process_status STRING,\n" +
                "    parent_order_id STRING,\n" +
                "    province_id STRING,\n" +
                "    activity_reduce_amount DECIMAL(18,6),\n" +
                "    coupon_reduce_amount DECIMAL(18,6),\n" +
                "    original_total_amount DECIMAL(18,6),\n" +
                "    feight_fee DECIMAL(18,6),\n" +
                "    feight_fee_reduce DECIMAL(18,6),\n" +
                "    ts BIGINT,\n" +
                "    rowtime as to_timestamp(create_time),\n" +
                "    WATERMARK FOR rowtime AS rowtime - INTERVAL '3' SECOND\n" +
                ") WITH (" +
                KafkaSourceUtil.getConnectorDDl(load.getProperty("province.stats.app.kafka.source-topic3"), groupId, "json") +
                ")");

        //TODO 4.4 上述各个source表分别进行聚合汇总,再将结果转成流进行union
        // Each projection maps one event to the unified ProvinceStats shape with unit
        // counters (1/0), so the windowed SUMs yield per-window counts. Page/UV events
        // carry only area_code; orders carry only province_id.
        Table province_stats_page = aggregateProvinceStats(tableEnvironment,
                "SELECT '' stt, '' edt, '' province_id, '' province_name, '' region_id, " +
                        "ar area_code, '' iso_code, '' iso_3166_2, " +
                        "1 pv_cnt, 0 uv_cnt, 0 order_cnt, 0 order_amount, ts, rowtime from PAGE_LOG");
        Table province_stats_uv = aggregateProvinceStats(tableEnvironment,
                "SELECT '' stt, '' edt, '' province_id, '' province_name, '' region_id, " +
                        "ar area_code, '' iso_code, '' iso_3166_2, " +
                        "0 pv_cnt, 1 uv_cnt, 0 order_cnt, 0 order_amount, ts, rowtime from UV_LOG");
        Table province_stats_order = aggregateProvinceStats(tableEnvironment,
                "SELECT '' stt, '' edt, province_id, '' province_name, '' region_id, " +
                        "'' area_code, '' iso_code, '' iso_3166_2, " +
                        "0 pv_cnt, 0 uv_cnt, 1 order_cnt, total_amount order_amount, ts, rowtime from order_info");

        DataStream<ProvinceStats> provinceStatsPage = tableEnvironment.toAppendStream(province_stats_page, ProvinceStats.class);
        DataStream<ProvinceStats> provinceStatsUv = tableEnvironment.toAppendStream(province_stats_uv, ProvinceStats.class);
        DataStream<ProvinceStats> provinceStatsOrder = tableEnvironment.toAppendStream(province_stats_order, ProvinceStats.class);

        DataStream<ProvinceStats> provinceStatsDS = provinceStatsPage.union(provinceStatsUv, provinceStatsOrder);

        // TODO 5. 根据维关联条件进行分流
        // Main output: records keyed by province_id; side output: records keyed by area_code.
        OutputTag<ProvinceStats> provinceStatsOutputTag = new OutputTag<ProvinceStats>("provinceStatsWithAreaCode") {
        };

        SingleOutputStreamOperator<ProvinceStats> provinceStatsWithProvinceIdDS = provinceStatsDS.process(new ProcessFunction<ProvinceStats, ProvinceStats>() {
            @Override
            public void processElement(ProvinceStats value, Context ctx, Collector<ProvinceStats> out) {
                // NOTE(review): a record carrying BOTH keys would be emitted twice; the
                // upstream projections set exactly one of area_code / province_id, so in
                // practice each record takes a single branch.
                if (value.getArea_code() != null && !"".equals(value.getArea_code())) {
                    ctx.output(provinceStatsOutputTag, value);
                }
                if (value.getProvince_id() != null && !"".equals(value.getProvince_id())) {
                    out.collect(value);
                }
            }
        });

        // Phoenix dimension table queried by both async lookups.
        String dimProvinceTable = load.getProperty("base.db.app.cdc.phoenix.schem") + "." + "DIM_BASE_PROVINCE";

        //TODO 5.1 更新province维度数据,通过area_code进行关联
        DataStream<ProvinceStats> provinceStatsWithAreaCodeDS = provinceStatsWithProvinceIdDS.getSideOutput(provinceStatsOutputTag);
        SingleOutputStreamOperator<ProvinceStats> provinceStatsAreaCodeDS = AsyncDataStream.unorderedWait(provinceStatsWithAreaCodeDS,
                new DimAsyncFunction<ProvinceStats>() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public Tuple2<String, String>[] getConditions() {
                        // stream-field -> dimension-column pairs used to build the lookup
                        return new Tuple2[]{Tuple2.of("area_code", "AREA_CODE")};
                    }

                    @Override
                    public String getTable() {
                        return dimProvinceTable;
                    }

                    @Override
                    public void join(ProvinceStats input, JSONObject jsonObject) {
                        // Fill in every dimension attribute except the join key itself.
                        input.setProvince_id(jsonObject.getString("ID"));
                        input.setProvince_name(jsonObject.getString("NAME"));
                        input.setIso_3166_2(jsonObject.getString("ISO_3166_2"));
                        input.setIso_code(jsonObject.getString("ISO_CODE"));
                        input.setRegion_id(jsonObject.getString("REGION_ID"));
                    }
                }, 60, TimeUnit.SECONDS);

        //TODO 5.2 更新province维度数据,通过province_id进行关联
        SingleOutputStreamOperator<ProvinceStats> provinceStatsProvinceIdDS = AsyncDataStream.unorderedWait(provinceStatsWithProvinceIdDS,
                new DimAsyncFunction<ProvinceStats>() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public Tuple2<String, String>[] getConditions() {
                        return new Tuple2[]{Tuple2.of("province_id", "ID")};
                    }

                    @Override
                    public String getTable() {
                        return dimProvinceTable;
                    }

                    @Override
                    public void join(ProvinceStats input, JSONObject jsonObject) {
                        input.setArea_code(jsonObject.getString("AREA_CODE"));
                        input.setProvince_name(jsonObject.getString("NAME"));
                        input.setIso_3166_2(jsonObject.getString("ISO_3166_2"));
                        input.setIso_code(jsonObject.getString("ISO_CODE"));
                        input.setRegion_id(jsonObject.getString("REGION_ID"));
                    }
                }, 60, TimeUnit.SECONDS);

        //TODO 6. 写入ClickHouse
        provinceStatsProvinceIdDS.addSink(ClickHouseUtil.getJdbcSink(PROVINCE_STATS_INSERT_SQL));
        provinceStatsAreaCodeDS.addSink(ClickHouseUtil.getJdbcSink(PROVINCE_STATS_INSERT_SQL));
        // Debug output only — switch to a logger / remove before production.
        provinceStatsProvinceIdDS.print("province_id");
        provinceStatsAreaCodeDS.print("area_code");

        env.execute("ProvinceStatsApp");
    }

    /**
     * Registers a Kafka-backed log source table ({@code PAGE_LOG} / {@code UV_LOG});
     * both use this schema. {@code rowtime} is derived from the {@code ts} field via
     * the registered {@code to_timestamp} UDF, with a 3-second out-of-orderness
     * watermark for event-time windowing.
     *
     * @param tableEnvironment table environment to register the table in
     * @param tableName        name of the Flink table to create
     * @param topic            Kafka source topic
     * @param groupId          Kafka consumer group id
     */
    private static void createLogSourceTable(StreamTableEnvironment tableEnvironment,
                                             String tableName, String topic, String groupId) {
        tableEnvironment.executeSql("CREATE TABLE " + tableName + " (\n" +
                "  during_time     STRING,\n" +
                "  item            STRING,\n" +
                "  item_type       STRING,\n" +
                "  last_page_id    STRING,\n" +
                "  page_id         STRING,\n" +
                "  source_type     STRING,\n" +
                "  ar              STRING,\n" +
                "  ba              STRING,\n" +
                "  ch              STRING,\n" +
                "  is_new          STRING,\n" +
                "  md              STRING,\n" +
                "  mid             STRING,\n" +
                "  os              STRING,\n" +
                "  uid             STRING,\n" +
                "  vc              STRING,\n" +
                "  ts              BIGINT,\n" +
                "    rowtime as to_timestamp(ts),\n" +
                "    WATERMARK FOR rowtime AS rowtime - INTERVAL '3' SECOND\n" +
                ") WITH (\n" +
                KafkaSourceUtil.getConnectorDDl(topic, groupId, "json") +
                ")");
    }

    /**
     * Wraps a per-event projection (one row per source event in the unified
     * ProvinceStats column layout) in a 3-second tumbling-window aggregation,
     * grouping on every dimension column so the SUMs produce per-province counters.
     * <p>
     * Note: the original three inline copies of this query listed {@code area_code}
     * twice in GROUP BY; the redundant duplicate grouping key is removed here.
     *
     * @param tableEnvironment table environment to run the query against
     * @param sourceSelect     SELECT statement producing the unified column layout
     * @return the windowed aggregate table
     */
    private static Table aggregateProvinceStats(StreamTableEnvironment tableEnvironment, String sourceSelect) {
        return tableEnvironment.sqlQuery("with province_stats as (\n" +
                sourceSelect + "\n" +
                ") " +
                "select \n" +
                "to_timestampstr(TUMBLE_START(rowtime, INTERVAL '3' SECOND)) as stt,\n" +
                "to_timestampstr(TUMBLE_END(rowtime, INTERVAL '3' SECOND)) as edt,\n" +
                "province_id,\n" +
                "province_name,\n" +
                "region_id,\n" +
                "area_code,\n" +
                "iso_code,\n" +
                "iso_3166_2,\n" +
                "sum(pv_cnt) pv_cnt,\n" +
                "sum(uv_cnt) uv_cnt,\n" +
                "sum(order_cnt) order_cnt,\n" +
                "sum(order_amount) order_amount\n" +
                "from province_stats\n" +
                "GROUP BY TUMBLE(rowtime, INTERVAL '3' SECOND),\n" +
                "province_id,\n" +
                "province_name,\n" +
                "region_id,\n" +
                "area_code,\n" +
                "iso_code,\n" +
                "iso_3166_2");
    }
}
