package com.realtime.temp;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.realtime.util.ClickHouseUtil;
import com.realtime.util.JsonDebeziumDeserializationUtil;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Flink streaming job: hourly GMV (gross merchandise value) per store.
 *
 * <p>Pipeline: MySQL CDC (order_info_realtime + store tables) → split the store
 * dimension records to a side output → map orders to {@code OrderInfo} with
 * event-time watermarks → 1-hour tumbling-window SUM of totalAmount per store,
 * joined with the store dimension → write results to ClickHouse via JDBC.
 */
public class HourGMV {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Checkpointing: exactly-once every 3s; retain externalized checkpoints on
        // cancellation so the job can be resumed from HDFS after a manual stop.
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        // Allow at most 10 restarts per day, 3 minutes apart.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
        ));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "root");

        // BUG FIX: Properties.setProperty() returns the PREVIOUS value (null here),
        // so the original `(Properties) new Properties().setProperty(...)` passed
        // null to debeziumProperties and the log-level setting was silently lost.
        // Build the Properties object explicitly instead.
        Properties debeziumProps = new Properties();
        debeziumProps.setProperty("LOG_LEVEL", "ERROR");

        SourceFunction<String> mySqlSource = MySQLSource.<String>builder()
                .hostname("hadoop101")
                .port(3306)
                .databaseList("gmall") // set captured database
                .tableList("gmall.order_info_realtime,gmall.store") // set captured table
                .username("root")
                .password("root")
                .debeziumProperties(debeziumProps)
                .deserializer(new JsonDebeziumDeserializationUtil()) // converts SourceRecord to JSON String
                .build();
        DataStreamSource<String> streamSource = env.addSource(mySqlSource);

        // Side-output tag for store dimension records; the main output carries orders.
        OutputTag<String> store_stream = new OutputTag<String>("store"){};
        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context ctx, Collector<String> out) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                String tb = jsonObject.getString("table");
                String data = jsonObject.getString("data");
                // Null-safe comparison: a record without a "table" field must not NPE the job.
                if ("store".equals(tb)) {
                    ctx.output(store_stream, data);
                } else {
                    out.collect(data);
                }
            }
        });
        DataStream<String> store = process.getSideOutput(store_stream);

        // Orders: parse JSON → OrderInfo, assign event-time watermarks (10s out-of-orderness
        // on create_time), and drop cancelled/unpaid orders (statuses 1001 and 1003).
        SingleOutputStreamOperator<OrderInfo> orderInfoStream = process.map(new MapFunction<String, OrderInfo>() {

            @Override
            public OrderInfo map(String value) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                String orderId = jsonObject.getString("id");
                String status = jsonObject.getString("order_status");
                Double totalAmount = jsonObject.getDouble("total_amount");
                Long ts = jsonObject.getLong("create_time");
                String store_id = jsonObject.getString("store_id");
                return new OrderInfo(orderId, status, totalAmount, ts, store_id);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                .withTimestampAssigner((event, timestamp) -> event.getTs())).filter(new FilterFunction<OrderInfo>() {
            @Override
            public boolean filter(OrderInfo value) throws Exception {
                return !value.getStatus().equals("1003") && !value.getStatus().equals("1001");
            }
        });

        // Store dimension: JSON → Store bean.
        SingleOutputStreamOperator<Store> storeBeanStream = store.map(new MapFunction<String, Store>() {
            @Override
            public Store map(String value) throws Exception {
                return JSONObject.parseObject(value, Store.class);
            }
        });

        // NOTE(review): debug output — consider removing or replacing with a logger in production.
        orderInfoStream.print();

        // Register the order stream with "times" as the rowtime attribute for windowing.
        tEnv.createTemporaryView("order_info", orderInfoStream,$("orderId"),$("status"),$("totalAmount"),$("store_id"),$("times").rowtime());
        tEnv.createTemporaryView("store", storeBeanStream);

        // Tumbling 1-hour window: GMV per store, left-joined with the store dimension
        // for the store name; `times` is the processing timestamp of the emission.
        Table table = tEnv.sqlQuery("select window_start,window_end,store_id,storeName,sum_total_amount,UNIX_TIMESTAMP() as `times` from (SELECT window_start, window_end,store_id,SUM(totalAmount) sum_total_amount\n" +
                "  FROM TABLE(\n" +
                "    TUMBLE(TABLE order_info, DESCRIPTOR(times), INTERVAL '60' MINUTES))\n" +
                "  GROUP BY window_start, window_end,store_id) t1 left join store on store.storeID=t1.store_id");

        // Retract stream: drop the add/retract flag and keep only the payload.
        DataStream<Tuple2<Boolean, ResultBean>> tuple2DataStream = tEnv.toRetractStream(table, ResultBean.class);
        SingleOutputStreamOperator<ResultBean> result = tuple2DataStream.map(new MapFunction<Tuple2<Boolean, ResultBean>, ResultBean>() {
            @Override
            public ResultBean map(Tuple2<Boolean, ResultBean> value) throws Exception {
                return value.f1;
            }
        });

        // Sink aggregated GMV rows to ClickHouse.
        SinkFunction<ResultBean> jdbcSink = ClickHouseUtil.<ResultBean>getJdbcSink("insert into HourGMV values(?,?,?,?,?,?)");
        result.addSink(jdbcSink);

        env.execute();
    }
}
