package com.ry.flink.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.ry.flink.source.RedisSource;
import com.ry.flink.utils.Constants;
import com.ry.flink.utils.FlinkKafkaUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.streaming.connectors.kafka.internals.KeyedSerializationSchemaWrapper;
import org.apache.flink.util.Collector;
import java.util.HashMap;
import java.util.Properties;

/**
 * 完成redis和kafka的数据拼接
 *  基于connect实现数据拼接
 * */
/**
 * Flink job that enriches Kafka indicator events with a Redis-backed dimension table.
 *
 * <p>Pipeline: Kafka source (JSON events) is {@code connect}-ed with a broadcast Redis
 * source (countryID -&gt; area map). Each nested record in the event's {@code datas} array
 * is stamped with the event's datetime and the resolved area, then written to a result
 * Kafka topic.
 */
public class IndicatorProcessMain {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism chosen to match the number of Kafka topic partitions.
        env.setParallelism(1);
        // Enable checkpointing every 20s to back up operator state.
        env.enableCheckpointing(20000);
        // Exactly-once processing semantics.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Minimum pause between the end of one checkpoint and the start of the next.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000);
        // At most one checkpoint in flight at a time.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Abort a checkpoint if it takes longer than 60s.
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        // Keep externalized checkpoint data when the job is cancelled, so it can be restored.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // State backend strategy (disabled; defaults apply).
//        env.setStateBackend(new FsStateBackend());
        // Restart strategy (disabled; defaults apply).
//        env.setRestartStrategy();

        String topic = "realtime_indicators1";
        // NOTE(review): producer properties are used for BOTH the consumer and the producer
        // below — verify FlinkKafkaUtils also sets the consumer-side settings
        // (group.id, deserializers) this consumer needs.
        Properties properties = FlinkKafkaUtils.getProducerProperties(Constants.BROKERS);
        // Kafka consumer for the raw indicator events.
        FlinkKafkaConsumer011<String> kafkaConsumer011 = new FlinkKafkaConsumer011<String>(topic, new SimpleStringSchema(), properties);
        // Kafka source stream.
        DataStreamSource<String> dataSource = env.addSource(kafkaConsumer011);

        /**
         * Redis dimension source, broadcast so every downstream task
         * receives the full countryID -> area mapping.
         */
        DataStream<HashMap<String, String>> redisSource = env.addSource(new RedisSource()).broadcast();

        SingleOutputStreamOperator<String> stringSingleOutputStreamOperator = dataSource.connect(redisSource).flatMap(new CoFlatMapFunction<String, HashMap<String, String>, String>() {
            /**
             * Latest countryID -> area snapshot from Redis. Initialized to an empty map so
             * flatMap1 can run safely before the first Redis broadcast arrives (lookups
             * simply return null and the record is filtered out by the null check below).
             */
            HashMap<String, String> redisSourceMap = new HashMap<>();
            /**
             * Sample Kafka payload:
             * {"datetime":"2021-06-03 10:51:52","countryID":"SH","datas":[{"type":"a1","artifact_code":0.1,"sku_code
             * char":"A"},{"type":"b1","artifact_code":0.2,"sku_codechar":"B"}]}
             */
            @Override
            public void flatMap1(String s, Collector<String> out) {
                JSONObject jsonObject = JSON.parseObject(s);
                String dt = jsonObject.getString("datetime");
                String countryID = jsonObject.getString("countryID");
                // Resolve the area for this countryID from the broadcast Redis table.
                String area = redisSourceMap.get(countryID);
                // BUG FIX: the payload's nested array key is "datas" (see sample above);
                // reading "data" returned null and crashed on datas.size().
                JSONArray datas = jsonObject.getJSONArray("datas");
                if (datas == null) {
                    // Malformed message without the expected array — skip instead of failing the task.
                    return;
                }
                for (int i = 0; i < datas.size(); i++) {
                    JSONObject jsonObj = datas.getJSONObject(i);
                    jsonObj.put("datetime", dt);
                    jsonObj.put("countryID", area);
                    // Emit only fully-enriched records: both timestamp and resolved area present.
                    if (jsonObj.getString("datetime") != null &&
                            jsonObj.getString("datetime").length() > 0 &&
                            jsonObj.getString("countryID") != null &&
                            jsonObj.getString("countryID").length() > 0) {
                        out.collect(jsonObj.toString());
                    }
                }
            }

            @Override
            public void flatMap2(HashMap<String, String> stringStringHashMap, Collector<String> collector) {
                // Replace the snapshot wholesale; emits nothing downstream.
                this.redisSourceMap = stringStringHashMap;
            }
        });

        /**
         * Write enriched records to the result Kafka topic.
         */
        String saveResultTopic = "realtime_indicators1_result";
        FlinkKafkaProducer011<String> producer011 = new FlinkKafkaProducer011<String>(saveResultTopic, new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), properties);
        stringSingleOutputStreamOperator.addSink(producer011);
        env.execute("IndicatorProcessMain");
    }

}
