package com.gy.flink.project.data.etl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.internals.KeyedSerializationSchemaWrapper;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;

import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;

/*
flink  source源  file/collection/自定义
Co是连接的流

 */
public class DataClean {

    public static void main(String[] args) throws Exception {
        String consumerTopic = "allData";
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "centos102:9092");
        properties.put("group.id", "sum_uat_001");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // Parallelism is chosen to match the partition count of the Kafka topic.
        env.setParallelism(3);
        env.enableCheckpointing(60 * 1000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(10 * 1000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // NOTE(review): DELETE_ON_CANCELLATION discards the externalized checkpoint when the
        // job is cancelled; switch to RETAIN_ON_CANCELLATION if manual restore is needed.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
//        env.setStateBackend(new RocksDBStateBackend("/flink/RocksDBStateBackend/etl/"));

        FlinkKafkaConsumer<String> consumer011 = new FlinkKafkaConsumer<>(consumerTopic, new SimpleStringSchema(), properties);
        DataStreamSource<String> dataStream = env.addSource(consumer011);
        // Broadcast the Redis-derived mapping so every downstream task receives the full map.
        DataStream<Map<String, String>> redisStream = env.addSource(new RedisSource()).broadcast();

        SingleOutputStreamOperator<String> etlStream = dataStream.connect(redisStream)
                .flatMap(new CoFlatMapFunction<String, Map<String, String>, String>() {
                    // Latest countryCode -> area mapping received from the broadcast stream.
                    Map<String, String> cacheMap = new HashMap<String, String>();

                    /**
                     * Kafka side: fan out every element of the "data" array, enriched
                     * with the record's countryCode, its resolved area, and dt.
                     */
                    @Override
                    public void flatMap1(String value, Collector<String> out) throws Exception {
                        JSONObject originJsonData = JSON.parseObject(value);
                        String countryCode = originJsonData.getString("countryCode");
                        // May be null until the first broadcast arrives or for unknown codes.
                        String area = cacheMap.get(countryCode);
                        String dt = originJsonData.getString("dt");
                        JSONArray datas = originJsonData.getJSONArray("data");
                        datas.forEach(x -> {
                            JSONObject etlJson = (JSONObject) x;
                            etlJson.put("countryCode", countryCode);
                            etlJson.put("area", area);
                            etlJson.put("dt", dt);

                            out.collect(etlJson.toJSONString());
                        });
                    }

                    /** Redis side: replace the cached mapping wholesale. */
                    @Override
                    public void flatMap2(Map<String, String> value, Collector<String> out) throws Exception {
                        cacheMap = value;
                    }
                });

        String outputTopic = "allDataClean";

        Properties outputProp = new Properties();
        outputProp.put("bootstrap.servers", "centos101:9092");

        FlinkKafkaProducer<String> outputKafka = new FlinkKafkaProducer<>(outputTopic,
                new KeyedSerializationSchemaWrapper<String>(new SimpleStringSchema()),
                outputProp);
        etlStream.addSink(outputKafka);

        etlStream.print();


        env.execute(DataClean.class.getSimpleName());
    }

    /**
     * Custom source that periodically reads the "areas" hash from a Redis cluster
     * and emits it as a countryCode -> area map.
     *
     * SourceFunction runs with parallelism 1; use ParallelSourceFunction for a
     * parallel source.
     */
    static class RedisSource implements SourceFunction<Map<String, String>> {
        Logger log = LoggerFactory.getLogger(this.getClass());

        private JedisCluster jedis;
        private String redisKey = "areas";
        // volatile: cancel() is invoked from a different thread than run().
        private volatile boolean isRunning = true;


        @Override
        public void run(SourceContext<Map<String, String>> ctx) throws Exception {
            this.jedis = new JedisCluster(new HashSet<>(Arrays.asList(
                    new HostAndPort("centos103", 17001),
                    new HostAndPort("centos103", 17002),
                    new HostAndPort("centos103", 17003),
                    new HostAndPort("centos103", 17004),
                    new HostAndPort("centos103", 17005),
                    new HostAndPort("centos103", 17006)
            )), 2000, 2000, 5, "123456", new GenericObjectPoolConfig());

            while (isRunning) {
                try {
                    // Redis stores area -> "cc1,cc2,..."; invert it to countryCode -> area.
                    // Allocate a fresh map every cycle: the previously emitted map is still
                    // referenced by the downstream operator and must not be cleared/mutated.
                    Map<String, String> map = new HashMap<String, String>();
                    Map<String, String> values = jedis.hgetAll(redisKey);
                    values.forEach((key, value) ->
                            Arrays.stream(value.split(",")).forEach(cc -> map.put(cc, key)));
                    // Emit before sleeping (the original slept first, delaying the initial
                    // mapping by a full minute). An empty result is skipped, not fatal —
                    // the original `return` silently killed the source forever.
                    if (!map.isEmpty()) {
                        ctx.collect(map);
                    }
                    TimeUnit.SECONDS.sleep(60);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop polling instead of looping on.
                    Thread.currentThread().interrupt();
                    log.error("redis连接器异常：{}", e.getMessage());
                    break;
                }
            }

        }

        @Override
        public void cancel() {
            isRunning = false;
            if (jedis != null) {
                try {
                    jedis.close();
                } catch (IOException e) {
                    // Best-effort close; log with full stack trace instead of printStackTrace.
                    log.error("failed to close redis cluster connection", e);
                }
            }
        }
    }

}
