package net.sina.realtime.traffic.controller;


import net.sina.realtime.traffic.bean.AreaControl;
import net.sina.realtime.traffic.bean.MonitorInfo;
import net.sina.realtime.traffic.bean.MyTrigger;
import net.sina.realtime.traffic.schema.JSONDeserializationSchema;
import net.sina.realtime.traffic.utils.BloomFilterUtil;
import net.sina.realtime.traffic.utils.JedisUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.util.HashMap;
import java.util.Properties;

public class _06VehicleDistributionV4 {

    /**
     * Flink job: counts DISTINCT vehicles per area inside 1-minute
     * processing-time tumbling windows.
     *
     * Pipeline: Kafka topic "topic-car" (JSON -> MonitorInfo)
     *        -> keyBy(areaId) -> 1-minute tumbling window with a custom trigger
     *        -> distinct count via a Redis-bitmap Bloom filter
     *        -> result written back to Redis as plain SET key/values.
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);  // single parallelism keeps the per-operator hashMap coherent

        // 2. Kafka source
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "node101:9092,node102:9092");
        properties.setProperty("group.id", "g5");

        FlinkKafkaConsumer<MonitorInfo> consumer = new FlinkKafkaConsumer<>("topic-car",
                new JSONDeserializationSchema<>(MonitorInfo.class), properties);
        consumer.setStartFromEarliest(); // consume from the earliest available offset
        DataStreamSource<MonitorInfo> ds1 = env.addSource(consumer);
        ds1.print();

        SingleOutputStreamOperator<AreaControl> resultDf = ds1.keyBy(k -> k.getAreaId())
                .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))
                .trigger(new MyTrigger())
                .apply(new WindowFunction<MonitorInfo, AreaControl, String, TimeWindow>() {

                    // Per-(area, window-start) distinct-car counters. A map (rather than a
                    // single "carCount += 1" field) is required because one WindowFunction
                    // instance serves every key and may be fired repeatedly by MyTrigger.
                    // NOTE(review): entries are never evicted, so this map grows with every
                    // new window — consider cleaning up keys of closed windows.
                    HashMap<String, Integer> hashMap = new HashMap<String, Integer>();

                    @Override
                    public void apply(String areaId, TimeWindow window, Iterable<MonitorInfo> input, Collector<AreaControl> out) throws Exception {
                        long start = window.getStart();
                        long end = window.getEnd();
                        String startStr = DateFormatUtils.format(start, "yyyy-MM-dd HH:mm:ss");
                        String endStr = DateFormatUtils.format(end, "yyyy-MM-dd HH:mm:ss");

                        // Loop-invariant: one Bloom-filter bitmap per (area, window-start).
                        String redisKey = "areaId:" + areaId + ":startTime:" + start;

                        // FIX: the Jedis connection used to leak (it was never closed);
                        // try-with-resources closes it even if an iteration throws.
                        try (Jedis jedis = new Jedis("node101", 6379)) {
                            for (MonitorInfo monitorInfo : input) {
                                String car = monitorInfo.getCar();
                                // Two bit offsets per car plate inside the window's bitmap.
                                int[] offsets = BloomFilterUtil.getOffsets(car);

                                if (hashMap.containsKey(redisKey)) {
                                    // The car is new to this window iff at least one of
                                    // its Bloom-filter bits is still unset.
                                    boolean a1 = jedis.getbit(redisKey, offsets[0]);
                                    boolean b1 = jedis.getbit(redisKey, offsets[1]);
                                    if (!a1 || !b1) {
                                        hashMap.put(redisKey, hashMap.get(redisKey) + 1);
                                        jedis.setbit(redisKey, offsets[0], true);
                                        jedis.setbit(redisKey, offsets[1], true);
                                    }
                                } else {
                                    hashMap.put(redisKey, 1);
                                    // FIX: the first car's bits were never written, so a
                                    // re-occurrence of that car was counted twice. Record
                                    // its bits like any other car.
                                    jedis.setbit(redisKey, offsets[0], true);
                                    jedis.setbit(redisKey, offsets[1], true);
                                }
                            }
                        }

                        Integer carCount = hashMap.get(redisKey);
                        if (carCount == null) {
                            // FIX: an empty window firing used to look up hashMap.get("")
                            // and pass null into AreaControl; emit nothing instead.
                            return;
                        }
                        AreaControl areaControl = new AreaControl(0, areaId, carCount, startStr, endStr);
                        out.collect(areaControl);
                        System.out.println(areaControl);
                    }
                });

        // 3. Sink the per-window counts to Redis as plain string key/values.
        FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig
                .Builder()
                .setHost("node101")
                .setPort(6379)
                .build();

        resultDf.addSink(new RedisSink<>(conf, new RedisMapper<AreaControl>() {
            /**
             * key:   area:&lt;areaId&gt;:&lt;windowEnd&gt;
             * value: number of distinct cars seen in the area during the window
             */
            @Override
            public RedisCommandDescription getCommandDescription() {
                return new RedisCommandDescription(RedisCommand.SET);
            }

            @Override
            public String getKeyFromData(AreaControl data) {
                return "area:" + data.getAreaId() + ":" + data.getWindowEnd();
            }

            @Override
            public String getValueFromData(AreaControl data) {
                return data.getCarCount().toString();
            }
        }));

        env.execute();
    }

}
