package com.jiuzhi.logger;

import com.fasterxml.jackson.databind.ObjectMapper;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Created with IDEA
 * author:tangsixiang@163.com
 * Date:2018/11/27
 * Time:11:26
 */


/**
 * Flink streaming job: consumes nginx log lines from Kafka, keeps only the
 * "interface request detail" entries, parses the embedded JSON payload, counts
 * requests per (identityKey, remortIP) pair over a sliding processing-time
 * window (10s size / 5s slide), and writes the aggregated tuples back to Kafka.
 */
public class EmpFlinkStreamMul {

    private static final Logger logger = LoggerFactory.getLogger(EmpFlinkStreamMul.class);

    // Jackson ObjectMapper is thread-safe for readValue(); share one instance
    // instead of allocating a new mapper for every incoming message.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Marker that identifies request-detail lines in the nginx log stream. */
    private static final String DETAIL_MARKER = "---接口请求参数明细：";

    /**
     * Builds and runs the streaming pipeline. Topic and broker addresses are
     * currently hard-coded defaults; {@code args} is unused.
     *
     * @param args command-line arguments (ignored)
     */
    public static void main(String[] args) {

        String topic = "jiuzhi_nginx_log";     // default source topic
        String kafkaIP = "172.16.1.151:9092";  // default Kafka broker list

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
        environment.enableCheckpointing(20000L);

        // Kafka connection settings, shared by the source consumer and sink producer.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", kafkaIP);
        properties.setProperty("zookeeper.connect", "172.16.1.151:2181,172.16.1.75:2181,172.16.1.74:2181");
        properties.setProperty("group.id", "bigData-flink-mu");

        // Flink-wrapped Kafka consumer, starting from the latest offsets.
        FlinkKafkaConsumer011<String> consumer =
                new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), properties);
        consumer.setStartFromLatest();

        // Filter -> parse -> key by (identityKey, remortIP).
        KeyedStream<Tuple3<String, String, Integer>, Tuple2<String, String>> keyedStream = environment
                .addSource(consumer)
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) throws Exception {
                        // Keep only request-detail lines.
                        // NOTE(review): ">= 1" drops a line whose marker sits at index 0;
                        // confirm whether a plain contains() (">= 0") was intended.
                        return value.indexOf(DETAIL_MARKER) >= 1;
                    }
                })
                .flatMap(new FlatMapFunction<String, Tuple3<String, String, Integer>>() {
                    @Override
                    public void flatMap(String message, Collector<Tuple3<String, String, Integer>> out) throws Exception {
                        try {
                            // The JSON payload starts at the first '{' of the log line.
                            message = message.substring(message.indexOf("{"));
                            @SuppressWarnings("unchecked")
                            Map<String, String> mmap = MAPPER.readValue(message, HashMap.class);
                            out.collect(new Tuple3<>(
                                    mmap.getOrDefault("identityKey", "imie"),
                                    mmap.getOrDefault("remortIP", "0.0.0.0"),
                                    1));
                        } catch (Exception e) {
                            // Malformed lines are dropped; keep the full stack trace
                            // (the original logged only e.getMessage(), losing the cause).
                            logger.error("failed to parse log line", e);
                        }
                    }
                })
                .keyBy(new KeySelector<Tuple3<String, String, Integer>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple3<String, String, Integer> value) throws Exception {
                        return new Tuple2<>(value.f0, value.f1);
                    }
                });

        // Sliding window count per key, then sink the aggregates back to Kafka.
        keyedStream.timeWindow(Time.seconds(10), Time.seconds(5))
                .reduce(new ReduceFunction<Tuple3<String, String, Integer>>() {
                    @Override
                    public Tuple3<String, String, Integer> reduce(Tuple3<String, String, Integer> value1,
                                                                  Tuple3<String, String, Integer> value2)
                            throws Exception {
                        int sum = value1.f2 + value2.f2;
                        // Aggregation progress is normal flow, not an error condition.
                        logger.debug("汇总数据并----累加数据和 {},{}:{}", value1.f0, value2.f1, sum);
                        return new Tuple3<>(value1.f0, value1.f1, sum);
                    }
                })
                .addSink(new FlinkKafkaProducer011<>("jiuzhi-logger-reciver",
                        new SerializationSchema<Tuple3<String, String, Integer>>() {
                            @Override
                            public byte[] serialize(Tuple3<String, String, Integer> element) {
                                // Tuple3.toString() form "(f0,f1,f2)"; encode explicitly as
                                // UTF-8 instead of relying on the platform default charset.
                                return String.valueOf(element).getBytes(StandardCharsets.UTF_8);
                            }
                        }, properties));

        try {
            environment.execute("emp-logo-Thinking-kafka");
        } catch (Exception e) {
            // Log through SLF4J instead of printStackTrace().
            logger.error("flink job execution failed", e);
        }
    }
}
