package com.jiuzhi.logger;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.jiuzhi.logger.util.NginxStrSplitter;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Created with IDEA
 * author:tangsixiang@163.com
 * Date:2018/11/27
 * Time:11:26
 */


/**
 * Flink streaming job: consumes nginx access-log lines from Kafka, filters for
 * lines containing the request-detail marker, parses the JSON payload, counts
 * occurrences of several request attributes (identityKey, method, remortIP,
 * imei) over a 20s sliding window (5s slide), and writes each count to Redis
 * as a plain SET keyed by the attribute value.
 */
public class EmpFlinkStreaming {

    private static final Logger logger = LoggerFactory.getLogger(EmpFlinkStreaming.class);

    // ObjectMapper is thread-safe once configured; reuse this single instance
    // instead of allocating a new one for every record in the flatMap.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) {

        String topic = "jiuzhi_nginx_log";     // default Kafka topic
        String kafkaIP = "172.16.1.151:9092";  // default Kafka broker address
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
//        environment.enableCheckpointing(70000);  // checkpoint interval; required for exactly-once
//        environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
//        environment.setParallelism(2);  // default job parallelism

        // Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", kafkaIP);
        properties.setProperty("zookeeper.connect", "172.16.1.151:2181,172.16.1.75:2181,172.16.1.74:2181");
        properties.setProperty("group.id", "bigData-flink");

        // Flink-wrapped Kafka consumer (0.11 connector).
        FlinkKafkaConsumer011<String> consumer
                = new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), properties);
        // Start from the latest offset rather than the committed group offset.
        consumer.setStartFromLatest();
        //   consumer.setStartFromGroupOffsets();
        consumer.setCommitOffsetsOnCheckpoints(true);

        // Redis connection settings for the sink (host 172.16.1.151, db 14).
        FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig.Builder()
                .setHost("172.16.1.151")
                .setDatabase(14)
                .build();

        environment.addSource(consumer)
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) throws Exception {
                        // NOTE(review): ">= 1" skips lines where the marker is at
                        // index 0; if the marker can start a line, ">= 0" (or
                        // contains()) is probably intended — confirm with log format.
                        return value.indexOf("---接口请求参数明细：") >= 1;
                    }
                })
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String message, Collector<Tuple2<String, Integer>> out) throws Exception {
                        try {
                            // Strip the log prefix; the JSON payload starts at the first '{'.
                            message = message.substring(message.indexOf("{"));
                            @SuppressWarnings("unchecked")
                            HashMap<String, String> mmap = MAPPER.readValue(message, HashMap.class);

                            // Emit one (attribute-value, 1) pair per counted field.
                            out.collect(new Tuple2<>(mmap.get("identityKey"), 1));
                            out.collect(new Tuple2<>(mmap.get("method"), 1));
                            out.collect(new Tuple2<>(mmap.get("remortIP"), 1));
                            out.collect(new Tuple2<>(mmap.get("imei"), 1));
                        } catch (Exception e) {
                            // Log the full stack trace (not just getMessage(), which
                            // may be null) so malformed records can be diagnosed;
                            // the bad record is dropped and the stream continues.
                            logger.error("Failed to parse log record", e);
                        }
                    }
                })
                // Key by the attribute value, count within a 20s window sliding every 5s.
                .keyBy(0)
                .timeWindow(Time.seconds(20), Time.seconds(5))
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
                        return new Tuple2<>(value1.f0, value1.f1 + value2.f1);
                    }
                })
                .addSink(new RedisSink<Tuple2<String, Integer>>(conf, new RedisMapper<Tuple2<String, Integer>>() {
                    @Override
                    public RedisCommandDescription getCommandDescription() {
                        // Plain SET; a HSET variant was considered:
                        // new RedisCommandDescription(RedisCommand.HSET, "thinking-flink");
                        return new RedisCommandDescription(RedisCommand.SET);
                    }

                    @Override
                    public String getKeyFromData(Tuple2<String, Integer> stringIntegerTuple2) {
                        // Fallback key "tsx" when the attribute was absent from the JSON.
                        return stringIntegerTuple2.f0 == null ? "tsx" : stringIntegerTuple2.f0;
                    }

                    @Override
                    public String getValueFromData(Tuple2<String, Integer> stringIntegerTuple2) {
                        return "浏览次数:" + (stringIntegerTuple2.f1 == null ? "0" : stringIntegerTuple2.f1);
                    }
                }));

        logger.info("Printing result to stdout. Use --output to specify output path.");

        try {
            environment.execute("emp-logo-keyNum-Thinking");
        } catch (Exception e) {
            // Preserve the full stack trace in the job log instead of printStackTrace().
            logger.error("Flink job execution failed", e);
        }
    }
}
