package com.jiuzhi.logger;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Created with IDEA
 * author:tangsixiang@163.com
 * Date:2018/11/27
 * Time:11:26
 */


/**
 * Flink streaming job: consumes nginx log lines from the Kafka topic
 * {@code jiuzhi_nginx_log}, keeps only lines containing the
 * request-detail marker, and forwards the matching lines unchanged to the
 * Kafka topic {@code jiuzhi-logger-reciver}.
 */
public class EmpFlinkStreamkafka {

    // Kept package-private under the original name in case other classes reference it;
    // made final since it is never reassigned.
    static final Logger logger = LoggerFactory.getLogger(EmpFlinkStreamkafka.class);
    // NOTE(review): unused in this file — presumably intended for later JSON parsing; confirm before removing.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Marker that identifies "interface request parameter detail" log lines.
    private static final String DETAIL_MARKER = "---接口请求参数明细：";

    /**
     * Builds and executes the streaming topology. Blocks until the Flink job
     * terminates (normally it runs indefinitely).
     *
     * @param args unused
     */
    public static void main(String[] args) {

        String topic = "jiuzhi_nginx_log";       // default source topic
        String kafkaIP = "172.16.1.151:9092";    // default broker list

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka connection settings shared by the consumer and the producer.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", kafkaIP);
        properties.setProperty("group.id", "bigData-flink-mu");
        properties.setProperty("zookeeper.connect", "172.16.1.151:2181,172.16.1.75:2181,172.16.1.74:2181");

        // Flink-wrapped Kafka 0.11 consumer, starting from the latest offsets
        // (earlier records are intentionally skipped on each job start).
        FlinkKafkaConsumer011<String> consumer =
                new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), properties);
        consumer.setStartFromLatest();

        environment.addSource(consumer)
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) throws Exception {
                        // Keep only lines containing the request-detail marker.
                        // NOTE(review): original used 'indexOf(...) >= 1', which also drops
                        // lines where the marker is at index 0; '> 0' preserves that exact
                        // behavior — confirm whether a plain contains() was intended.
                        return value.indexOf(DETAIL_MARKER) > 0;
                    }
                })
                .addSink(new FlinkKafkaProducer011<String>("jiuzhi-logger-reciver", new SerializationSchema<String>() {
                    @Override
                    public byte[] serialize(String element) {
                        // DEBUG, not ERROR: this fires once per record on the happy path.
                        logger.debug("Writing record to Kafka topic jiuzhi-logger-reciver: {}", element);
                        // Explicit UTF-8 — the no-arg getBytes() uses the platform-default
                        // charset and would corrupt the Chinese log text on some hosts.
                        return element.getBytes(StandardCharsets.UTF_8);
                    }
                }, properties));

        try {
            environment.execute("emp-logo-Thinking-dec");
        } catch (Exception e) {
            // Log with the full stack trace through SLF4J instead of printStackTrace(),
            // so the failure reaches the configured log appenders.
            logger.error("Flink job 'emp-logo-Thinking-dec' failed", e);
        }
    }
}
