package com.chis.flink;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.commons.collections.map.HashedMap;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.streaming.util.serialization.KeyedDeserializationSchema;


public class FlinkDemon {

    /**
     * Demo Flink job: consumes {@code KafkaBean} records from the Kafka topic {@code wlj2},
     * starting from explicitly configured per-partition offsets, and prints every record
     * (topic / partition / offset / value) in a sink.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Kafka consumer configuration; offsets are auto-committed every second.
        Properties props = new Properties();
        props.put("bootstrap.servers", "10.88.88.107:9092,10.88.88.106:9092,10.88.88.105:9092");
        props.put("group.id", "wljgroup");
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("max.poll.records", 10);

        String topic = "wlj2";

        // Typed consumer (the original used the raw FlinkKafkaConsumer010 type). The keyed
        // deserialization schema exposes topic / partition / offset alongside the message value.
        FlinkKafkaConsumer010<KafkaBean> consumer = new FlinkKafkaConsumer010<>(topic,
                new KeyedDeserializationSchema<KafkaBean>() {
                    @Override
                    public KafkaBean deserialize(byte[] messageKey, byte[] message, String topic,
                            int partition, long offset) throws IOException {
                        // Decode with an explicit charset — a bare new String(byte[]) would
                        // depend on the platform default encoding.
                        String value = new String(message, StandardCharsets.UTF_8);
                        System.out.println("source:主题："+topic+" 分区："+partition+" 偏移量："+offset+" 值："+value);
                        return new KafkaBean(topic, value, partition, offset);
                    }

                    @Override
                    public boolean isEndOfStream(KafkaBean nextElement) {
                        // Unbounded stream: never signals end-of-stream.
                        return false;
                    }

                    @Override
                    public TypeInformation<KafkaBean> getProducedType() {
                        // Tells Flink the concrete element type this schema produces.
                        return TypeInformation.of(new TypeHint<KafkaBean>() {
                        });
                    }
                }, props);

        // Explicit start offsets for each of the topic's three partitions. Uses the `topic`
        // variable (the original repeated the "wlj2" literal) and a typed java.util.HashMap
        // (the original used the raw commons-collections HashedMap).
        Map<KafkaTopicPartition, Long> offsets = new HashMap<>();
        offsets.put(new KafkaTopicPartition(topic, 0), 73L);
        offsets.put(new KafkaTopicPartition(topic, 1), 59L);
        offsets.put(new KafkaTopicPartition(topic, 2), 59L);
        consumer.setStartFromSpecificOffsets(offsets);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Source parallelism of 3 matches the partition count, so each subtask owns one partition.
        DataStreamSource<KafkaBean> source = env.addSource(consumer).setParallelism(3);

        // Print-only sink; parallelism 5 exceeds the source's, so records are redistributed.
        source.addSink(new RichSinkFunction<KafkaBean>() {
            @Override
            public void invoke(KafkaBean value, Context context) throws Exception {
                System.out.println("sink:主题："+value.getKafkatopic()+" 分区："+value.getKafkapart()+" 偏移量："+value.getKafkaoffset()+" 值："+value.getKafkavalue());
            }
        }).setParallelism(5);

        env.execute("flinktest");
    }

}
