package com.yc.bigdata.flink.demo;

import com.yc.bigdata.flink.demo.schema.UserActionKafkaSchema;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * <p></p>
 *
 * @author: YuanChilde
 * @date: 2020-02-11 10:44
 * @version: 1.0
 * Modification History:
 * Date    Author      Version     Description
 * -----------------------------------------------------------------
 * 2020-02-11 10:44    YuanChilde     1.0        新增
 */
/**
 * Flink job that pipes records from Kafka topic {@code bigdata-test-in} to
 * {@code bigdata-test-out}: consumes strings with {@link SimpleStringSchema},
 * re-publishes them through {@code UserActionKafkaSchema}, and also prints
 * each record to stdout.
 */
public class KafkaConsumerProductor {

    /** Checkpoint interval in milliseconds. */
    private static final long CHECKPOINT_INTERVAL_MS = 5000L;

    /** Kafka transaction timeout: 5 minutes (must not exceed the broker's transaction.max.timeout.ms). */
    private static final long TRANSACTION_TIMEOUT_MS = 1000L * 60 * 5;

    private static final String DEFAULT_BOOTSTRAP_SERVERS = "dev.dongbaosoft.com:9092";

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} overrides the Kafka bootstrap servers
     *             (defaults to {@value #DEFAULT_BOOTSTRAP_SERVERS})
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Allow the broker address to be supplied on the command line; the
        // default preserves the original hard-coded behavior.
        final String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Enable checkpointing so the Kafka consumer can commit offsets and
        // the producer's AT_LEAST_ONCE semantics hold across failures.
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", bootstrapServers);
        // Removed key.serializer / value.serializer: Flink's (de)serialization
        // schemas handle byte conversion, so those client properties were
        // ignored (and misleading on a consumer config).
        properties.setProperty("transaction.timeout.ms", String.valueOf(TRANSACTION_TIMEOUT_MS));
        properties.setProperty("group.id", "flink-test-group");

        // SimpleStringSchema produces String records, so the consumer is typed.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("bigdata-test-in", new SimpleStringSchema(), properties);

        DataStreamSource<String> data = env.addSource(consumer);
        // NOTE(review): producer left raw-typed on purpose — UserActionKafkaSchema's
        // element type is not visible here; confirm it serializes String before
        // adding a type argument.
        data.addSink(new FlinkKafkaProducer("bigdata-test-out", new UserActionKafkaSchema("bigdata-test-out"), properties,
                FlinkKafkaProducer.Semantic.AT_LEAST_ONCE)).setParallelism(1);
        data.print();
        env.execute("Kafka Test");
    }
}
