package com.youxin.dataStream;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class StreamKafkaSink {
    /**
     * Builds a tiny in-memory stream of sample strings and publishes each element
     * to the Kafka topic {@code "topic"} through a {@link FlinkKafkaProducer010} sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        // Sample records to publish. Diamond operator fixes the raw-type warning
        // from the original `new ArrayList()`.
        List<String> list = new ArrayList<>();
        list.add("hello ni hao ya tom");
        list.add("wo hen bu hao");
        list.add("ni hao huai o");
        // Collection sources are non-parallel by nature; pin the source to parallelism 1.
        DataStreamSource<String> dataSource = env.fromCollection(list).setParallelism(1);

        // Kafka producer configuration. Only producer-side settings belong here:
        // the original also set consumer-only options (group.id, auto.offset.reset,
        // key/value *deserializers*) and zookeeper.connect, all of which a Kafka 0.10
        // producer ignores. SimpleStringSchema below handles String -> byte[] serialization.
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "hadoop-1:9092");

        dataSource.addSink(new FlinkKafkaProducer010<String>("topic", new SimpleStringSchema(), properties));

        // Flink builds the dataflow lazily; without execute() the job is never
        // submitted and nothing is written to Kafka.
        env.execute("StreamKafkaSink");
    }
}
