package com.atguigu.flink.chapter05.sink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Flink streaming word-count job that writes results to Kafka using the
 * legacy {@link FlinkKafkaProducer} connector.
 *
 * <p>Pipeline: socket source (hadoop102:8888) → split lines into words →
 * keyed running count per word → format as "word_count" strings → Kafka
 * topic "s2" with AT_LEAST_ONCE delivery semantics.
 */
public class KafkaSinkDemoOld {
    public static void main(String[] args) {
        // Pin the Flink web UI to a fixed port for local debugging.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Read raw lines from the socket, emit (word, 1) pairs, keep a
        // running sum per word, then render each update as "word_count".
        SingleOutputStreamOperator<String> stream = env.socketTextStream("hadoop102", 8888)
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String s,
                                        Collector<Tuple2<String, Integer>> out) throws Exception {
                        String[] words = s.split(" ");
                        for (String word : words) {
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                })
                .keyBy(t -> t.f0)
                .sum(1)
                .map(t -> t.f0 + "_" + t.f1);

        Properties props = new Properties();
        // BUGFIX: the config key must be "bootstrap.servers" (was the
        // misspelled "bootstrap.serves", which Kafka silently ignores),
        // and the first broker host was misspelled "haoop102".
        props.setProperty("bootstrap.servers", "hadoop102:9092,hadoop103:9092");
        stream.addSink(new FlinkKafkaProducer<String>(
                // Default topic; the serialization schema below overrides it
                // with "s2" on every record.
                "default",
                new KafkaSerializationSchema<String>() {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(String elements,
                                                                    @Nullable Long timestamp) {
                        if (elements != null) {
                            // Encode explicitly as UTF-8 to avoid relying on
                            // the platform default charset.
                            return new ProducerRecord<>("s2", elements.getBytes(StandardCharsets.UTF_8));
                        }
                        return null;
                    }
                },
                props,
                FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
        ));

        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

//
//public class KafkaSinkDemoOld {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port", 2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        SingleOutputStreamOperator<String> stream = env.socketTextStream("hadoop102", 8888)
//                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
//                    @Override
//                    public void flatMap(String s,
//                                        Collector<Tuple2<String, Integer>> out) throws Exception {
//                        String[] words = s.split(" ");
//                        for (String word : words) {
//                            out.collect(Tuple2.of(word, 1));
//                        }
//                    }
//                })
//                .keyBy(t -> t.f0)
//                .sum(1)
//                .map(t -> t.f0 + "_" + t.f1);
//
//        /*stream.addSink(new FlinkKafkaProducer<String>(
//                "hadoop102:9092,hadoop103:9092",
//                "s2",
//                new SimpleStringSchema()
//        ))
//         */
//
//        Properties props = new Properties();
//        props.setProperty("bootstrap.serves","hadoop102:9092,hadoop103:9092,hadoop104:9092");
//        stream.addSink(
//                new FlinkKafkaProducer<String>(
//                        "default",
//                        new KafkaSerializationSchema<String>() {
//                            @Override
//                            public ProducerRecord<byte[], byte[]> serialize(String elements,
//                                                                            @Nullable Long aLong) {
//                                if (elements != null){
//                                    return new ProducerRecord<>("s2",elements.getBytes(StandardCharsets.UTF_8));
//                                }
//                                return null;
//                            }
//                        },props,FlinkKafkaProducer.Semantic.AT_LEAST_ONCE)
//        );
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}

//public class KafkaSinkDemoOld {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port", 2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        SingleOutputStreamOperator<String> stream = env.socketTextStream("hadoop102", 8888)
//                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
//                    @Override
//                    public void flatMap(String value,
//                                        Collector<Tuple2<String, Integer>> out) throws Exception {
//                        String[] words = value.split(" ");
//                        for (String word : words) {
//                            out.collect(Tuple2.of(word, 1));
//                        }
//                    }
//                })
//                .keyBy(t -> t.f0)
//                .sum(1)
//                .map(t -> t.f0 + "_" + t.f1);
//
//        Properties props = new Properties();
//        props.setProperty("bootstrap.serves","hadoop102:9092,hadoop103:9092,hadoop104:9092");
//        stream.addSink(new FlinkKafkaProducer<String>(
//                "defalut",
//                new KafkaSerializationSchema<String>() {
//                    @Override
//                    public ProducerRecord<byte[], byte[]> serialize(String elements,
//                                                                    @Nullable Long aLong) {
//                        if (elements != null){
//                            return new ProducerRecord<>("s2",elements.getBytes(StandardCharsets.UTF_8));
//                        }
//                        return null;
//                    }
//                },
//                props,
//                FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
//        ));
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}