package com.atguigu.chapter5.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.chapter5.source.WaterSensor;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Test;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Properties;

/**
 * @ClassName: KafkaSink
 * @Description: Examples of writing a stream to Kafka with Flink's FlinkKafkaProducer,
 *               from a collection source, a socket source, and with a custom
 *               KafkaSerializationSchema controlling partitioning.
 * @Author: kele
 * @Date: 2021/4/5 18:13
 **/
public class KafkaSink {

    /**
     * Sinks an in-memory collection of {@link WaterSensor} records to Kafka.
     * Each record is serialized to a JSON string and written to topic "senion"
     * with the simple String schema (no key, default partitioning).
     *
     * @throws Exception if the Flink job fails to execute
     */
    @Test
    public void listsinkkafka() throws Exception {

        // NOTE(review): conf is built but never passed to the environment
        // (createLocalEnvironmentWithWebUI(conf) would be needed for the REST
        // port to take effect) — kept as-is to preserve behavior.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",20000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(2);

        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 30));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));

        DataStreamSource<WaterSensor> ds = env.fromCollection(waterSensors);

        // POJO -> JSON string before handing to the String serialization schema.
        SingleOutputStreamOperator<String> ds1 = ds.map(JSON::toJSONString);

        ds1.addSink(new FlinkKafkaProducer<String>("hadoop162:9092", "senion", new SimpleStringSchema()));

        env.execute();

    }



    /**
     * Sinks lines read from a socket (hadoop162:8888) straight to Kafka topic
     * "senion" as plain strings. Useful for interactive/manual testing.
     *
     * @throws Exception if the Flink job fails to execute
     */
    @Test
    public void socketSinlKafka() throws Exception {

        Configuration conf = new Configuration();
        conf.setInteger("rest.port",20000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(2);

        DataStreamSource<String> ds = env.socketTextStream("hadoop162", 8888);

        ds.addSink(new FlinkKafkaProducer<String>("hadoop162:9092", "senion", new SimpleStringSchema()));

        env.execute();

    }



    /**
     * Writes to Kafka with an explicit {@link KafkaSerializationSchema} so the
     * record key (and therefore the target partition) can be controlled:
     * a null key lets Kafka round-robin across partitions for balanced writes,
     * while using the sensor id as the key groups each sensor into one partition.
     * Uses EXACTLY_ONCE semantics (transactional producer).
     *
     * @throws Exception if the Flink job fails to execute
     */
    public void sinkKakfa() throws Exception {

        Configuration conf = new Configuration();
        conf.setInteger("rest.port",20000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(2);


        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 30));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));

        DataStreamSource<WaterSensor> ds = env.fromCollection(waterSensors);

        Properties prop = new Properties();
        // FIX: the correct producer config key is "bootstrap.servers"
        // ("bootstrap-server" is silently ignored and the producer cannot connect).
        prop.setProperty("bootstrap.servers", "hadoop102:9092,hadoop103:9092");
        // FIX: EXACTLY_ONCE uses Kafka transactions; Flink's default transaction
        // timeout (1 hour) exceeds the broker default transaction.max.timeout.ms
        // (15 minutes), which makes producer initialization fail. Cap it at 15 min.
        prop.setProperty("transaction.timeout.ms", String.valueOf(15 * 60 * 1000));

        ds.addSink(new FlinkKafkaProducer<>(
                "senion",

                new KafkaSerializationSchema<WaterSensor>() {

                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(WaterSensor waterSensor, @Nullable Long aLong) {
                        // Option 1: null key -> Kafka round-robins records across
                        // partitions, giving balanced writes:
                        //return new ProducerRecord<>("senion",null,JSON.toJSONString(waterSensor).getBytes(StandardCharsets.UTF_8));

                        // Option 2: sensor id as key -> records with the same id
                        // land in the same partition (hash partitioning).
                        // FIX: encode the key with an explicit UTF-8 charset, matching
                        // the value, instead of the platform-default getBytes().
                        return new ProducerRecord<>("senion",
                                waterSensor.getId().getBytes(StandardCharsets.UTF_8),
                                JSON.toJSONString(waterSensor).getBytes(StandardCharsets.UTF_8));
                    }
                },
                prop,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        ));



        env.execute();


    }

}
