package com.galeno.day11;

import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.UserCodeClassLoader;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.Charset;
import java.util.Objects;

/**
 * Kafka serialization schema that writes each {@code String} record to a fixed
 * topic, encoded with a configurable charset (UTF-8 by default).
 *
 * @author galeno
 * @date 2021/10/29 20:25
 */
public class MyKafkaSerializationSchema implements KafkaSerializationSchema<String> {
    /** Target Kafka topic every record is written to. */
    private final String topic;
    /**
     * Charset name used to encode records. Kept as a String (not a Charset)
     * because {@link Charset} is not Serializable and Flink java-serializes
     * this schema when shipping it to task managers.
     */
    private final String charset;
    /** Resolved charset, cached per task in {@link #open}; transient — see {@link #charset}. */
    private transient Charset encoding;

    /**
     * Creates a schema writing to {@code topic} with the default UTF-8 encoding.
     *
     * @param topic target Kafka topic, must not be null
     */
    public MyKafkaSerializationSchema(String topic) {
        this(topic, "UTF-8");
    }

    /**
     * Creates a schema writing to {@code topic} with the given encoding.
     *
     * @param topic   target Kafka topic, must not be null
     * @param charset charset name; validated eagerly so a bad name fails at job
     *                construction time instead of on every record
     * @throws java.nio.charset.UnsupportedCharsetException if the charset is not supported
     * @throws java.nio.charset.IllegalCharsetNameException if the name is malformed
     */
    public MyKafkaSerializationSchema(String topic, String charset) {
        this.topic = Objects.requireNonNull(topic, "topic");
        this.charset = Objects.requireNonNull(charset, "charset");
        // Fail fast: surface an invalid charset name at construction, not per record.
        Charset.forName(charset);
    }

    /**
     * Called once per task before any record is serialized; resolves the
     * charset a single time instead of looking it up for every record.
     */
    @Override
    public void open(SerializationSchema.InitializationContext context) throws Exception {
        this.encoding = Charset.forName(charset);
        KafkaSerializationSchema.super.open(context);
    }

    /**
     * Serializes one record: the String becomes the Kafka value (no key).
     *
     * @param s         the record to serialize
     * @param timestamp optional event timestamp supplied by Flink; unused here
     * @return a keyless producer record for {@link #topic}
     */
    @Override
    public ProducerRecord<byte[], byte[]> serialize(String s, @Nullable Long timestamp) {
        // open() runs before serialize() in Flink's lifecycle, but guard anyway
        // (e.g. if the schema is used outside a running job).
        Charset cs = encoding != null ? encoding : Charset.forName(charset);
        return new ProducerRecord<>(topic, s.getBytes(cs));
    }
}
