package net.bwie.realtime.jtp.utils;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Kafka helpers shared by the JTP streaming jobs: read a topic as a
 * {@code DataStream<String>} and sink a {@code String} stream back to Kafka
 * with exactly-once semantics.
 *
 * <p>All methods talk to the fixed cluster {@link #BOOTSTRAP_SERVERS}.
 */
public class KafkaUtil {

	/** Kafka cluster address shared by every consumer/producer built here. */
	private static final String BOOTSTRAP_SERVERS = "node101:9092,node102:9092,node103:9092";

	/**
	 * Producer transaction timeout (15 min). Must not exceed the broker-side
	 * transaction.max.timeout.ms (which defaults to 15 min), otherwise the
	 * producer fails to initialize transactions.
	 */
	private static final int TRANSACTION_TIMEOUT_MS = 15 * 60 * 1000;

	/** Utility class — no instances. */
	private KafkaUtil() {
	}

	// ---------------- Consume from Kafka ----------------

	/**
	 * Consumes {@code topic} as strings, starting from the EARLIEST offsets.
	 *
	 * @param env   Flink execution environment the source is attached to
	 * @param topic Kafka topic to read
	 * @return stream of raw message values (no watermarks assigned)
	 */
	public static DataStream<String> consumerKafka(StreamExecutionEnvironment env, String topic){
		return consumeAsString(env, topic, "gid-" + topic, OffsetsInitializer.earliest());
	}

	/**
	 * Consumes {@code topic} as strings, starting from the LATEST offsets.
	 *
	 * @param env   Flink execution environment the source is attached to
	 * @param topic Kafka topic to read
	 * @return stream of raw message values (no watermarks assigned)
	 */
	public static DataStream<String> consumerKafkaFromLatest(StreamExecutionEnvironment env, String topic){
		return consumeAsString(env, topic, "g-" + topic, OffsetsInitializer.latest());
	}

	/**
	 * Builds a value-only string {@link KafkaSource} and attaches it to {@code env}.
	 * Shared by both public consumer variants, which differ only in group id
	 * and starting-offset strategy.
	 */
	private static DataStream<String> consumeAsString(StreamExecutionEnvironment env, String topic,
			String groupId, OffsetsInitializer startingOffsets) {
		KafkaSource<String> source = KafkaSource.<String>builder()
				.setBootstrapServers(BOOTSTRAP_SERVERS)
				.setTopics(topic)
				.setGroupId(groupId)
				.setStartingOffsets(startingOffsets)
				.setValueOnlyDeserializer(new org.apache.flink.api.common.serialization.SimpleStringSchema())
				.build();
		return env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");
	}

	// ---------------- Write to Kafka ----------------

	/**
	 * Sinks a string stream to {@code topic} with exactly-once semantics.
	 * (Used by JtpDouyinEtlJob.)
	 *
	 * @param stream stream whose elements are written as UTF-8 record values
	 * @param topic  target Kafka topic
	 */
	public static void producerKafka(SingleOutputStreamOperator<String> stream, String topic){
		stream.addSink(buildStringProducer(topic));
	}

	/**
	 * Sinks a string stream to {@code topic} with exactly-once semantics.
	 * (Used by DwsLiveRoomAggJob.) Kept as a separate entry point for
	 * backward compatibility; behavior is identical to
	 * {@link #producerKafka(SingleOutputStreamOperator, String)}.
	 *
	 * @param stream stream whose elements are written as UTF-8 record values
	 * @param topic  target Kafka topic
	 */
	public static void producerKafkaString(SingleOutputStreamOperator<String> stream, String topic){
		stream.addSink(buildStringProducer(topic));
	}

	/**
	 * Builds an EXACTLY_ONCE {@link FlinkKafkaProducer} that writes each
	 * element as a UTF-8 encoded, keyless record to {@code topic}.
	 */
	private static FlinkKafkaProducer<String> buildStringProducer(String topic) {
		Properties props = new Properties();
		props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
		props.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, TRANSACTION_TIMEOUT_MS);

		// Anonymous class (not a lambda) so Flink can extract the generic
		// record types reliably.
		KafkaSerializationSchema<String> serializationSchema = new KafkaSerializationSchema<String>() {
			@Override
			public ProducerRecord<byte[], byte[]> serialize(String element, @Nullable Long timestamp) {
				// Explicit UTF-8: never rely on the platform default charset.
				return new ProducerRecord<>(topic, element.getBytes(StandardCharsets.UTF_8));
			}
		};

		return new FlinkKafkaProducer<>(
				topic,
				serializationSchema,
				props,
				FlinkKafkaProducer.Semantic.EXACTLY_ONCE
		);
	}
}
