package com.jzsec.tzdslog;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Flink sink that writes each incoming {@code String} to a Kafka topic as a
 * UTF-8 encoded byte array, keyed by a per-subtask incrementing counter.
 *
 * <p>Each send blocks on the broker acknowledgement ({@code send(...).get()}),
 * and any failure is rethrown so Flink fails (and can restart) the task instead
 * of silently dropping records.
 */
public class SinkToKafka011 extends RichSinkFunction<String> {
	// static: SLF4J loggers are not serializable and must not be captured
	// when Flink serializes this function for distribution.
	private static final Logger log = LoggerFactory.getLogger(SinkToKafka011.class);

	private final Properties props;
	private final String topic;
	// Created in open(); transient because KafkaProducer is not serializable.
	private transient KafkaProducer<Integer, byte[]> producerB;
	// Record key; monotonically increasing within each parallel subtask.
	private final AtomicInteger messageNo = new AtomicInteger(0);

	/**
	 * @param props Kafka producer configuration (bootstrap servers, serializers, ...)
	 * @param topic destination Kafka topic
	 */
	public SinkToKafka011(Properties props, String topic) {
		this.props = props;
		this.topic = topic;
	}

	@Override
	public void open(Configuration parameters) throws Exception {
		super.open(parameters);
		producerB = new KafkaProducer<>(props);
	}

	/**
	 * Sends one record and blocks until the broker acknowledges it.
	 *
	 * @throws InterruptedException if the wait for the acknowledgement is
	 *         interrupted (the interrupt status is restored first)
	 * @throws ExecutionException if the send fails on the broker side
	 */
	@Override
	public void invoke(String value, Context context) throws Exception {
		try {
			producerB.send(new ProducerRecord<>(
					topic,
					messageNo.getAndIncrement(),
					// Explicit charset: the platform default would make the
					// written bytes depend on the JVM's locale settings.
					value.getBytes(StandardCharsets.UTF_8)))
					.get();
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore interrupt status
			throw e;
		} catch (ExecutionException e) {
			log.error("Failed to send record to topic {}", topic, e);
			throw e;
		}
	}

	@Override
	public void close() throws Exception {
		if (producerB != null) {
			producerB.close();
		}
		super.close();
	}
}
