package cn.com.ytst.data.integration.components.writer.kafka;

import java.io.File;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import com.alibaba.datax.common.util.Configuration;

import cn.com.ytst.data.integration.components.reader.kafka.KafkaWriteKey;
import cn.com.ytst.data.integration.core.DefaultEngine;
import cn.com.ytst.data.integration.core.plugin.Writer;
import lombok.extern.slf4j.Slf4j;


/**
 * @Author: 123774135@qq.com
 * @ClassName: cn.com.ytst.data.integration.components.writer.kafka.KafkaWriter
 * @Description: Kafka writer plugin for the data-integration engine.
 * @Date: Created 2019/11/17 21:34
 * @Modified By:
 */
@Slf4j
/**
 * Writer plugin that publishes each incoming line to a Kafka topic.
 * <p>
 * NOTE(review): the producer wiring in {@link #init()} and the actual send in
 * {@link #startWriter(String)} are currently commented out, so this writer only
 * counts the records it receives. The disabled code is kept below so it can be
 * restored once the configuration keys are finalized — until then
 * {@code kafkaProducer} stays {@code null} and must be guarded against.
 */
@Slf4j
public class KafkaWriter extends Writer {

	// Created only when the commented-out wiring in init() is re-enabled;
	// null for now, so every use must be null-checked.
	private KafkaProducer<String, String> kafkaProducer;

	Properties properties ;

	// Kafka "bootstrap.servers" connection string.
	protected String bootstrapServers;

	// Producer "batch.size" (bytes), kept as String for Properties.put.
	protected String batchsize;

	// Producer "linger.ms".
	protected String lingerms;

	// Producer "acks".
	protected String produceracks;

	// Producer "buffer.memory" (bytes).
	protected String buffermemory;

	// Producer "retries".
	protected String kafka_producer_retries;

	// Destination topic name.
	protected String topic;

	// Record key used for every ProducerRecord.
	protected String key;

	protected File file;

	protected MqPrarmsVo mqPrarmsVo;

	// Job-level configuration handed in by the framework (see init()).
	private Configuration writerConfig;

	// Running count of records received since init()/destroy().
	private AtomicLong number;

	public KafkaWriter() {

	}

	/**
	 * Receives one line of data. Currently only counts it; the Kafka send is
	 * disabled (see the commented-out block below).
	 *
	 * @param line one record, already serialized to a String by the framework
	 */
	@Override
	public void startWriter(String line) {
		// Use the value returned by incrementAndGet() so the logged count is the
		// one this call produced, even if other threads increment concurrently.
		long count = this.number.incrementAndGet();
		// Parameterized SLF4J logging instead of System.out.println.
		log.info("接受数据条数:{}", count);
//		ProducerRecord<String,String> producerRecord = new ProducerRecord<>(topic,this.key, line);
//		try {
//			 final Future<RecordMetadata> future =	kafkaProducer.send(producerRecord, new Callback() {
//				@Override
//				public void onCompletion(RecordMetadata metadata, Exception exception) {
//					if(null != exception){
//					   log.warn("{}",exception);
//					}else{
//						log.trace("发送成功");
//					}
//				}
//			});
//			if (log.isDebugEnabled()){
//				log.info("发送数据条数:{}",this.number.get());
//			}
//		} catch (Exception e) {
//			e.printStackTrace();
//		}

	}

	/**
	 * Resets the record counter and releases the producer if one was created.
	 */
	@Override
	public void destroy() {
		number.set(0);
		// Guard against NPE: kafkaProducer is only instantiated when the
		// commented-out wiring in init() is re-enabled.
		if (kafkaProducer != null) {
			kafkaProducer.close();
		}
	}

	/**
	 * Initializes the counter and captures the job configuration. The Kafka
	 * producer construction below is intentionally disabled for now.
	 */
	@Override
	public void init() {
		this.number = new AtomicLong();
		this.writerConfig = this.getPluginJobConf();
//		this.properties = new Properties();
//		this.topic = writerConfig.getString(KafkaWriterKey.KAFAK_WRITER_TOPIC_TAG);
//		this.bootstrapServers = writerConfig.getString(KafkaWriterKey.KAFAK_WRITER_BOOTSTRAPSERVERS_TAG);
//		this.key = writerConfig.getString(KafkaWriterKey.KAFAK_WRITER_KEY_TAG);
//		this.batchsize = String.valueOf(16384*2);
//		this.lingerms = "2";
//		this.produceracks = "1";
//		this.buffermemory = "33554432";
//		this.kafka_producer_retries = "1";
//		properties.put("bootstrap.servers", this.bootstrapServers);
//		properties.put("acks", this.produceracks);
//		properties.put("retries", this.kafka_producer_retries);
//		properties.put("batch.size", this.batchsize);
//		properties.put("linger.ms", this.lingerms);
//		properties.put("buffer.memory", this.buffermemory);
//		properties.put("enable.auto.commit", false);
//		properties.put("key.serializer", KafkaWriteKey.KAFKA_PRODUCER_KEY_SERIALIZER);
//		properties.put("value.serializer", KafkaWriteKey.KAFKA_PRODUCER_VALUE_SERIALIZER);
//		kafkaProducer = new KafkaProducer<>(properties);
	}

	/**
	 * Manual smoke-test entry point.
	 * NOTE(review): the job path is hard-coded to a local Windows file — TODO
	 * take it from args when available.
	 */
	public static void main(String[] args) {
		new DefaultEngine().startengin(args.length > 0 ? args[0] : "D:/job/http/http_adsb.json");
	}

}
