package net.alan.etl;

import java.util.Properties;
import java.util.Vector;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Logger;

import net.alan.etl.kafka.FlowParams;

/**
 * @author 李岩飞
 * @email eliyanfei@126.com
 * @date 2018年5月10日下午2:07:54
 */
/**
 * Background worker that drains {@code FlowParams} entries from the shared
 * {@code FlowContext} cache and publishes each one to a per-task Kafka topic
 * ({@code "flow-" + taskId}), polling every 3 seconds.
 *
 * <p>Start it via {@link #startup()}; stop it by interrupting the worker
 * thread (the loop exits cleanly on interrupt). Call {@link #shutdown()} to
 * flush and release the underlying Kafka producer.
 */
public class FlowProductor implements Runnable {
	static Logger logger = Logger.getLogger(FlowProductor.class);

	public FlowProductor() {
	}

	// Lazily created by initProductor() on the worker thread; null until run() starts.
	KafkaProducer<String, FlowParams> producer = null;

	/**
	 * Builds and assigns the Kafka producer.
	 *
	 * <p>NOTE(review): the broker address is hard-coded to localhost — move it to
	 * external configuration for non-local deployments.
	 */
	private void initProductor() {
		String BROKER_LIST = "localhost:9092";
		Properties props = new Properties();
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "net.alan.etl.kafka.FlowParamEncodeing");
		// "metadata.broker.list" was a pre-0.8.2 Scala-producer property; the new
		// KafkaProducer only reads bootstrap.servers, so the duplicate is dropped.

		logger.warn("Flow 生产者已启动...");
		producer = new KafkaProducer<String, FlowParams>(props);
	}

	/**
	 * Flushes and closes the Kafka producer, if one was created.
	 * Best-effort: a failure during close is logged and otherwise ignored.
	 */
	public void shutdown() {
		if (producer != null) {
			try {
				producer.close();
			} catch (Exception e) {
				// Nothing sensible the caller can do about a failed close; record it
				// instead of swallowing it silently.
				logger.warn("Failed to close Kafka producer", e);
			}
		}
	}

	/**
	 * Closes the producer before delegating to {@link Object#clone()}.
	 *
	 * <p>NOTE(review): releasing a resource from clone() is unusual; kept for
	 * backward compatibility, but prefer calling {@link #shutdown()} directly.
	 */
	@Override
	protected Object clone() throws CloneNotSupportedException {
		shutdown();
		return super.clone();
	}

	/**
	 * Worker loop: initializes the producer, then repeatedly takes the head of
	 * the shared params cache (if any) and sends it to Kafka, sleeping 3 seconds
	 * between iterations. Exits when the thread is interrupted.
	 */
	@Override
	public void run() {
		initProductor();
		while (!Thread.currentThread().isInterrupted()) {
			try {
				Vector<FlowParams> list = FlowContext.createContext().getParmsCache();
				// NOTE(review): size()/remove(0) on a shared Vector is not atomic as a
				// pair — assumes this is the only thread removing from the cache.
				if (list.size() > 0) {
					FlowParams params = list.remove(0);
					producer.send(new ProducerRecord<String, FlowParams>("flow-" + params.getTaskId(),
							"key-" + System.currentTimeMillis(), params));
				}
				TimeUnit.SECONDS.sleep(3);
			} catch (InterruptedException e) {
				// Restore the interrupt flag and leave the loop so the thread can be
				// stopped; the original swallowed this and spun forever.
				Thread.currentThread().interrupt();
			} catch (Exception e) {
				// Use the class logger instead of printStackTrace so failures reach
				// the configured log4j appenders.
				logger.error("Failed to publish flow params to Kafka", e);
			}
		}
	}

	/** Starts the producer loop on a new, named background thread. */
	public void startup() {
		new Thread(this, "flow-productor").start();
	}
}
