package com.inji.spark.biz.service.kafka;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.inji.spark.biz.dto.SparkApiLogReq;
import com.inji.spark.biz.util.JsonMapper;



/**
 * @author liukz
 *
 */
/**
 * Standalone demo producer that publishes a fixed {@code SparkApiLogReq}
 * query (serialized as JSON) to the {@code test} topic every 3 seconds.
 *
 * <p>Intended as a manual smoke test for the Spark-side consumer; run via
 * {@link #main(String[])}. Not thread-safe and not meant for production use
 * (broker address and payload are hard-coded).
 *
 * @author liukz
 */
public class KafkaJavaProducer {
	/** Destination topic for the demo messages. */
	public final static String TOPIC = "test";
	/** Hard-coded broker address for the test environment. */
	public final static String BROKER_LIST = "172.16.201.240:9092";
	private static final Logger logger = LoggerFactory.getLogger(KafkaJavaProducer.class);

	public static void main(String[] args) {
		initInjiKafkaProducer();
	}

	/**
	 * Builds the producer and sends the same serialized request to {@link #TOPIC}
	 * in an endless loop, pausing 3 seconds between sends. Returns only if the
	 * thread is interrupted (interrupt status is restored before returning).
	 */
	public static void initInjiKafkaProducer() {
		Properties props = new Properties();
		// NOTE: the legacy 0.8-era "metadata.broker.list" key was removed; the
		// new-API KafkaProducer only understands bootstrap.servers.
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
				"org.apache.kafka.common.serialization.StringSerializer");
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
				"org.apache.kafka.common.serialization.StringSerializer");
		logger.info("开始生产消息...");

		// The payload never changes between iterations, so build and serialize
		// it once instead of on every send.
		String[] selects = {"order_id", "resp_content", "api_code"};
		String[] toDFs = {"orderId", "respContent", "apiCode"};
		SparkApiLogReq req = new SparkApiLogReq();
		req.setApiCode("bfangCreditQuery");
		req.setFromTable("tp_api_log");
		req.setSelectList(Arrays.asList(selects));
		req.setWhereStr("api_code ='bfangCreditQuery'");
		req.setUserId("212611");
		req.setToDFList(Arrays.asList(toDFs));
		String apiReqJson = JsonMapper.buildNonDefaultMapper().toJson(req);

		// KafkaProducer is AutoCloseable; try-with-resources guarantees its
		// network resources are released if the loop exits or an exception
		// escapes.
		try (KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props)) {
			while (true) {
				producer.send(new ProducerRecord<String, String>(TOPIC, apiReqJson));
				try {
					Thread.sleep(3000);
				} catch (InterruptedException e) {
					// Restore the interrupt flag and stop the loop so the
					// thread remains responsive to interruption.
					Thread.currentThread().interrupt();
					logger.warn("producer loop interrupted, shutting down", e);
					break;
				}
			}
		}
	}
}
