package com.dmp.storm.bolt.kafka;

import static com.dmp.util.Constant.DATA_SEPARATOR_DEFAULT;
import static com.dmp.util.Constant.FIELD_SEPARATOR_DEFAULT;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.commons.lang3.StringUtils;

import storm.kafka.bolt.selector.KafkaTopicSelector;


/**
 * @ClassName BaseLogsToKafkaBolt
 * @Description Batch bolt that joins configured tuple fields into
 *              separator-delimited lines and sends them to a Kafka topic.
 *
 * @author xiexc
 * @date March 15, 2017 7:49:23 PM
 * @version 1.0
 *
 */
public class BaseLogsToKafkaBolt extends AbstractBatchTickBolt {

	private static final long serialVersionUID = 1L;

	// Shared across all task instances in this worker JVM; created once in
	// prepare(). NOTE(review): kept as a raw type because the project's
	// KafkaProducer wrapper (provides batchSend) is declared elsewhere.
	private static KafkaProducer producer = null;

	// Ordered tuple keys to serialize, derived from 'fields' in prepare().
	// Kept public static for backward compatibility with existing callers;
	// being static, all bolt instances in one worker share the same field list.
	public static String[] fieldNames;

	// Target Kafka topic for batchSend().
	private String topic;
	// Classpath location of the producer Properties file.
	private String propPath;
	// Optional topic selector; not consulted by this bolt's process loop.
	private KafkaTopicSelector topicSelector;
	// Field-separator-delimited list of tuple keys to serialize, in order.
	private String fields;
	// Separator placed between serialized field values.
	private String separator = DATA_SEPARATOR_DEFAULT;
	// Separator used to split 'fields' into fieldNames.
	private String fieldSeparator = FIELD_SEPARATOR_DEFAULT;

	public String getTopic() {
		return topic;
	}

	public void setTopic(String topic) {
		this.topic = topic;
	}

	public String getPropPath() {
		return propPath;
	}

	public void setPropPath(String propPath) {
		this.propPath = propPath;
	}

	public KafkaTopicSelector getTopicSelector() {
		return topicSelector;
	}

	public void setTopicSelector(KafkaTopicSelector topicSelector) {
		this.topicSelector = topicSelector;
	}

	public String getFields() {
		return fields;
	}

	public void setFields(String fields) {
		this.fields = fields;
	}

	public String getSeparator() {
		return separator;
	}

	public void setSeparator(String separator) {
		this.separator = separator;
	}

	public String getFieldSeparator() {
		return fieldSeparator;
	}

	public void setFieldSeparator(String fieldSeparator) {
		this.fieldSeparator = fieldSeparator;
	}

	/**
	 * Loads the producer configuration from the classpath, lazily creates the
	 * shared producer, splits {@link #fields} into {@link #fieldNames}, and
	 * un-escapes the configured separator.
	 *
	 * @throws IllegalStateException if the properties resource is missing or
	 *                               unreadable — failing fast here is better than
	 *                               the NPE every batch would hit later with a
	 *                               null producer.
	 */
	@Override
	public void prepare() {
		super.prepare();
		if (producer == null) {
			Properties props = new Properties();
			// try-with-resources: the original leaked this stream and also
			// swallowed load failures, leaving producer null.
			try (InputStream in = this.getClass().getClassLoader()
					.getResourceAsStream(propPath)) {
				if (in == null) {
					throw new IllegalStateException(
							"Producer properties not found on classpath: " + propPath);
				}
				props.load(in);
			} catch (IOException e) {
				throw new IllegalStateException(
						"Failed to load producer properties from " + propPath, e);
			}
			producer = new KafkaProducer(props);
		}
		if (StringUtils.isNotBlank(fields)) {
			fieldNames = StringUtils.split(fields, fieldSeparator);
		}
		// Topology config delivers separators as literal escape text; map the two
		// supported forms to their real characters.
		if ("\\001".equals(this.separator)) {
			this.separator = "\001";
		} else if ("\\t".equals(this.separator)) {
			this.separator = "\t";
		}
	}

	/**
	 * Serializes each tuple of the batch into one delimited line and sends the
	 * whole batch to {@link #topic}.
	 *
	 * @param tupleList batch of tuples keyed by field name; may be null or empty
	 * @return always null (this bolt emits nothing downstream)
	 */
	@Override
	public Map<String, Object> process(List<Map<String, Object>> tupleList) {
		if (tupleList == null || tupleList.isEmpty()) {
			return null;
		}
		if (fieldNames == null || fieldNames.length == 0) {
			// prepare() never derived field names (blank 'fields'); the original
			// code NPE'd here instead of skipping the batch.
			return null;
		}
		List<Object> sendList = new ArrayList<>(tupleList.size());
		for (Map<String, Object> tuple : tupleList) {
			sendList.add(serialize(tuple));
		}
		producer.batchSend(topic, sendList);
		return null;
	}

	/**
	 * Joins the configured fields of one tuple into a single separator-delimited
	 * line. Null/empty values after the first field default to "0"; a null first
	 * field is rendered as "" (the original code threw an NPE in that case).
	 */
	private String serialize(Map<String, Object> tuple) {
		StringBuilder data = new StringBuilder();
		for (int i = 0; i < fieldNames.length; i++) {
			Object value = tuple.get(fieldNames[i]);
			if (i == 0) {
				data.append(value == null ? "" : value.toString());
			} else {
				data.append(this.separator);
				if (value == null || value.equals("")) {
					data.append("0");
				} else {
					data.append(value.toString());
				}
			}
		}
		return data.toString();
	}

}
