package com.sf.shiva.oms.jstorm.bolt;

import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sf.shiva.oms.jstorm.common.utils.SpringContext;
import com.sf.shiva.oms.jstorm.common.enumtype.FieldKeyEnum;
import com.sf.shiva.oms.jstorm.entity.demo.PackageStatusEntity;
import com.sf.shiva.oms.jstorm.service.datasend.SendKakfaService;
import com.sf.shiva.oms.jstorm.service.datasend.impl.SendKafkaServiceImpl;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;

/**
 * 
 * Description: bolt that forwards processed package-status entities to Kafka.
 *
 * <pre>HISTORY
 * ****************************************************************************
 *  ID   DATE           PERSON          REASON
 *  1    2018年4月3日      01369626         Create
 * ****************************************************************************
 * </pre>
 * @author 01369626
 * @since 1.0
 */
public class SendKafkaBolt implements IRichBolt {

    private static final long serialVersionUID = 1L;

    private static final Logger logger = LoggerFactory.getLogger(SendKafkaBolt.class);

    /** Storm output collector used to ack/fail tuples; assigned in {@link #prepare}. */
    private transient OutputCollector collector;

    /**
     * Kafka-sending service resolved from the Spring context in {@link #prepare}.
     * NOTE(review): "SendKakfaService" is a typo in the project-declared interface
     * name ("Kakfa" vs "Kafka") — renaming it would break other callers, so it is
     * kept as-is here.
     */
    private transient SendKakfaService sendKafkaServiceImpl;

    /**
     * Initializes the bolt: captures the output collector and looks up the
     * Kafka-sending service bean from the shared Spring context.
     *
     * @param stormConf topology configuration map supplied by Storm
     * @param context   topology context for this bolt instance
     * @param collector collector used to emit/ack/fail tuples
     */
    @Override
    public void prepare(@SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
        this.sendKafkaServiceImpl = SpringContext.getInstance().getBean(SendKafkaServiceImpl.class);
    }

    /**
     * Reads the package-status entity from the tuple's CUSTOM_FIELD_2 field and
     * hands it to the Kafka-sending service. The tuple is acked on success
     * (including when the field value is null, i.e. nothing to send); any
     * exception — including a ClassCastException from an unexpected field type —
     * is logged and the tuple is failed so upstream can replay it.
     *
     * @param input the tuple to process
     */
    @Override
    public void execute(Tuple input) {
        try {
            Object value = input.getValueByField(FieldKeyEnum.CUSTOM_FIELD_2.getKey());
            if (value != null) {
                sendKafkaServiceImpl.send((PackageStatusEntity) value);
            }
            // Successfully handled (or nothing to send) — acknowledge the tuple.
            collector.ack(input);
        } catch (Exception e) {
            logger.error("SendKafkaBolt execute error.", e);
            collector.fail(input);
        }
    }

    /**
     * Called when the topology is shut down; no resources to release here.
     */
    @Override
    public void cleanup() {
        // Intentionally empty: this bolt holds no resources of its own.
    }

    /**
     * This bolt is a terminal sink and emits no output stream, so no fields
     * are declared.
     */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // No output fields: tuples terminate at this bolt.
    }

    /**
     * No component-specific configuration overrides.
     *
     * @return {@code null}, meaning the topology-level configuration applies
     */
    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }

}
