package com.qyer.search.storm.bolt;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.MessageId;
import backtype.storm.tuple.Tuple;
import com.qyer.search.storm.ESClient.ESClientHolder;
import com.qyer.search.storm.Model.ProductEntity;
import com.qyer.search.storm.exception.DataNotMatchException;
import com.qyer.search.storm.logging.ErrorLogger;
import com.qyer.search.storm.logging.InfoLogger;
import com.qyer.search.storm.logging.TimeCostLogger;
import com.qyer.search.storm.logging.WarnLogger;
import com.qyer.search.storm.util.ConfigUtils;
import com.qyer.search.storm.util.EntityTags;
import com.qyer.search.storm.util.ZTag2;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.joda.time.DateTime;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.List;

/**
 * Created by tangzheng on 16/7/8.
 */
/**
 * Terminal bolt that joins three tuple streams (product info, entity tags, z-tags)
 * on a shared integer msgId and bulk-indexes the merged documents into Elasticsearch.
 *
 * <p>A tuple on each stream carries: (0) index name, (1) type name, (2) a per-product
 * payload map, (3) the join msgId. Whichever stream arrives last for a given msgId
 * triggers {@link #index}; the earlier arrivals are buffered in the pending maps.
 * Storm executes a bolt task single-threaded, so the plain fields/counters are safe.
 */
public class ZIndexBolt implements IRichBolt {

  private static final long serialVersionUID = -1L;

  private Map<String, Object> _config;
  private TopologyContext _topologyContext;
  private OutputCollector _outputCollector;
  private int _taskId;
  private String _componentId;
  private String _boltPrefix;
  private DateTime _boltDateTime;
  private String _boltTimeStr;
  // Renamed from "_boleMilliSecond" (typo); private, so no external impact.
  private long _boltMilliSecond;
  // Log prefix "<boltPrefix>,TaskId:<id>,ComponentId:<id>" built in prepare().
  private String _prefix;

  // Source stream ids this bolt joins on. _ztagStream stays null when the
  // 3-arg constructor is used — execute() must therefore null-check it.
  private String _tagStream;
  private String _infoStream;
  private String _ztagStream;

  // Monotonic per-task counters, used only for log lines.
  private int _needIndex;
  private int _successIndex;
  private int _failIndex;

  // Pending halves of the msgId join: payloads plus their anchor tuples,
  // kept so the companions can be acked once the join completes.
  private Map<Integer, HashMap<Integer, EntityTags>> _tagsMap;
  private Map<Integer, HashMap<Integer, ProductEntity>> _infoMap;
  private Map<Integer, HashMap<Integer, List<ZTag2>>> _ztagMap;
  private Map<Integer, Tuple> _tagsTupleMap;
  private Map<Integer, Tuple> _infoTupleMap;
  private Map<Integer, Tuple> _ztagTupleMap;

  private Client _client;
  private BulkRequestBuilder _bulkRequest;

  // Target index/type; refreshed from the most recent tuple in execute().
  private String _index;
  private String _type;
  private String _version;
  private boolean _refresh;

  private InfoLogger dataLog;
  private InfoLogger infoLogger;
  private InfoLogger processLogger;
  private ErrorLogger errorLogger;
  private WarnLogger failLogger;
  private WarnLogger successLogger;
  private TimeCostLogger timeCostLogger;

  private ESClientHolder _ESClientHolder;

  /**
   * Legacy two-stream constructor. Leaves {@code _ztagStream} null,
   * {@code _version} null and {@code _refresh} false — identical state to the
   * full constructor called with those defaults.
   */
  public ZIndexBolt(String boltPrefix, String tagStream, String infoStream) {
    this(boltPrefix, tagStream, infoStream, null, null, false);
  }

  /**
   * @param boltPrefix label prepended to every log line from this task
   * @param tagStream  source stream id carrying EntityTags payloads
   * @param infoStream source stream id carrying ProductEntity payloads
   * @param ztagStream source stream id carrying ZTag2 payloads (may be null)
   * @param version    value stamped into each document via setVersion
   * @param refresh    whether the ES bulk request forces an index refresh
   */
  public ZIndexBolt(String boltPrefix, String tagStream, String infoStream,
                    String ztagStream, String version, boolean refresh) {
    _needIndex = 0;
    _successIndex = 0;
    _failIndex = 0;

    _boltPrefix = boltPrefix;
    _boltMilliSecond = 0;
    _taskId = -1;
    _componentId = "undefined".toUpperCase();
    _prefix = "TaskId:-1,ComponentId:UNDEFINED";
    _tagStream = tagStream;
    _infoStream = infoStream;
    _ztagStream = ztagStream;
    _version = version;
    _refresh = refresh;
  }

  /**
   * Creates the per-task loggers, the pending-join maps and the shared ES client.
   */
  @Override public void prepare(Map map, TopologyContext topologyContext,
                                OutputCollector outputCollector) {

    String directory = "/Data/search/ZIndexBolt/";
    String filename = directory + topologyContext.getStormId() + "/"
      + topologyContext.getThisComponentId() + "/" + topologyContext.getThisTaskId();
    // BUGFIX: was IndexBolt.class (copy-paste from the sibling bolt), which
    // misattributed every log line; use this class's own name.
    String loggername = ZIndexBolt.class.getSimpleName() + "@" + topologyContext.getThisTaskId();

    // (Dropped the unused "_tmp" InfoLogger local; it only created an empty log file.)
    dataLog = new InfoLogger(filename + "_data", loggername + "_data");
    infoLogger = new InfoLogger(filename + "_info", loggername + "_info");
    errorLogger = new ErrorLogger(filename + "_error", loggername + "_error");
    timeCostLogger = new TimeCostLogger(filename + "_timecost", loggername + "_timecost");
    successLogger = new WarnLogger(filename + "_success", loggername + "_success");
    failLogger = new WarnLogger(filename + "_fail", loggername + "_fail");
    processLogger = new InfoLogger(filename + "_process", loggername + "_process");

    _config = (Map<String, Object>) map;
    _topologyContext = topologyContext;
    _outputCollector = outputCollector;
    _taskId = topologyContext.getThisTaskId();
    _componentId = topologyContext.getThisComponentId();
    _prefix = _boltPrefix + ",TaskId:" + _taskId + ",ComponentId:" + _componentId;

    _tagsMap = new HashMap<>();
    _infoMap = new HashMap<>();
    _ztagMap = new HashMap<>();
    _tagsTupleMap = new HashMap<>();
    _infoTupleMap = new HashMap<>();
    _ztagTupleMap = new HashMap<>();

    try {
      String storminfo = topologyContext.getStormId() + "_"
        + topologyContext.getThisComponentId() + "_" + topologyContext.getThisTaskId();
      _ESClientHolder = ESClientHolder.getInstance(_config, _boltPrefix, storminfo);
      _client = _ESClientHolder.getClient();
    } catch (Exception e) {
      // BUGFIX: was an empty catch — a failed client init left _client null and
      // every later index() call failing with no explanation.
      // NOTE(review): assumes ErrorLogger exposes the same info(fmt, args...)
      // API as InfoLogger/WarnLogger — confirm against its declaration.
      errorLogger.info("{},ES client init failed:{}", _prefix, e);
    }
    currentTime(_prefix + "IndexBolt prepare");
  }

  /**
   * Buffers the incoming tuple's payload under its msgId, or — when it completes
   * the three-way join — merges and indexes immediately via {@link #index}.
   */
  @Override public void execute(Tuple tuple) {
    MessageId messageId = tuple.getMessageId();
    infoLogger.info("MESSAGEID:{}", messageId.toString());

    String streamId = tuple.getSourceStreamId();
    if (_tagStream.equals(streamId)) {
      _needIndex++;
      _index = tuple.getString(0);
      _type = tuple.getString(1);
      HashMap<Integer, EntityTags> tagsMap = (HashMap<Integer, EntityTags>) tuple.getValue(2);
      int msgId = tuple.getInteger(3);
      if (_infoMap.containsKey(msgId) && _ztagMap.containsKey(msgId)) {
        index(msgId, _infoMap.get(msgId), tagsMap, _ztagMap.get(msgId), tuple, 1);
      } else {
        _tagsMap.put(msgId, tagsMap);
        _tagsTupleMap.put(msgId, tuple);
      }
    } else if (_infoStream.equals(streamId)) {
      _needIndex++;
      _index = tuple.getString(0);
      _type = tuple.getString(1);
      HashMap<Integer, ProductEntity> infoMap = (HashMap<Integer, ProductEntity>) tuple.getValue(2);
      int msgId = tuple.getInteger(3);
      /*******************************************************************/
      dataLog.info("info stream:{},msgId:{},index:{},type:{}", ConfigUtils.IN, msgId, _index, _type);
      /*******************************************************************/

      if (_tagsMap.containsKey(msgId) && _ztagMap.containsKey(msgId)) {
        index(msgId, infoMap, _tagsMap.get(msgId), _ztagMap.get(msgId), tuple, 2);
      } else {
        _infoMap.put(msgId, infoMap);
        _infoTupleMap.put(msgId, tuple);
      }
    } else if (_ztagStream != null && _ztagStream.equals(streamId)) {
      // BUGFIX: null-guard — with the 3-arg constructor _ztagStream is null and
      // the original comparison threw an NPE for any non-tag/non-info tuple.
      _needIndex++;
      _index = tuple.getString(0);
      _type = tuple.getString(1);
      HashMap<Integer, List<ZTag2>> ztagMap = (HashMap<Integer, List<ZTag2>>) tuple.getValue(2);
      int msgId = tuple.getInteger(3);
      if (_infoMap.containsKey(msgId) && _tagsMap.containsKey(msgId)) {
        index(msgId, _infoMap.get(msgId), _tagsMap.get(msgId), ztagMap, tuple, 3);
      } else {
        _ztagMap.put(msgId, ztagMap);
        _ztagTupleMap.put(msgId, tuple);
      }
    } else {
      // ROBUSTNESS: previously tuples from unrecognized streams were neither
      // acked nor failed and only recovered via the topology timeout;
      // fail explicitly so replay happens promptly.
      failLogger.info("unknown stream:{},tuple failed.", streamId);
      _outputCollector.fail(tuple);
    }
  }

  /** Releases the shared ES client on worker shutdown. */
  @Override public void cleanup() {
    if (_ESClientHolder != null) {
      _ESClientHolder.shutdown();
    }
  }

  /** Terminal bolt: emits nothing, so no output fields are declared. */
  @Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
  }

  @Override public Map<String, Object> getComponentConfiguration() {
    return null;
  }

  /**
   * Refreshes the bolt's timestamp fields to "now".
   * NOTE(review): {@code tag} is currently unused and nothing outside this class
   * reads the fields; parameter kept for interface compatibility.
   */
  public void currentTime(String tag) {
    _boltDateTime = DateTime.now();
    _boltMilliSecond = _boltDateTime.getMillis();
    _boltTimeStr = _boltDateTime.toString(ConfigUtils.DATE_FORMAT);
  }

  /**
   * Merges tags/z-tags into each ProductEntity and sends one ES bulk request.
   * On success acks the triggering tuple plus the two buffered companions;
   * on failure fails the trigger (so it replays) but still acks the companions.
   * The pending maps are always cleared for this msgId.
   *
   * @param msgId join key shared by the three tuples
   * @param eim   product id -> entity payload
   * @param etm   product id -> tags payload
   * @param ztags product id -> z-tag list payload
   * @param tuple the tuple that completed the join
   * @param from  which stream completed it: 1 = tags, 2 = info, 3 = z-tags
   */
  public void index(int msgId, Map<Integer, ProductEntity> eim, Map<Integer, EntityTags> etm,
                    Map<Integer, List<ZTag2>> ztags, Tuple tuple, int from) {
    try {
      int _bulkSize = 0;
      _bulkRequest = _client.prepareBulk();

      Iterator<Map.Entry<Integer, ProductEntity>> entries = eim.entrySet().iterator();
      while (entries.hasNext()) {
        Map.Entry<Integer, ProductEntity> entry = entries.next();
        int lid = entry.getKey();
        ProductEntity ei = entry.getValue();

        // Missing tags / missing-or-empty z-tag lists are normalized to null
        // so they are omitted from the serialized document.
        EntityTags tags = etm.get(lid);
        ei.setTags(tags == null ? null : tags.getTags());

        List<ZTag2> zt = ztags.get(lid);
        ei.setZ_tags(zt == null || zt.isEmpty() ? null : zt);

        ei.setVersion(_version);
        //增加自定义序列化 (custom Gson serialization)
        String source = ConfigUtils.DEFAULT_GSON.toJson(ei);

        _bulkRequest.add(_client.prepareIndex(_index, _type, String.valueOf(lid)).setSource(source));
        processLogger.info("msgId:{},product id:{}-->prepare to index.", msgId, lid);
        dataLog.info("{},msgId:{},product id:{}-->prepare to index.", ConfigUtils.OUT, msgId, lid);
      }

      // Empty payload: nothing to send, but the tuples still count as handled.
      if ((_bulkSize = _bulkRequest.numberOfActions()) == 0) {
        _successIndex++;
        processLogger.info("msgId:{} index successfully,total index count:{},size = 0.", msgId, _successIndex);
        _outputCollector.ack(tuple);
        ackCompanionTuples(msgId, from);
        return;
      }

      BulkResponse bulkResponse = _bulkRequest.setRefresh(_refresh).get();

      if (bulkResponse.hasFailures()) {
        _failIndex++;
        failLogger.info("failIndex total:{}", _failIndex);
        Iterator<BulkItemResponse> iter = bulkResponse.iterator();
        while (iter.hasNext()) {
          BulkItemResponse itemResponse = iter.next();
          if (itemResponse.isFailed()) {
            failLogger.info("msgId:{},fail info:{}", msgId, itemResponse.getFailureMessage());
          }
        }
        // Back off before the tuple replays; restore the interrupt flag instead
        // of letting InterruptedException vanish into the broad catch below.
        try {
          Thread.sleep(5000);
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
        }
        throw new DataNotMatchException("Index failed.");
      } else {
        _successIndex++;
        processLogger.info("msgId:{} index successfully,total index count:{} size:{} and bulk response:{}."
          , msgId, _successIndex, _bulkSize, bulkResponse);
        _outputCollector.ack(tuple);
        ackCompanionTuples(msgId, from);
      }
    } catch (Exception e) {
      // BUGFIX: bulk failures were counted twice — once in the hasFailures()
      // branch and again here after it threw DataNotMatchException.
      if (!(e instanceof DataNotMatchException)) {
        _failIndex++;
      }
      // NOTE(review): assumes ErrorLogger exposes info(fmt, args...) like the
      // other loggers — confirm; replaces the raw printStackTrace().
      errorLogger.info("msgId:{},index exception:{}", msgId, e);
      _outputCollector.fail(tuple);
      // NOTE(review): companions are ACKED even though the trigger is failed,
      // so only one of the three streams replays — preserved as-is, but verify
      // this partial-replay behavior is intentional.
      ackCompanionTuples(msgId, from);
    } finally {
      _infoMap.remove(msgId);
      _infoTupleMap.remove(msgId);
      _tagsMap.remove(msgId);
      _tagsTupleMap.remove(msgId);
      _ztagMap.remove(msgId);
      _ztagTupleMap.remove(msgId);
    }
  }

  /**
   * Acks the two buffered companion tuples of the join identified by msgId.
   * {@code from} names the stream whose tuple triggered the join (1 = tags,
   * 2 = info, otherwise z-tags); that tuple is acked/failed by the caller.
   */
  private void ackCompanionTuples(int msgId, int from) {
    if (from == 1) {
      _outputCollector.ack(_infoTupleMap.get(msgId));
      _outputCollector.ack(_ztagTupleMap.get(msgId));
    } else if (from == 2) {
      _outputCollector.ack(_tagsTupleMap.get(msgId));
      _outputCollector.ack(_ztagTupleMap.get(msgId));
    } else {
      _outputCollector.ack(_infoTupleMap.get(msgId));
      _outputCollector.ack(_tagsTupleMap.get(msgId));
    }
  }
}
