package com.qyer.search.storm.bolt;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import com.qyer.search.storm.logging.ErrorLogger;
import com.qyer.search.storm.logging.InfoLogger;
import com.qyer.search.storm.logging.TimeCostLogger;
import com.qyer.search.storm.logging.WarnLogger;
import backtype.storm.metric.api.CountMetric;

import com.qyer.search.storm.util.ConfigUtils;
import org.apache.commons.lang.StringUtils;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;

import java.util.Map;

/**
 * Created by tangzheng on 16/2/22.
 */
/**
 * Bolt that takes raw Kafka message strings, validates that they are
 * well-formed JSON, and re-emits the valid ones on
 * {@link ConfigUtils#SPOUT_STREAM} tagged with a per-task message id.
 *
 * <p>Invalid payloads are logged and acked (they can never become valid,
 * so replaying them would only loop forever). Emit failures are logged
 * and the tuple is failed so Storm can replay it.
 *
 * Created by tangzheng on 16/2/22.
 */
public class KafkaFormatBolt implements IRichBolt {

  private OutputCollector _outputCollector;
  private TopologyContext _topologyContext;
  private int _taskId;
  private String _componentId;

  // Per-purpose file loggers; all initialised in prepare().
  private InfoLogger dataLog;
  private InfoLogger infoLogger;
  private InfoLogger processLogger;
  private ErrorLogger errorLogger;
  private WarnLogger failLogger;
  private WarnLogger successLogger;

  // Monotonically increasing message id, seeded per task id so that ids
  // emitted by different task instances do not collide.
  private int _msgCount;

  // Metrics objects are not serialisable; recreated in prepare() via initMetrics().
  transient CountMetric _countMetric;

  /**
   * Storm lifecycle hook: caches task context, seeds the message counter,
   * registers metrics and opens the per-task log files.
   */
  @Override public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    _topologyContext = context;
    _outputCollector = collector;
    _taskId = context.getThisTaskId();
    _componentId = context.getThisComponentId();
    // Seed so each task's message ids live in a disjoint 10000-wide band.
    _msgCount = _taskId * 10000;
    initMetrics(context);

    // Log file layout: one directory per topology/component/task.
    String directory = "/Data/search/KafkaFormatBolt/";
    String filename = directory + _topologyContext.getStormId()+"/"
      + _topologyContext.getThisComponentId() +"/"+_topologyContext.getThisTaskId();
    // BUG FIX: logger name was built from JDBCBolt.class.getSimpleName()
    // (copy-paste from another bolt); use this bolt's own class name.
    String loggername = KafkaFormatBolt.class.getSimpleName() + "@" + _topologyContext.getThisTaskId();
    dataLog = new InfoLogger(filename + "_data", loggername + "_data");
    infoLogger = new InfoLogger(filename + "_info", loggername + "_info");
    errorLogger = new ErrorLogger(filename +"_error", loggername + "_error");
    successLogger = new WarnLogger(filename + "_success", loggername + "_success");
    failLogger = new WarnLogger(filename + "_fail", loggername + "_fail");
    // BUG FIX: second argument was "filename + _process" (a file path) instead
    // of the logger name, unlike every other logger constructed above.
    processLogger = new InfoLogger(filename + "_process", loggername + "_process");
  }

  /**
   * Validates the tuple's payload as JSON and re-emits it, anchored to the
   * input tuple, on {@link ConfigUtils#SPOUT_STREAM} with the next message id.
   */
  @Override public void execute(Tuple input) {
    String payload = input.getString(0).trim();
    infoLogger.info("get data:{}", payload);
    updateMetrics(payload);
    if(!isGoodJson(payload)){
      errorLogger.info("bad data with invalid format:{}", payload);
      // BUG FIX: ack malformed tuples. Returning without ack/fail left them
      // pending until the tuple timeout, so Storm replayed permanently bad
      // data forever. Acking drops them after logging.
      _outputCollector.ack(input);
      return;
    }
    dataLog.info("IN:{}", payload);
    try {
      _outputCollector.emit(ConfigUtils.SPOUT_STREAM, input, ConfigUtils.getSpoutValuse2(payload, _msgCount));
      _outputCollector.ack(input);
      infoLogger.info("emit data:{},msgCount:{}", payload, _msgCount);
      _msgCount++;
    }catch (Exception e){
      errorLogger
        .info("msgId:{};kafkamessage:{} emit failed", _msgCount, payload);
      // Fail the tuple so the spout replays it.
      _outputCollector.fail(input);
    }
  }

  @Override public void cleanup() {

  }

  /** Declares the output stream and its field names for downstream bolts. */
  @Override public void declareOutputFields(OutputFieldsDeclarer declarer) {
    declarer.declareStream(
      ConfigUtils.SPOUT_STREAM,new Fields("s_index","s_type","s_eid","s_id","s_stage"));
  }

  @Override public Map<String, Object> getComponentConfiguration() {
    return null;
  }

  /** Registers an execute-count metric reported every 60 seconds. */
  void initMetrics(TopologyContext context)
  {
    _countMetric = new CountMetric();

    context.registerMetric("execute_count", _countMetric, 60);

  }

  /**
   * Returns {@code true} iff {@code json} is non-blank and parses as JSON.
   *
   * <p>Note: Gson's lenient parser accepts some non-strict inputs (e.g. bare
   * words), so this is a best-effort sanity check, not strict validation.
   */
  public static boolean isGoodJson(String json) {
    if (StringUtils.isBlank(json)) {
      return false;
    }
    try {
      new JsonParser().parse(json);
      return true;
    } catch (JsonParseException e) {
      return false;
    }
  }

  /** Bumps the execute counter; the payload itself is not inspected. */
  void updateMetrics(String word)
  {
    _countMetric.incr();
  }
}
