package demo.utils;

import demo.kafka.XdcsSparkKafkaProducer;
import demo.vo.PlayStatResultModel;
import demo.vo.PlayStatTypeEnum;
import demo.vo.StatDataPoint;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @author mandy.hu
 */
@SuppressWarnings("serial")
public class XdcsSparkUtils implements Serializable {

    private static final Logger logger = LoggerFactory.getLogger(XdcsSparkUtils.class);

    /**
     * Concatenates two string arrays into a new array.
     *
     * <p>Pure-JDK replacement for the legacy commons-lang (v2)
     * {@code ArrayUtils.addAll}, preserving its null semantics:
     * {@code null + null -> null}, {@code null + a -> clone(a)},
     * {@code a + null -> clone(a)}.
     *
     * @param source        first array, may be null
     * @param anotherSource second array, may be null
     * @return a newly allocated array holding the elements of {@code source}
     *         followed by those of {@code anotherSource}; null only when both
     *         inputs are null
     */
    public static String[] merge(String[] source, String[] anotherSource) {
        if (source == null) {
            return anotherSource == null ? null : anotherSource.clone();
        }
        if (anotherSource == null) {
            return source.clone();
        }
        String[] joined = new String[source.length + anotherSource.length];
        System.arraycopy(source, 0, joined, 0, source.length);
        System.arraycopy(anotherSource, 0, joined, source.length, anotherSource.length);
        return joined;
    }

    /**
     * Removes every entry whose value is null or the empty string (the same
     * predicate as commons-lang3 {@code StringUtils.isEmpty}), mutating the
     * given map in place.
     *
     * @param map map to clean; must not be null
     * @return the same map instance, for call chaining
     */
    public static Map<String, String> removeEmpty(Map<String, String> map) {
        // removeIf replaces the manual Iterator loop; still an in-place mutation.
        map.entrySet().removeIf(entry -> entry.getValue() == null || entry.getValue().isEmpty());
        return map;
    }

    /**
     * Serializes one trace-annotated data point as a "::"-delimited record and
     * appends it to the configured data-point output file.
     *
     * @param traceId             distributed-trace id
     * @param spanId              span id within the trace
     * @param callerSpanTimeStamp caller-side span timestamp
     * @param calleeSpanTimeStamp callee-side span timestamp
     * @param metric              metric name
     * @param timestamp           data-point timestamp
     * @param value               data-point value
     * @param tags                tag key/value pairs; rendered via Map.toString()
     */
    public static void write(String traceId, String spanId, long callerSpanTimeStamp, long calleeSpanTimeStamp,
                             String metric, long timestamp, long value, Map<String, String> tags) {

        StringBuilder record = new StringBuilder();
        record.append(metric).append("::")
                .append(timestamp).append("::")
                .append(value).append("::")
                .append(tags.toString()).append("::")
                .append(traceId).append("::")
                .append(spanId).append("::")
                .append(callerSpanTimeStamp).append("::")
                .append(calleeSpanTimeStamp);
        XdcsSparkFileUtils.append(record.toString(), XdcsSparkConfig.outputFileOfDataPoint());
    }

    /**
     * Appends a "::"-delimited data point record.
     *
     * <p>NOTE(review): unlike the trace overload, the append target here is the
     * metric name itself rather than the configured output file — presumably
     * intentional (one file per metric), but worth confirming.
     *
     * @param metric    metric name (also used as the append target)
     * @param timestamp data-point timestamp, preformatted
     * @param value     data-point value, preformatted
     * @param tags      tag key/value pairs; rendered via Map.toString()
     */
    public static void write(String metric, String timestamp, String value, Map<String, String> tags) {
        String record = metric + "::" + timestamp + "::" + value + "::" + tags.toString();
        XdcsSparkFileUtils.append(record, metric);
    }


    /**
     * Parses a pipe-delimited log key into tag key/value pairs and emits the
     * aggregated count as a log stat point.
     *
     * @param key                 expected shape "app|host|level|logType|api|classify"
     *                            (exactly 6 fields; blank keys and any other shape
     *                            are silently ignored)
     * @param playStatResultModel aggregation result supplying metric name, stat
     *                            time and count
     */
    public static void dealWithLogResult(String key, PlayStatResultModel playStatResultModel) {
        if (StringUtils.isBlank(key)) {
            return;
        }
        String[] fields = key.split("\\|");
        if (fields.length != 6) {
            // Keys that do not carry all six tag fields produce no point.
            return;
        }
        Map<String, String> tags = new HashMap<String, String>();
        tags.put(XdcsSparkConstant.TAG_APP, fields[0]);
        tags.put(XdcsSparkConstant.TAG_HOST, fields[1]);
        tags.put(XdcsSparkConstant.TAG_LEVELE, fields[2]);
        tags.put(XdcsSparkConstant.TAG_LOG_TYPE, fields[3]);
        tags.put(XdcsSparkConstant.TAG_API, fields[4]);
        tags.put(XdcsSparkConstant.LOG_CLASSIFY_TYPE, fields[5]);
        addLogPoint(playStatResultModel.getPlayStatTypeEnum().getMetric(),
                playStatResultModel.getStatTime(), playStatResultModel.getCount(), tags);
    }


    /**
     * Emits a log stat point to the configured sinks (file and/or Kafka).
     *
     * @param metric     metric name
     * @param timestamp  stat timestamp
     * @param value      aggregated count; negative values are discarded
     * @param tags       tag key/value pairs attached to the point
     * @param sampleTime optional sampling pair {sampleClientTime, sampleServerTime};
     *                   only zero or exactly two values are accepted
     */
    public static void addLogPoint(String metric, long timestamp, long value, Map<String, String> tags, long... sampleTime) {
        if (value < 0) {
            return;
        }
        if (XdcsSparkConfig.isOutputToFile()) {
            XdcsSparkUtils.write(metric, String.valueOf(timestamp), String.valueOf(value), tags);
        }
        if (XdcsSparkConfig.isDataPointToKafka()) {
            if (sampleTime.length == 0) {
                // Parameterized logging instead of string concatenation.
                logger.debug("write file {}", metric);
                XdcsSparkKafkaProducer.getInstance().sendLog(new StatDataPoint(metric, timestamp, value, tags));
            } else if (sampleTime.length == 2) {
                long sampleClientTime = sampleTime[0];
                long sampleServerTime = sampleTime[1];
                XdcsSparkKafkaProducer.getInstance().sendLog(
                        new StatDataPoint(metric, timestamp, value, tags, sampleClientTime, sampleServerTime));
            } else {
                // Previously points with an unexpected sampleTime arity were dropped
                // silently; surface the drop so misuse is visible.
                logger.warn("addLogPoint: unexpected sampleTime length {} for metric {}, point dropped",
                        sampleTime.length, metric);
            }
        }
    }

    /**
     * Parses a pipe-delimited play-stat key into tags and emits the aggregated
     * count as a stat point.
     *
     * <p>Key shapes: a single field is interpreted as an IP or a track id
     * depending on the model's stat type; four fields are
     * "trackId|ip|isMobile|userId". Any other shape yields a point with no tags.
     *
     * @param key                 pipe-delimited grouping key; blank keys are ignored
     * @param playStatResultModel aggregation result supplying metric name, stat
     *                            time and count; null is now ignored instead of
     *                            throwing NPE
     */
    public static void dealWithResult(String key, PlayStatResultModel playStatResultModel) {
        // Bug fix: playStatResultModel was only null-checked inside the
        // length==1 branch but dereferenced unconditionally at the end,
        // which threw NullPointerException for null models. Guard up front.
        if (StringUtils.isBlank(key) || playStatResultModel == null) {
            return;
        }
        String[] keysStrings = key.split("\\|");
        String ip = null;
        String trackId = null;
        String isMobile = null;
        String userId = null;
        if (keysStrings.length == 1) {
            if (playStatResultModel.getPlayStatTypeEnum() == PlayStatTypeEnum.IP) {
                ip = key;
            } else if (playStatResultModel.getPlayStatTypeEnum() == PlayStatTypeEnum.TRACK) {
                trackId = key;
            }
        } else if (keysStrings.length == 4) {
            trackId = keysStrings[0];
            ip = keysStrings[1];
            isMobile = keysStrings[2];
            userId = keysStrings[3];
        }
        Map<String, String> tagMap = new HashMap<String, String>();
        if (StringUtils.isNotBlank(trackId)) {
            tagMap.put(XdcsSparkConstant.TAG_TRACKID, trackId);
        }
        if (StringUtils.isNotBlank(ip)) {
            tagMap.put(XdcsSparkConstant.TAG_IP, ip);
        }
        if (StringUtils.isNotBlank(isMobile)) {
            tagMap.put(XdcsSparkConstant.TAG_ISMOBILE, isMobile);
        }
        if (StringUtils.isNotBlank(userId)) {
            tagMap.put(XdcsSparkConstant.TAG_USERID, userId);
        }
        addPoint(playStatResultModel.getPlayStatTypeEnum().getMetric(),
                playStatResultModel.getStatTime(), playStatResultModel.getCount(), tagMap);
    }

    /**
     * Emits a play stat point to the configured sinks (file and/or Kafka).
     *
     * @param metric     metric name
     * @param timestamp  stat timestamp
     * @param value      aggregated count; negative values are discarded
     * @param tags       tag key/value pairs attached to the point
     * @param sampleTime optional sampling pair {sampleClientTime, sampleServerTime};
     *                   only zero or exactly two values are accepted
     */
    public static void addPoint(String metric, long timestamp, long value, Map<String, String> tags, long... sampleTime) {
        if (value < 0) {
            return;
        }
        if (XdcsSparkConfig.isOutputToFile()) {
            XdcsSparkUtils.write(metric, String.valueOf(timestamp), String.valueOf(value), tags);
        }

        if (XdcsSparkConfig.isDataPointToKafka()) {
            if (sampleTime.length == 0) {
                XdcsSparkKafkaProducer.getInstance().send(new StatDataPoint(metric, timestamp, value, tags));
            } else if (sampleTime.length == 2) {
                long sampleClientTime = sampleTime[0];
                long sampleServerTime = sampleTime[1];
                XdcsSparkKafkaProducer.getInstance().send(
                        new StatDataPoint(metric, timestamp, value, tags, sampleClientTime, sampleServerTime));
            } else {
                // Previously points with an unexpected sampleTime arity were dropped
                // silently; surface the drop so misuse is visible.
                logger.warn("addPoint: unexpected sampleTime length {} for metric {}, point dropped",
                        sampleTime.length, metric);
            }
        }
    }
}