package com.duowan.realtime.scheduled.batch.reader.hyperLog;

import com.duowan.common.util.Profiler;
import com.duowan.realtime.computing.HyperLogLogClient;
import com.duowan.realtime.model.HyperloglogKey;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Queues;
import org.apache.commons.collections.MapUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.listener.ItemListenerSupport;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.InitializingBean;

import java.util.*;

/**
 * 提供基本的hyperlog读取方法，供继承
 *
 * @author tangsicheng
 * @version 1.0
 * @since 1.0
 */
public abstract class HyperLogReader extends ItemListenerSupport implements InitializingBean {

    private static final Logger logger = LoggerFactory.getLogger(HyperLogReader.class);
    private static final int DEFAULT_TASK_NUMBERS = 5;

    // Products whose keys are kept; dimensions for any other product are filtered out.
    private static final Set<String> PRODUCT_SET = ImmutableSet.of("client", "clientgamelobby",
            "kk_client", "llq_yygame", "webyygame", "yyexplorer", "yygame", "yygame_300");

    private final HyperLogLogClient hyperLogClient = new HyperLogLogClient();
    private int taskNumber = DEFAULT_TASK_NUMBERS;

    /**
     * Sets the number of concurrent reader threads used by {@link #readFromHyperLog}.
     *
     * @param number number of worker threads; must be positive, otherwise the read
     *               would silently return an empty result
     * @throws IllegalArgumentException if {@code number <= 0}
     */
    public void setTaskNumber(int number) {
        if (number <= 0) {
            throw new IllegalArgumentException("taskNumber must be positive: " + number);
        }
        this.taskNumber = number;
    }

    /**
     * Reads every dimension/value pair for the given group and KPI code from hyperlog.
     * <p>
     * First fetches all dimension keys in one batch call, then spawns {@code taskNumber}
     * worker threads that drain a shared queue, query the cardinality for each key, and
     * collect the results. Dimensions whose {@code product} is not in {@link #PRODUCT_SET}
     * are skipped.
     *
     * @param hyperLogGroup hyperlog group to query
     * @param kpiCode       KPI code whose keys and cardinalities are read
     * @return list of maps, each containing {@code dimension} (sorted dimension map),
     *         {@code kpiValue} (cardinality as a String) and {@code kpiCode}
     * @throws ItemStreamException if the initial batch key fetch fails
     */
    public final List<Map> readFromHyperLog(String hyperLogGroup, String kpiCode) {
        List<Map> resultMapList = Lists.newCopyOnWriteArrayList();
        List<Map<String, String>> hyperLogResult;

        Profiler.start();
        Profiler.enter("批量获取hyperLog维度信息");
        try {
            hyperLogResult = hyperLogClient.keys(hyperLogGroup, kpiCode);
        } catch (TException e) {
            String message = String.format("调用hyper log出错, group :%s, kpiCode:%s", hyperLogGroup, kpiCode);
            logger.error(message, e);
            throw new ItemStreamException(message, e);
        } finally {
            // Balance Profiler.enter() even when keys() fails, so no profiler frame leaks.
            Profiler.release();
        }

        Queue<Map<String, String>> jobQueue = Queues.newConcurrentLinkedQueue(hyperLogResult);
        Thread[] tasks = new Thread[taskNumber];

        Profiler.enter("查询hyperlog数据");
        try {
            for (int i = 0; i < taskNumber; i++) {
                tasks[i] = new HyperLogValueReadTask(hyperLogClient, jobQueue, resultMapList,
                        hyperLogGroup, kpiCode);
                tasks[i].start();
            }
            for (int i = 0; i < taskNumber; i++) {
                try {
                    tasks[i].join();
                } catch (InterruptedException e) {
                    logger.error(e.getMessage(), e);
                    // Restore the interrupt flag so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                }
            }
        } finally {
            Profiler.release();
        }
        logger.info("查询结果的维度{}  {}", resultMapList.size(), Profiler.dump());
        return resultMapList;
    }

    /**
     * Worker thread that drains the shared job queue and resolves the cardinality
     * for each dimension key. Static to avoid holding a hidden reference to the
     * enclosing reader instance; the client is passed in explicitly.
     */
    private static class HyperLogValueReadTask extends Thread {

        private final HyperLogLogClient client;
        private final Queue<Map<String, String>> jobQueue;
        private final List<Map> resultList;
        private final String hyperLogGroup;
        private final String kpiCode;

        HyperLogValueReadTask(HyperLogLogClient client, Queue<Map<String, String>> jobQueue,
                              List<Map> resultList, String hyperLogGroup, String kpiCode) {
            this.client = client;
            this.jobQueue = jobQueue;
            this.resultList = resultList;
            this.hyperLogGroup = hyperLogGroup;
            this.kpiCode = kpiCode;
        }

        @Override
        public void run() {
            while (!jobQueue.isEmpty()) {
                // poll() may return null when another worker drained the queue between
                // the isEmpty() check and here; MapUtils.isEmpty(null) covers that race.
                Map<String, String> kvMap = jobQueue.poll();
                if (MapUtils.isEmpty(kvMap)) {
                    continue;
                }

                String queryKeyWord = HyperloglogKey.serHyperloglogKey(kpiCode, kvMap);
                String product = kvMap.get("product");
                if (!PRODUCT_SET.contains(product)) {
                    logger.info("过滤key:{}", queryKeyWord);
                    continue;
                }
                Long kpiValue;
                try {
                    kpiValue = client.cardinality(hyperLogGroup, queryKeyWord);
                } catch (TException e) {
                    // Best-effort per key: log and skip rather than abort the whole batch.
                    logger.error(String.format("调用hyper log出错, group :%s, queryKeyWord:%s",
                            hyperLogGroup, queryKeyWord), e);
                    continue;
                }
                Map<String, Object> tempMap = Maps.newHashMap();
                logger.info("Hyperlog key:{}, value:{}", queryKeyWord, kpiValue);
                // TreeMap gives a deterministic, key-sorted view of the dimensions.
                tempMap.put("dimension", new TreeMap<>(kvMap));
                tempMap.put("kpiValue", String.valueOf(kpiValue));
                tempMap.put("kpiCode", kpiCode);
                resultList.add(tempMap);
            }
        }
    }
}
