package com.duowan.realtime.tool;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.IOUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;

import com.duowan.datawarehouse.util.FileUtil;
import com.duowan.realtime.computing.HyperLogLogClient;
import com.duowan.realtime.server.util.MapUtils;
import com.duowan.realtime.thirft.api.HyperLogLogException;
import com.duowan.realtime.thirft.api.HyperLogLogQuery;


/**
 * Command-line tool that bulk-loads history data files into a HyperLogLog pool.
 *
 * <p>Each input file contains comma-separated {@code key,value} lines. Lines are
 * grouped by key and offered to the HyperLogLog service in per-file batches via
 * {@link HyperLogLogClient}. Files are drained from a shared queue by a pool of
 * worker threads.
 *
 * <p>Usage: {@code LoadHistoryToHyperloglogPoolTool <hyperloglogGroup> <filePath>}
 */
public class LoadHistoryToHyperloglogPoolTool {

	private static final Logger LOG = LoggerFactory.getLogger(LoadHistoryToHyperloglogPoolTool.class);

	/** Target HyperLogLog group name. Public for backward compatibility. */
	public String hyperloglogGroup;

	/** Number of worker threads draining the file queue (was misspelled COYP). */
	private static final int DEFAULT_EXECUTE_COPY_THREAD_NUM = 15;

	private final ThreadPoolExecutor thirftThreadPoolExecutor;

	/** Files waiting to be loaded; concurrent because several workers poll it. */
	private final Queue<File> standbyQueue = new ConcurrentLinkedQueue<File>();

	private final HyperLogLogClient client;

	/**
	 * Creates the tool and its worker pool.
	 *
	 * @param hyperloglogGroup the HyperLogLog group all batches are offered to
	 */
	public LoadHistoryToHyperloglogPoolTool(String hyperloglogGroup) {
		this.client = new HyperLogLogClient();
		this.hyperloglogGroup = hyperloglogGroup;
		// CallerRunsPolicy instead of DiscardPolicy: silently discarding a
		// rejected task would skip whole files without any log trace.
		this.thirftThreadPoolExecutor = new ThreadPoolExecutor(
				DEFAULT_EXECUTE_COPY_THREAD_NUM,
				DEFAULT_EXECUTE_COPY_THREAD_NUM * 2,
				3L, TimeUnit.SECONDS,
				new ArrayBlockingQueue<Runnable>(1000),
				new ThreadPoolExecutor.CallerRunsPolicy());
	}

	/**
	 * Entry point.
	 *
	 * @param args {@code [hyperloglogGroup, filePath]}
	 */
	public static void main(String[] args) throws IOException {
		if (args.length != 2) {
			LOG.error("must be 2 parameter: hyperloglogGroup,filePath");
			System.exit(-1);
		}
		String hyperloglogGroup = args[0];
		String filePath = args[1];

		LoadHistoryToHyperloglogPoolTool tool = new LoadHistoryToHyperloglogPoolTool(hyperloglogGroup);
		tool.excuteLoad(filePath);
	}

	/**
	 * Queues every file under {@code filePath} and processes them in parallel.
	 *
	 * <p>Fixes two defects of the original implementation: it submitted only a
	 * single task to the 15-thread pool (no parallelism), and it busy-spun on
	 * {@code getActiveCount()} at 100% CPU while waiting for completion.
	 */
	private void excuteLoad(String filePath) {
		standbyQueue.addAll(FileUtil.listFiles(filePath));
		// One worker per core thread; each worker drains the queue until empty.
		for (int i = 0; i < DEFAULT_EXECUTE_COPY_THREAD_NUM; i++) {
			thirftThreadPoolExecutor.execute(new ThriftHyperlogTasks());
		}
		// Blocks until all queued files have been processed.
		shutdown();
	}

	/**
	 * Initiates an orderly shutdown and blocks until all in-flight file loads
	 * have finished (the original gave up after a single 10-second wait).
	 */
	public void shutdown() {
		LOG.info("all job done ! shutting down the thirftThreadPoolExecutor");
		thirftThreadPoolExecutor.shutdown();
		try {
			while (!thirftThreadPoolExecutor.awaitTermination(10L, TimeUnit.SECONDS)) {
				LOG.info("still loading, " + standbyQueue.size() + " files remaining in standby queue");
			}
		} catch (InterruptedException e) {
			LOG.error("copyThreadPoolExecutor.awaitTermination error! ", e);
			// Restore the interrupt flag so callers can observe the interruption.
			Thread.currentThread().interrupt();
		}
	}

	/**
	 * Worker task: repeatedly takes one file from the standby queue, parses it
	 * into per-key value lists, and offers the batch to the HyperLogLog pool.
	 */
	class ThriftHyperlogTasks implements Runnable {

		@Override
		public void run() {
			while (true) {
				// poll() + null-check is atomic; the original size()-then-poll()
				// pair raced with other workers and could NPE on a null file.
				File file = standbyQueue.poll();
				if (file == null) {
					break;
				}
				long startTime = System.currentTimeMillis();
				long lineCounter = 0L;
				LOG.info("there are " + standbyQueue.size() + " files prepare to load to hyperloglog pool");
				LOG.info("load file :" + file.getName());
				Map<String, List<String>> queryMap = new HashMap<String, List<String>>();
				BufferedReader bufferedReader = null;
				try {
					bufferedReader = new BufferedReader(new FileReader(file));
					String line;
					// Read to EOF. The original looped on isNotBlank(line) and so
					// stopped at the first empty line, silently truncating files.
					while ((line = bufferedReader.readLine()) != null) {
						if (isNotBlank(line)) {
							allotQueryByGroup(queryMap, line);
							lineCounter++;
						}
					}
					excuteThriftHyperlog(queryMap);
				} catch (Exception e) {
					LOG.error("excute history data to hyperloglog error", e);
				} finally {
					IOUtils.closeQuietly(bufferedReader);
				}

				long elapsedMs = System.currentTimeMillis() - startTime;
				// Multiply before dividing: the original count/ms*1000 truncated
				// to 0; also guard against a 0 ms elapsed time (divide-by-zero).
				long tps = elapsedMs > 0 ? lineCounter * 1000 / elapsedMs : lineCounter;
				LOG.info("file:" + file + ",records:" + lineCounter + ",cost time:" + (elapsedMs / 1000)
						+ ",load history data tps:" + tps);
			}
		}

	}

	/**
	 * Offers one batch of grouped values to the HyperLogLog pool.
	 *
	 * @param queryMap group key -> values collected for that key
	 * @throws HyperLogLogException on service-level failure
	 * @throws TException           on Thrift transport failure
	 */
	public void excuteThriftHyperlog(Map<String, List<String>> queryMap) throws HyperLogLogException, TException {
		List<HyperLogLogQuery> hllQueryList = new LinkedList<HyperLogLogQuery>();
		for (String key : queryMap.keySet()) {
			hllQueryList.add(initHllQuery(queryMap, key));
		}
		client.offer(hyperloglogGroup, hllQueryList);
	}

	/** Builds a single query carrying all values collected for {@code key}. */
	private HyperLogLogQuery initHllQuery(Map<String, List<String>> queryMap, String key) {
		HyperLogLogQuery query = new HyperLogLogQuery();
		query.setGroup(key);
		query.setValues(queryMap.get(key));
		return query;
	}

	/**
	 * Parses one CSV line into {@code queryMap}: the line's "key" column selects
	 * the bucket, the "value" column (if non-blank) is appended to it. A key with
	 * a blank value still creates an (empty) bucket, matching original behavior.
	 */
	protected void allotQueryByGroup(Map<String, List<String>> queryMap, String line) {
		Map map = convertLineToMap(line);
		if (map.size() == 0) {
			return;
		}
		String key = (String) map.get("key");
		String value = (String) map.get("value");
		List<String> values = queryMap.get(key);
		if (values == null) {
			values = new LinkedList<String>();
			queryMap.put(key, values);
		}
		if (isNotBlank(value)) {
			values.add(value);
		}
	}

	/** True when {@code value} is non-null and contains non-whitespace. */
	private boolean isNotBlank(String value) {
		return org.apache.commons.lang.StringUtils.isNotBlank(value);
	}

	/**
	 * Converts a comma-separated line into a map with "key"/"value" entries.
	 * Raw {@code Map} kept for backward compatibility with {@link MapUtils}.
	 */
	protected Map convertLineToMap(String line) {
		String[] columns = splitLine(line);
		return MapUtils.convertToMap(columns, "key", "value");
	}

	/** Splits a line on commas and trims whitespace from every element. */
	protected String[] splitLine(String line) {
		return StringUtils.trimArrayElements(line.split(","));
	}

}
