package com.duowan.realtime.uniq.algorithm.hyperloglog.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.duowan.datawarehouse.util.FileUtil;
import com.duowan.realtime.server.util.ExpandFileUtils;

/**
 * File-backup helper for HyperLogLogPlus sketch state: persists a list of
 * per-partition {@code Map<String, byte[]>} snapshots to disk and reloads
 * them on restart. All operations are best-effort — individual file failures
 * are logged and skipped rather than aborting the whole backup/restore.
 */
public class HLLPFileHelper {

	private static final Logger LOG = LoggerFactory.getLogger(HLLPFileHelper.class);

	/** Directory holding one serialized map file per list index. */
	private static final String BACKUP_BASE_DIR = "/data/backup/hyperloglog";

	/** Utility class — static methods only, not instantiable. */
	private HLLPFileHelper() {
	}

	/**
	 * Serializes each non-empty map in {@code backupDataList} to a file named
	 * after its list index under {@link #BACKUP_BASE_DIR} (e.g. ".../0", ".../1").
	 * Empty maps are skipped so stale files for empty partitions are not rewritten.
	 * An {@link IOException} on one entry is logged and the remaining entries
	 * are still written.
	 *
	 * @param backupDataList per-partition snapshots; the list index doubles as the file name
	 */
	public static void downToBackupForObject(List<Map<String, byte[]>> backupDataList) {
		for (int i = 0; i < backupDataList.size(); i++) {
			Map<String, byte[]> backupData = backupDataList.get(i);
			if (backupData.isEmpty()) {
				continue;
			}
			File file = new File(BACKUP_BASE_DIR + "/" + i);
			try {
				ExpandFileUtils.writeObjectToFile(file, backupData);
			} catch (IOException e) {
				LOG.error("execute ExpandFileUtils.writeObjectToFile error", e);
			}
		}
	}

	/**
	 * Deserializes every file under {@link #BACKUP_BASE_DIR} and merges the
	 * resulting maps into a single {@link ConcurrentHashMap}. Files that fail
	 * to read or deserialize are logged and skipped, so a partially corrupt
	 * backup still yields the readable entries.
	 *
	 * <p>NOTE(review): this uses Java native deserialization, which is only
	 * safe because the backup directory is written exclusively by
	 * {@link #downToBackupForObject(List)} in this same process. Never point
	 * it at files from an untrusted source.
	 *
	 * @return merged map of all recovered entries; empty (never {@code null})
	 *         when no backup files exist or none could be read
	 */
	public static Map<String, byte[]> loadFromBackupForObject() {
		List<File> backupFiles = FileUtil.listFiles(BACKUP_BASE_DIR);
		Map<String, byte[]> backupHLLPMap = new ConcurrentHashMap<String, byte[]>();
		for (File file : backupFiles) {
			try {
				// Cast is unavoidable: readObjectFromFile returns Object; files are
				// only ever written by downToBackupForObject with this exact map type.
				@SuppressWarnings("unchecked")
				Map<String, byte[]> restored =
						(Map<String, byte[]>) ExpandFileUtils.readObjectFromFile(file);
				backupHLLPMap.putAll(restored);
				LOG.info("load backup hyperLogLogPlus : {}", file.getName());
			} catch (IOException e) {
				LOG.error("reload backup hyperLogLogPlus get some error :", e);
			} catch (ClassNotFoundException e) {
				LOG.error("reload object error :", e);
			}
		}
		return backupHLLPMap;
	}

}
