package com.run.wz.test.hfile;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.hfile.FixedFileTrailer;
import org.apache.hadoop.hbase.io.hfile.HFile;

/**
 * Scans an HBase table's HFiles directly on HDFS and sums the entry counts
 * recorded in each HFile's fixed trailer, printing per-region and per-table
 * totals. Table names and column families are read from
 * {@code table_desc.properties} on the classpath.
 */
public class EntryCount {
	private String hdfs;
	private Configuration conf;
	private FixedFileTrailer trailer; // NOTE(review): unused; kept to avoid changing the class shape
	FileSystem fileSystem = null;

	public EntryCount() throws IOException {
	}

	// NOTE(review): unused executor; region counting is done synchronously below.
	static ExecutorService tp = Executors.newFixedThreadPool(1);

	/**
	 * Loads {@code table_desc.properties}, configures HDFS/ZooKeeper access,
	 * then parses every table listed in the properties file.
	 *
	 * @throws Exception if the properties cannot be loaded or any table parse fails
	 */
	private void init() throws Exception {
		this.conf = new Configuration();
		Properties p = new Properties();
		p.load(EntryCount.class.getClassLoader().getResourceAsStream(
				"table_desc.properties"));
		this.conf.set("hadoop.tmp.dir", "/data1");
		hdfs = p.getProperty("hdfs");
		this.conf.set("fs.default.name", hdfs);
		conf.set("hbase.zookeeper.quorum",
				p.getProperty("hbase.zookeeper.quorum"));
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		System.out.println("hdfs:" + hdfs);

		// Number of tables to parse, read from the properties file.
		String tNumS = p.getProperty("table.num");
		int tNum = Integer.parseInt(tNumS);
		System.out.println("total " + tNum + " table need parse");
		String tableURL = hdfs + "/hbase/";
		for (int i = 1; i <= tNum; i++) {
			String tableName = p.getProperty("table" + i + ".name");
			System.out.println(i + " table' name => " + tableName);
			String tableCF = p.getProperty("table" + i + ".cf");
			System.out.println(i + " table' cf =>" + tableCF);
			parseTable(tableURL + tableName, tableCF);
		}

	}

	/**
	 * Walks the region directories under {@code tableURL}, matches each
	 * directory to its {@link HRegionInfo} by encoded name, and accumulates
	 * the HFile entry counts for the given column families.
	 *
	 * @param tableURL HDFS URL of the table directory (e.g. hdfs://nn/hbase/t1)
	 * @param cfList   comma-separated column-family names to scan
	 * @throws Exception on any HDFS or HBase access failure
	 */
	private void parseTable(String tableURL, String cfList) throws Exception {
		fileSystem = FileSystem.get(URI.create(tableURL), this.conf);
		Path path = new Path(tableURL);
		System.out.println("============BEGIN parse table " + tableURL);
		String name = path.getName();
		HTable htable = new HTable(conf, name);
		long tablecount = 0;
		try {
			// Guard the whole scan so the HTable is always closed; the original
			// only wrapped an empty try block, leaking the table on any failure.
			NavigableMap<HRegionInfo, ServerName> regionLocations = htable
					.getRegionLocations();
			Set<HRegionInfo> regionInfos = regionLocations.keySet();
			FileStatus[] regions = fileSystem.listStatus(path);
			for (FileStatus regionDir : regions) {
				String regionName = regionDir.getPath().getName();
				// Find the region whose encoded name matches this directory.
				// BUG FIX: the original reused the loop variable, so when no
				// region matched (e.g. a non-region entry like .tableinfo) it
				// still held the LAST region info and the wrong region was counted.
				HRegionInfo matched = null;
				for (HRegionInfo info : regionInfos) {
					if (regionName.equals(info.getEncodedName())) {
						matched = info;
						break;
					}
				}
				if (matched != null) {
					tablecount += new RegionCount(tableURL, matched, cfList).call();
				}
			}
		} finally {
			htable.close();
		}
		System.out.println("============END parse table " + tableURL
				+ " ENTRYCOUNT =>" + tablecount);
	}

	/**
	 * Opens an HFile and touches its file info / root block index.
	 * NOTE(review): the reader is never closed and nothing is returned;
	 * presumably a debugging helper — confirm before relying on it.
	 *
	 * @param fs   filesystem containing the HFile
	 * @param path path to the HFile
	 * @throws IOException if the HFile cannot be read
	 */
	public void readFile(FileSystem fs, Path path) throws IOException {
		org.apache.hadoop.hbase.io.hfile.HFile.Reader createReader = HFile
				.createReader(fs, path, null);
		Map<byte[], byte[]> loadFileInfo = createReader.loadFileInfo();
		createReader.getDataBlockIndexReader().getRootBlockCount();
	}

	/**
	 * Counts the entries of a single region by summing the trailer entry
	 * counts of every HFile under each requested column family.
	 */
	public class RegionCount implements Callable<Long> {

		String tableURL;
		String encodedName;
		String cfList;
		String regionName;

		public RegionCount(String tableURL, HRegionInfo hregionInfo, String cfList) {
			this.encodedName = hregionInfo.getEncodedName();
			this.tableURL = tableURL;
			this.cfList = cfList;
			this.regionName = hregionInfo.getRegionNameAsString();
		}

		/**
		 * @return total entry count across all HFiles of this region's
		 *         configured column families
		 * @throws Exception on any HDFS read failure
		 */
		@Override
		public Long call() throws Exception {
			long regioncount = 0;
			String[] cfArr = cfList.split(",");
			for (String cf : cfArr) {
				String regionURL = tableURL + "/" + encodedName;
				String cfURL = regionURL + "/" + cf;
				FileStatus[] hfiles = fileSystem.listStatus(new Path(cfURL));
				if (hfiles == null) {
					continue;
				}
				for (FileStatus hfile : hfiles) {
					// BUG FIX: declare the stream per file and null-check in
					// finally — the original NPE'd in close() when open() threw.
					FSDataInputStream open = null;
					try {
						open = fileSystem.open(hfile.getPath());
						// Use the length already returned by listStatus instead
						// of a redundant getFileStatus() namenode round-trip.
						long len = hfile.getLen();
						FixedFileTrailer readFromStream = FixedFileTrailer
								.readFromStream(open, len);
						regioncount += readFromStream.getEntryCount();
					} finally {
						if (open != null) {
							open.close();
						}
					}
				}
			}
			System.out.println("REGION " + regionName + " ENTRYCOUNT =>"
					+ regioncount);
			return regioncount;
		}

	}

	public static void main(String[] args) throws Exception {
		EntryCount entryCount = new EntryCount();
		entryCount.init();
	}
}