package com.levy.hadoop;

import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.Collection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobTracker;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;

/**
 * Demonstrates querying a Hadoop cluster for status information:
 * JobTracker/TaskTracker state via {@link org.apache.hadoop.mapred.JobClient}
 * and DataNode/HDFS usage via {@link org.apache.hadoop.hdfs.DFSClient}.
 */
public class HadoopAPIInfo {

	public static void main(String[] args) throws Exception {
		getJobTracker();
//		getHadoopInfo();
	}

	/**
	 * Queries the JobTracker for cluster status (TaskTracker count, state and
	 * active tracker names), then queries the NameNode for live DataNodes and
	 * prints a report for each.
	 *
	 * <p>The JobTracker query is best-effort: an IOException there is logged
	 * and the NameNode query still runs.
	 *
	 * @throws IOException if the NameNode query or a client close fails
	 */
	public static void getJobTracker() throws IOException {

		Configuration conf = new Configuration();

		// --- Query the JobTracker (MapReduce master) ---
		InetSocketAddress jobtrackerAddr = new InetSocketAddress("10.15.4.28", 8021);
		JobClient jobClient = null;
		try {
			jobClient = new JobClient(jobtrackerAddr, conf);

			ClusterStatus cluster = jobClient.getClusterStatus(true);
			JobTracker.State state = cluster.getJobTrackerState();
			int taskTrackerCount = cluster.getTaskTrackers();
			System.out.println("TaskTracker number : " + taskTrackerCount);
			System.out.println("JobTracker state : " + state);

			// Single call instead of the original duplicate getActiveTrackerNames().
			Collection<String> trackers = cluster.getActiveTrackerNames();
			for (String name : trackers) {
				System.out.println("TaskTracker name : " + name);
			}
		} catch (IOException e) {
			// Best-effort: log and fall through to the NameNode query below.
			e.printStackTrace();
		} finally {
			// Close on every path, not just on success (original leaked on error).
			if (jobClient != null) {
				jobClient.close();
			}
		}

		// --- Query the NameNode (HDFS master) for live DataNodes ---
		InetSocketAddress namenodeAddr = new InetSocketAddress("10.15.4.28", 8020);
		DFSClient client = new DFSClient(namenodeAddr, conf);
		try {
			System.out.println("\nDataNode info: ");
			DatanodeInfo[] datanodeReport = client.datanodeReport(
					DatanodeReportType.LIVE);
			for (DatanodeInfo di : datanodeReport) {
				System.out.println("Host: " + di.getHostName() + "/" + di.getHost());
				System.out.println(di.getDatanodeReport());
			}
		} finally {
			// Original never closed this client at all.
			client.close();
		}
	}

	/**
	 * Connects to HDFS, prints the overall DFS usage percentage, then lists
	 * the remaining-capacity percentage of each live DataNode and the names of
	 * dead DataNodes. All IOExceptions are logged rather than propagated.
	 */
	public static void getHadoopInfo() {
		Configuration conf = new Configuration();
		DistributedFileSystem dfs = null;
		try {
			dfs = new DistributedFileSystem();
			dfs.initialize(URI.create("hdfs://10.15.4.28:8020"), conf);

			DistributedFileSystem.DiskStatus ds = dfs.getDiskStatus();
			long used = ds.getDfsUsed();
			long remaining = ds.getRemaining();
			long presentCapacity = used + remaining;
			// Guard against division by zero on an empty/unreported cluster.
			double usedPercent = presentCapacity == 0
					? 0.0
					: ((1.0 * used) / presentCapacity) * 100;
			System.out.println("DFS Used%: "
					+ StringUtils.limitDecimalTo2(usedPercent)
					+ "%");

			DatanodeInfo[] live = dfs.getClient().datanodeReport(DatanodeReportType.LIVE);
			DatanodeInfo[] dead = dfs.getClient().datanodeReport(DatanodeReportType.DEAD);

			for (DatanodeInfo dn : live) {
				System.out.println("the usage of " + dn.getHostName() + "/" + dn.getHost() + " in hdfs is "
						+ StringUtils.limitDecimalTo2(dn.getRemainingPercent()));
			}
			for (DatanodeInfo dn : dead) {
				System.out.println("the " + dn.getHost() + " in hdfs is dead");
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// Original leaked the filesystem handle; close it on every path.
			if (dfs != null) {
				try {
					dfs.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}
}