import java.lang.InterruptedException;

import java.util.Map;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Date;
import java.text.SimpleDateFormat;

import java.net.InetAddress;
import java.net.UnknownHostException;

import java.io.IOException;

import org.apache.commons.cli.Options;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.CommandLineParser; //cmd line options handling
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.HelpFormatter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobTracker;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.JobInProgress;
import org.apache.hadoop.mapred.JobHistory;
import org.apache.hadoop.mapreduce.JobContext;

public class HadoopJobView {

    /** Human-readable names for the numeric {@code JobStatus} run states. */
    final static Map<Integer, String> JobStateName = new HashMap<Integer, String>();
    // Plain static initializer instead of double-brace initialization, which
    // would create a needless anonymous HashMap subclass holding an implicit
    // reference to its enclosing context.
    static {
        JobStateName.put(JobStatus.FAILED, "Failed");
        JobStateName.put(JobStatus.SUCCEEDED, "Completed");
        JobStateName.put(JobStatus.KILLED, "Killed");
        JobStateName.put(JobStatus.PREP, "Preparing");
        JobStateName.put(JobStatus.RUNNING, "Running");
    }

    String[] args_;
    String optJtHost_;          // job-tracker host; defaults to the local hostname
    long optInterval_ = 60000;  // polling interval in milliseconds (default 60 s)

    Configuration conf_ = new Configuration();

    /**
     * @param args raw command-line arguments; parsed later by
     *             {@link #getCommandLineOptions()}
     */
    public HadoopJobView(String[] args)
    {
        this.args_ = args;

        try {
            optJtHost_ = InetAddress.getLocalHost().getHostName();
        }
        catch (UnknownHostException ignored) {
            // Best effort only: the required --host option overrides this
            // default, so a failed local-hostname lookup is harmless.
        }
    }

    /**
     * Parses {@code --host} (required) and {@code --interval} (optional,
     * seconds) from {@code args_} into the corresponding fields.
     *
     * @return {@code true} when parsing succeeded, {@code false} otherwise
     */
    boolean getCommandLineOptions()
    {
        Options opts = new Options();

        Option jtHostOpt = OptionBuilder.withLongOpt("host")
                              .isRequired(true)
                              .hasArg()
                              .withArgName("HOST")
                              .withDescription("host of hadoop job tracker")
                              .withValueSeparator()
                              .create();
        Option intervalOpt = OptionBuilder.withLongOpt("interval")
                              .hasArg()
                              .withArgName("INTERVAL")
                              .withDescription("interval between two runs")
                              .withValueSeparator()
                              .create();

        opts.addOption(jtHostOpt);
        opts.addOption(intervalOpt);

        CommandLineParser parser = new PosixParser();
        CommandLine line;

        try {
            line = parser.parse(opts, args_);

            optJtHost_ = line.getOptionValue("host");
            if (line.hasOption("interval"))
                // 1000L forces the seconds-to-millis multiply to happen in
                // long arithmetic, avoiding int overflow for large intervals.
                optInterval_ = 1000L * Integer.parseInt(
                                   line.getOptionValue("interval"));
        }
        catch (ParseException exp) {
            System.out.println("ParseException: " + exp.getMessage());
            return false;
        }
        catch (NumberFormatException exp) {
            // --interval must be an integer number of seconds; previously a
            // bad value crashed the program with an uncaught exception.
            System.out.println("Bad --interval value: " + exp.getMessage());
            return false;
        }

        return true;
    }

    /** Formats a timestamp as {@code yyyy-MM-dd HH:mm:ss} in the local zone. */
    String formatTime(Date date) {
        // SimpleDateFormat is not thread-safe, so a fresh instance is created
        // per call rather than being cached in a static field.
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        return df.format(date);
    }

    /**
     * Prints a one-line summary of a job: id, name (with its length), state,
     * current time, and elapsed seconds since {@code startTime}.
     *
     * @param job       the job to describe
     * @param startTime job start time in epoch milliseconds
     * @throws IOException if the job tracker cannot be queried
     */
    void printJob(RunningJob job, long startTime) throws IOException
    {
        String jobName = job.getJobName();
        Date now = new Date();

        // Fall back to "Unknown" rather than printing "state=null" for any
        // state value missing from the JobStateName map.
        String state = JobStateName.get(job.getJobState());
        if (state == null)
            state = "Unknown";

        System.out.println("Job id=" + job.getID().toString()
            + ", name=" + jobName + "(" + jobName.length() + ")"
            + ", state=" + state
            + ", " + formatTime(now)
            + " (" + ((now.getTime() - startTime) / 1000.0)  + ")"
        );
    }

    /**
     * Polls the job tracker every {@code optInterval_} milliseconds, printing
     * each running job on every pass and one final line when a
     * previously-seen job completes. Runs until interrupted or an I/O error
     * occurs.
     */
    public void run()
    {
        if (!getCommandLineOptions()) {
            // Without a valid --host there is no job tracker to poll; the
            // original code ignored this result and proceeded anyway.
            return;
        }

        System.out.println("Host= " + optJtHost_);

        conf_.set("mapred.job.tracker", optJtHost_ + ":8021");
        conf_.addResource(new Path("$SP_HADOOP_CONF/core-site.xml"));
        conf_.addResource(new Path("$SP_HADOOP_CONF/hdfs-site.xml"));

        // Ids of jobs observed while still running, so that each completion
        // is reported exactly once.
        HashSet<String> knownJobs = new HashSet<String>();

        while (true) {
            try {
                JobConf jobConf = new JobConf(conf_);
                JobClient client = new JobClient(jobConf);

                for (JobStatus jobStatus : client.getAllJobs()) {
                    JobID jobId = jobStatus.getJobID();
                    String jobIdStr = jobId.toString();
                    RunningJob job = client.getJob(jobId);

                    if (jobStatus.isJobComplete()) {
                        // Print the completion only if we watched it run.
                        if (knownJobs.remove(jobIdStr)) {
                            printJob(job, jobStatus.getStartTime());
                        }
                    }
                    else {
                        knownJobs.add(jobIdStr);
                        printJob(job, jobStatus.getStartTime());
                    }
                }

                Thread.sleep(optInterval_);
            }
            catch (InterruptedException ie) {
                System.out.println("Ctrl-C, Interrupted");
                // Restore the interrupt flag for any caller above us; the
                // original handler silently cleared it.
                Thread.currentThread().interrupt();
                break;
            }
            catch (IOException io) {
                System.out.println("Exception: " + io.getMessage());
                break;
            }
        }
    }

    public static void main(String[] args) throws Exception
    {
        HadoopJobView jobView = new HadoopJobView(args);
        jobView.run();
    }

}

