package com.sarnath.sardoop.modules.hadoop.service;

import javax.annotation.Resource;

import org.apache.commons.math.util.MathUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

import com.sarnath.sardoop.common.utils.DateUtils;
import com.sarnath.sardoop.modules.hadoop.entity.JobInfo;
import com.sarnath.sardoop.modules.hadoop.entity.MrJobCreateEntity;
import com.sarnath.sardoop.modules.hadoop.util.JobUtils;

@Service("mapRedJob")
@Lazy(false)
@SuppressWarnings("deprecation")
public class MapRedJobService {

	/** SLF4J convention: static final logger bound to the concrete class. */
	private static final Logger logger = LoggerFactory.getLogger(MapRedJobService.class);

	/** Cluster configuration injected by Spring (bean name: hadoopConfiguration). */
	@Resource
	private Configuration hadoopConfiguration;

	/**
	 * Builds a MapReduce job from {@code entity} and submits it asynchronously.
	 *
	 * @param entity description of the job to create (translated by {@link JobUtils#getJob})
	 * @return the submitted job's id as a string
	 * @throws RuntimeException wrapping any submission failure, with the cause preserved
	 */
	public String runJob(MrJobCreateEntity entity) {
		try {
			Job job = JobUtils.getJob(hadoopConfiguration, entity);
			// submit() is non-blocking: it returns once the cluster accepts the job.
			job.submit();
			logger.info("job:{}  {}", job.getJobID(), job.getHistoryUrl());
			return job.getJobID().toString();
		} catch (Exception e) {
			throw new RuntimeException("Failed to submit MapReduce job", e);
		}
	}

	/**
	 * Looks up a job by id and returns a progress/status snapshot.
	 *
	 * @param jobId the Hadoop job id (e.g. "job_201401011200_0001")
	 * @return populated {@link JobInfo} for the job
	 * @throws IllegalArgumentException if no job with the given id is known to the cluster
	 * @throws RuntimeException wrapping any other lookup failure, with the cause preserved
	 */
	public JobInfo getJob(String jobId) {
		JobClient client = null;
		try {
			client = new JobClient(hadoopConfiguration);
			RunningJob job = client.getJob(jobId);
			// JobClient.getJob returns null for unknown/retired job ids; fail clearly
			// instead of letting a NullPointerException surface below.
			if (job == null) {
				throw new IllegalArgumentException("No job found for id: " + jobId);
			}
			JobInfo info = new JobInfo();
			info.setJobId(jobId);
			info.setName(job.getJobName());
			info.setState(job.getJobState());
			// Progress is reported as a 0..1 fraction; convert to a percentage
			// rounded to 2 decimals for display.
			info.setMapProgress(MathUtils.round(job.mapProgress() * 100, 2));
			info.setReduceProgress(MathUtils.round(job.reduceProgress() * 100, 2));
			info.setStartTime(DateUtils.formatDateTime(job.getJobStatus().getStartTime()));
			info.setFinishTime(DateUtils.formatDateTime(job.getJobStatus().getFinishTime()));
			return info;
		} catch (RuntimeException e) {
			// Re-throw unchecked exceptions (incl. the IllegalArgumentException above) as-is.
			throw e;
		} catch (Exception e) {
			throw new RuntimeException("Failed to look up job " + jobId, e);
		} finally {
			// JobClient holds RPC connections to the cluster; always release them.
			if (client != null) {
				try {
					client.close();
				} catch (Exception closeError) {
					logger.warn("Failed to close JobClient", closeError);
				}
			}
		}
	}
}
