package com.bclz.config;

import java.io.File;
import java.time.Instant;
import java.util.function.Consumer;
import java.util.function.Supplier;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * 
* @ClassName: JobRun  
* @Description: 执行job方法 
* @author xuchang  
* @date 2018年9月28日
 */
public class JobRun {

	/**
	 * Configures and runs the given MapReduce job, then terminates the JVM.
	 * The JVM exits with code 0 when the job succeeds and 1 when it fails,
	 * following the standard process-exit convention (the previous version
	 * had these inverted, which broke shell/scheduler success checks).
	 *
	 * @param job             the fully mapper/reducer-configured job to run
	 * @param inputDir        input path (local on Windows, HDFS otherwise)
	 * @param outPutDir       output path; deleted first if it already exists
	 * @param reduceTaskCount number of reduce tasks to request
	 */
	public static void runJob(Job job, String inputDir, String outPutDir, int reduceTaskCount) {
		long start = Instant.now().toEpochMilli();
		try {
			boolean success = submitAndWait(job, inputDir, outPutDir, reduceTaskCount);

			System.out.println("Total-------------" + (Instant.now().toEpochMilli() - start));

			// Exit 0 on success, 1 on failure (conventional exit codes).
			System.exit(success ? 0 : 1);
		} catch (Exception e) {
			// Preserve the full stack trace instead of only e.getMessage(),
			// which may be null and always loses the cause chain.
			e.printStackTrace();
		}
	}

	/**
	 * Configures and runs the given MapReduce job, invoking {@code run}
	 * only if the job completes successfully. Unlike the other overload,
	 * this variant does NOT terminate the JVM.
	 *
	 * @param job             the fully mapper/reducer-configured job to run
	 * @param inputDir        input path (local on Windows, HDFS otherwise)
	 * @param outPutDir       output path; deleted first if it already exists
	 * @param reduceTaskCount number of reduce tasks to request
	 * @param run             callback executed after (and only after) success
	 */
	public static void runJob(Job job, String inputDir, String outPutDir, int reduceTaskCount, Runnable run) {
		long start = Instant.now().toEpochMilli();
		try {
			boolean success = submitAndWait(job, inputDir, outPutDir, reduceTaskCount);

			System.out.println("Total-------------" + (Instant.now().toEpochMilli() - start));

			if (success) {
				run.run();
			}
		} catch (Exception e) {
			// Preserve the full stack trace instead of only e.getMessage().
			e.printStackTrace();
		}
	}

	/**
	 * Shared job setup: clears a stale output directory, wires the input and
	 * output paths, sets the reducer count, and blocks until completion.
	 *
	 * @return {@code true} if the job completed successfully
	 * @throws Exception on filesystem or job-submission failure
	 */
	private static boolean submitAndWait(Job job, String inputDir, String outPutDir, int reduceTaskCount)
			throws Exception {
		Path outputPath = new Path(outPutDir);

		deleteOutputIfExists(outPutDir, outputPath);

		FileInputFormat.setInputPaths(job, new Path(inputDir));
		FileOutputFormat.setOutputPath(job, outputPath);

		job.setNumReduceTasks(reduceTaskCount);

		return job.waitForCompletion(true);
	}

	/**
	 * YARN requires the output directory to be absent before a job runs,
	 * otherwise submission throws. On Windows the output is assumed local
	 * and deleted via the local filesystem; elsewhere it is removed from
	 * the cluster filesystem obtained from {@link HadoopManager}.
	 *
	 * @throws Exception if the deletion fails
	 */
	private static void deleteOutputIfExists(String outPutDir, Path outputPath) throws Exception {
		if (System.getProperty("os.name").contains("Windows")) {
			File localDir = new File(outPutDir);
			if (localDir.exists()) {
				FileUtils.deleteDirectory(localDir);
			}
		} else {
			FileSystem fs = HadoopManager.getFileSystem();
			if (fs.exists(outputPath)) {
				fs.delete(outputPath, true); // recursive delete
			}
		}
	}

}
