package com.bclz;



import java.io.IOException;
import java.io.UncheckedIOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRConfig;

import com.bclz.config.JobRun;
import com.bclz.task.request.CountMap;
import com.bclz.task.request.RequestTimesMapTask;
import com.bclz.task.request.RequestTimesReduceTask;
import com.bclz.task.request.RequeTimesStep2.RequestTimesMapStep2Task;
import com.bclz.task.request.RequeTimesStep2.RequestTimesReduceStep2Task;

/**
 * @ClassName: JobMainbyWin
 * @Description: Driver for running and debugging the MapReduce jobs locally
 *               on Windows (works on older Windows versions); only
 *               HADOOP_HOME needs to be configured.
 * @author xuchang
 * @date 2018-09-28
 */
public class JobMainbyWin {

	/**
	 * Driver entry point: configures and runs the request-count MapReduce job
	 * in local mode, then chains a second (sort/compare) job via the
	 * completion callback passed to {@code JobRun.runJob}.
	 *
	 * @param args command-line arguments (unused)
	 */
	public static void main(String[] args) {

		// Act as the "hadoop" user for any filesystem access.
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		Configuration conf = new Configuration();

		// Use the local filesystem. Optional: this is already the default.
		conf.set("fs.defaultFS", "file:///");
		// Run the MapReduce framework in local mode.
		// Optional: local is already the default when unset.
		conf.set(MRConfig.FRAMEWORK_NAME, "local");

		try {
			Job job = Job.getInstance(conf);
			// Running from the IDE (not packaged as a jar): setJarByClass is
			// sufficient for local mode. When submitting a packaged jar,
			// running this main directly would throw ClassNotFoundException —
			// set the jar path explicitly instead, e.g.:
			// job.setJar("D:\\git_respository\\hadoopDemo\\hadoopDemo\\target\\hadoopDemo-0.0.1-SNAPSHOT.jar");
			job.setJarByClass(JobMainbyWin.class);

			// Mapper / Reducer implementations for this job.
			job.setMapperClass(RequestTimesMapTask.class);
			job.setReducerClass(RequestTimesReduceTask.class);

			// Map output key/value types and final (reduce) output types.
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(IntWritable.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(IntWritable.class);

			// Run step 1; the callback chains step 2 over step 1's output.
			JobRun.runJob(job, "E:\\hadoop_input\\test\\request", "E:/hadoop_input/test/output", 1, () -> {
				try {
					Job job2 = Job.getInstance(conf);
					job2.setJarByClass(JobMainbyWin.class);
					job2.setMapperClass(RequestTimesMapStep2Task.class);
					job2.setReducerClass(RequestTimesReduceStep2Task.class);

					job2.setMapOutputKeyClass(CountMap.class);
					job2.setMapOutputValueClass(NullWritable.class);

					job2.setOutputKeyClass(CountMap.class);
					job2.setOutputValueClass(NullWritable.class);
					JobRun.runJob(job2, "E:\\hadoop_input\\test\\output", "E:/hadoop_input/test/Compareoutput", 1);

				} catch (IOException e) {
					// Propagate instead of swallowing: a failed step-2 setup
					// must reach the outer handler, not pass silently.
					throw new UncheckedIOException("failed to launch step-2 job", e);
				}
			});

		} catch (Exception e) {
			// Fail the process with a non-zero exit code so failures are
			// visible to callers/schedulers instead of exiting 0 after
			// merely printing a stack trace.
			e.printStackTrace();
			System.exit(1);
		}
	}
}
