package com.bclz;



import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRConfig;

import com.bclz.config.JobRun;
import com.bclz.entity.Summarizing;
import com.bclz.task.flow.FlowStatisticMap;
import com.bclz.task.flow.FlowStatisticReduce;

/**
 * 
* @ClassName: JobMainbyLinux  
* @Description: Configures and submits the flow-statistics MapReduce job to a YARN cluster.
* @author xuchang  
* @date 2018-09-20
 */
public class JobMainbyLinux {

	/**
	 * Configures a Hadoop {@link Job} for the flow-statistics MapReduce program
	 * and submits it to a remote YARN cluster.
	 *
	 * <p>Reads input from {@code /test/flow} on HDFS, writes to {@code /test/output},
	 * and requests 2 reduce tasks via {@code JobRun.runJob}. Exits with status 1 if
	 * configuration or submission fails.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {

		// Submit as the "hadoop" user regardless of the local OS account.
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		Configuration conf = new Configuration();

		// yarn.app.mapreduce.am.resource.mb: max memory the MR ApplicationMaster
		// may request (default 1536 MB) — lowered here for a small test cluster.
		conf.set("yarn.app.mapreduce.am.resource.mb", "256");
		// Default file system: the HDFS NameNode.
		conf.set("fs.defaultFS", "hdfs://master:9000");
		// Run the MapReduce job on YARN rather than locally.
		conf.set(MRConfig.FRAMEWORK_NAME, "yarn");
		conf.set("yarn.resourcemanager.hostname", "node3");
		// Required when submitting from a different platform (e.g. Windows -> Linux).
		conf.set(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, "true");

		try {
			Job job = Job.getInstance(conf);
			// Locate the job jar from this class. Running unpacked from an IDE
			// causes ClassNotFound on the cluster; package into a jar first,
			// or point at it explicitly with job.setJar("<path-to-jar>").
			job.setJarByClass(JobMainbyLinux.class);

			// Mapper and reducer implementations for this job.
			job.setMapperClass(FlowStatisticMap.class);
			job.setReducerClass(FlowStatisticReduce.class);

			// Map output key/value types.
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Summarizing.class);

			// Final (reduce) output key/value types.
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(Summarizing.class);

			JobRun.runJob(job, "/test/flow", "/test/output", 2);

		} catch (Exception e) {
			// Surface the failure and exit non-zero so callers/schedulers
			// can detect that submission failed (previously main returned 0).
			e.printStackTrace();
			System.exit(1);
		}
	}
}
