package com.lagou.sort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Driver for the sort MapReduce job: wires {@link SortMapper} and {@link SortReducer}
 * into a Hadoop {@link Job}, configures the map/reduce output types, and submits the
 * job, blocking until completion.
 *
 * <p>Usage: {@code SortDriver [inputPath] [outputPath]} — falls back to the original
 * hard-coded local paths when arguments are omitted, so existing invocations keep working.
 *
 * @author lcy
 * @version 1.3.3
 * @since 2021/11/6
 */
public class SortDriver {

	/** Default input directory, used when no CLI argument is supplied. */
	private static final String DEFAULT_INPUT = "G:\\大数据软件\\hadoop\\homework\\MR-HomePodWork\\input";

	/** Default output directory, used when no CLI argument is supplied. */
	private static final String DEFAULT_OUTPUT = "G:\\大数据软件\\hadoop\\homework\\MR-HomePodWork\\out";

	/**
	 * Configures and submits the sort job.
	 *
	 * @param args optional: {@code args[0]} input path, {@code args[1]} output path
	 * @throws IOException            if job setup or HDFS access fails
	 * @throws InterruptedException   if the waiting thread is interrupted
	 * @throws ClassNotFoundException if mapper/reducer classes cannot be resolved
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

		// Create the Hadoop configuration (picks up *-site.xml from the classpath).
		Configuration conf = new Configuration();

		// Build the job from the configuration.
		Job job = Job.getInstance(conf);
		job.setJobName("SortDriver");
		job.setJarByClass(SortDriver.class);

		// Register the mapper and reducer implementations.
		job.setMapperClass(SortMapper.class);
		job.setReducerClass(SortReducer.class);

		// Declare map-side and reduce-side output key/value types.
		// Map output value and reduce output value intentionally differ
		// (LongWritable vs Text), so both pairs must be set explicitly.
		job.setMapOutputKeyClass(LongWritable.class);
		job.setMapOutputValueClass(LongWritable.class);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Text.class);

		// A single reduce task (the default) is required for a globally sorted output.

		// Prefer CLI-supplied paths; keep the historical defaults for compatibility.
		String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
		String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
		FileInputFormat.addInputPath(job, new Path(inputPath));
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		// Submit the job and block until it finishes; exit code mirrors success/failure.
		boolean result = job.waitForCompletion(true);
		System.exit(result ? 0 : 1);
	}
}
