package org.hyf.inspur.LessonDesin.clear.count5;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the "high user" MapReduce job: wires up {@link HighUserMapper}
 * and {@link HighUserReducer}, sets map/reduce memory limits, and submits
 * the job, exiting with 0 on success and 1 on failure.
 *
 * <p>Usage: {@code HighUserMain [inputPath] [outputPath]} — when the paths
 * are omitted, the original hard-coded local defaults are used, so existing
 * invocations keep working.
 */
public class HighUserMain {

    /** Default input: output of the previous stage (part-r-00000 of job 5). */
    private static final String DEFAULT_INPUT = "D://data/output/output5/part-r-00000";
    /** Default output directory; must not already exist when the job runs. */
    private static final String DEFAULT_OUTPUT = "D://data/output/output6";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Container memory limits for map and reduce tasks (in MB).
        conf.set("mapreduce.map.memory.mb", "3072");
        conf.set("mapreduce.reduce.memory.mb", "2048");

        Job job = Job.getInstance(conf, "HighUserMain");
        job.setJarByClass(HighUserMain.class);

        // Mapper emits (Text, IntWritable); reducer emits (NullWritable, Text).
        job.setMapperClass(HighUserMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setReducerClass(HighUserReducer.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);

        // Prefer command-line paths; fall back to the historical defaults so
        // callers that pass no arguments behave exactly as before.
        String input = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String output = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
        FileInputFormat.addInputPath(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // Submit and block until completion; exit 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
