package com.lzj.weather02;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the Weather02 MapReduce job.
 *
 * <p>Wires together the Mapper/Reducer, a custom partitioner and grouping
 * comparator, and runs the job in local mode. Input and output paths may be
 * supplied as the first and second command-line arguments; otherwise the
 * historical defaults are used.
 */
public class WeatherJob {

    /** Default HDFS input file, used when no argument is given. */
    private static final String DEFAULT_INPUT = "/lzj/weather.txt";

    /** 4 MB — forced split size so the input is cut into fixed-size splits. */
    private static final long SPLIT_SIZE_BYTES = 4L * 1024 * 1024;

    /**
     * Configures and submits the job, then exits with 0 on success or 1 on
     * failure so calling scripts can detect a failed run.
     *
     * @param args optional: args[0] = input path, args[1] = output path
     * @throws Exception if job setup or submission fails
     */
    public static void main(String[] args) throws Exception {
        // Load the default Hadoop configuration (core-site.xml, etc.).
        Configuration configuration = new Configuration(true);
        // Run the job in local mode rather than on a YARN cluster.
        configuration.set("mapreduce.framework.name", "local");
        Job job = Job.getInstance(configuration);
        // Jar/class resolution for task distribution.
        job.setJarByClass(WeatherJob.class);
        // Timestamped name so repeated runs are distinguishable in the UI.
        job.setJobName("Weather02-" + System.currentTimeMillis());
        // Two reducers, matching the custom partitioner's output partitions.
        job.setNumReduceTasks(2);
        // Pin both min and max split size to force uniform 4 MB splits.
        FileInputFormat.setMaxInputSplitSize(job, SPLIT_SIZE_BYTES);
        FileInputFormat.setMinInputSplitSize(job, SPLIT_SIZE_BYTES);
        // Allow overriding the hard-coded paths from the command line.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1
                ? args[1]
                : "/lzj/result/weather02_" + System.currentTimeMillis();
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        // Output dir is timestamped by default so reruns never collide.
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        // Map output key/value types (differ from the job's final output).
        job.setMapOutputKeyClass(Weather.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Custom partitioner routes keys to the two reducers.
        job.setPartitionerClass(WeatherPartitioner.class);
        // Grouping comparator controls which keys share one reduce() call.
        job.setGroupingComparatorClass(WeatherGroupingComparator.class);
        job.setMapperClass(WeatherMapper.class);
        job.setReducerClass(WeatherReducer.class);
        // Propagate job success/failure through the process exit code;
        // the original ignored the result and always exited 0.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}