package com.neuedu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class CityDriver {
    /**
     * Driver for the City MapReduce job: wires up the mapper/reducer classes,
     * their key/value types and the input/output paths, then submits the job
     * and blocks until it finishes.
     *
     * @param args args[0] = input path, args[1] = output path (the output
     *             directory is deleted first if it already exists)
     * @throws Exception if HDFS access or job setup/execution fails
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when paths are missing.
        if (args.length < 2) {
            System.err.println("Usage: CityDriver <input path> <output path>");
            System.exit(2);
        }
//1. Run HDFS operations as the "root" user.
        System.setProperty("HADOOP_USER_NAME", "root");
//2. Job configuration.
        Configuration configuration = new Configuration();
//3. Create the job object — one job is one application.
        Job job = Job.getInstance(configuration);
//4. Jar containing this driver class (used to ship the job to the cluster).
        job.setJarByClass(CityDriver.class);
//5. Mapper class for the job.
        job.setMapperClass(CityMapper.class);
//6. Mapper output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
//7. Reducer class for the job.
        job.setReducerClass(CityReducer.class);
//8. Final (reducer) output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
//9. Input path taken from the command line.
        FileInputFormat.addInputPath(job, new Path(args[0]));
//10. Output path taken from the command line.
        Path dst = new Path(args[1]);
//11. Delete the output directory if it already exists —
//    FileOutputFormat refuses to write into an existing directory.
        FileSystem fs = FileSystem.get(configuration);
        if (fs.exists(dst)) {
            fs.delete(dst, true); // recursive delete
        }
//12. Register the output path.
        FileOutputFormat.setOutputPath(job, dst);
//13. Submit the job, wait for completion; exit 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
