package com.test.test3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Driver3 {

    // Original hard-coded paths, kept as defaults so existing invocations
    // (running with no arguments) behave exactly as before.
    private static final String DEFAULT_INPUT =
            "D:\\develop\\ideaProjects\\mptest\\input\\data2.csv";
    private static final String DEFAULT_OUTPUT =
            "D:\\develop\\ideaProjects\\mptest\\output\\test3";

    /**
     * Configures and submits the MapReduce job that wires {@code Mapper3}
     * to {@code Reducer3}.
     *
     * <p>Usage: {@code Driver3 [inputPath [outputPath]]}. Any argument not
     * supplied falls back to the original hard-coded local path, so the
     * previous zero-argument behavior is preserved.
     *
     * <p>Exits with status 0 on job success, 1 on failure.
     *
     * @param args optional input path ({@code args[0]}) and output path
     *             ({@code args[1]})
     * @throws Exception if job setup, filesystem access, or submission fails
     */
    public static void main(String[] args) throws Exception {
        // Resolve paths from the command line, falling back to the defaults.
        String inputPath = args.length >= 1 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length >= 2 ? args[1] : DEFAULT_OUTPUT;

        // 1. Create the job from a fresh configuration.
        Configuration config = new Configuration();
        Job job = Job.getInstance(config);

        // 2. Locate the jar containing this driver for cluster distribution.
        job.setJarByClass(Driver3.class);

        // 3. Attach the mapper and reducer implementations.
        job.setMapperClass(Mapper3.class);
        job.setReducerClass(Reducer3.class);

        // 4. Declare the mapper's output key/value types (they differ from
        //    the final output types, so they must be set explicitly).
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);

        // 5. Declare the final (reducer) output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // 6. Wire up the input and output paths.
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        Path path = new Path(outputPath);
        FileOutputFormat.setOutputPath(job, path);

        // Remove a pre-existing output directory: Hadoop refuses to start a
        // job whose output path already exists.
        FileSystem fileSystem = FileSystem.get(config);
        if (fileSystem.exists(path)) {
            fileSystem.delete(path, true); // recursive delete
        }

        // 7. Submit the job and block until completion, printing progress.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
