package com.huan.wc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Driver for the word-count MapReduce job: configures the job (mapper,
 * reducer, key/value types, input/output paths) and submits it, blocking
 * until completion.
 */
public class Driver {
    /**
     * Configures and runs the job.
     *
     * @param args {@code args[0]} = input path, {@code args[1]} = output
     *             directory (must not already exist); when fewer than two
     *             arguments are given, local debug paths are used instead.
     * @throws IOException            if job setup or submission fails
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Fall back to hard-coded local paths ONLY when the caller supplied
        // no paths, so command-line arguments are no longer silently ignored.
        if (args.length < 2) {
            args = new String[] {
                "E:\\Project\\Hadoop\\Mapreduce\\data\\input\\huan.txt",
                "E:\\Project\\Hadoop\\Mapreduce\\data\\output"
            };
        }

        // Create the job from a fresh Hadoop configuration.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // Record the jar containing this driver so it can be shipped to the cluster.
        job.setJarByClass(Driver.class);
        // Wire up the map and reduce implementations.
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReduce.class);
        // Map-phase output types (word -> count).
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Final (reduce-phase) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Input file(s) and output directory.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Submit the job, print progress, and exit 0 on success, 1 on failure.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
