package com.huan.kv;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class KVTestDriver {

    /**
     * Driver for a MapReduce job that uses {@link KeyValueTextInputFormat}:
     * each input line is split on the first separator (tab, set below) into a
     * (key, value) pair before it reaches the mapper.
     *
     * @param args args[0] = input path, args[1] = output path; when fewer than
     *             two args are given, falls back to the original hard-coded
     *             local test paths so existing no-arg runs behave identically.
     * @throws Exception if job configuration or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Bug fix: the original unconditionally clobbered user-supplied args
        // with hard-coded paths. Keep those paths only as a default.
        if (args.length < 2) {
            args = new String[]{
                    "E:\\Project\\Hadoop\\Mapreduce\\data\\input\\kv1.txt",
                    "E:\\Project\\Hadoop\\Mapreduce\\data\\output"};
        }

        Configuration conf = new Configuration();

        // Separator used by KeyValueLineRecordReader to split key from value.
        // NOTE: the constant really is spelled "SEPERATOR" in Hadoop's API.
        conf.set(KeyValueLineRecordReader.KEY_VALUE_SEPERATOR, "\t");

        Job job = Job.getInstance(conf);

        job.setJarByClass(KVTestDriver.class);
        job.setMapperClass(KVTextMapper.class);
        job.setReducerClass(KVTestReduce.class);

        // Mapper emits (Text, IntWritable); reducer output types match.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Yield (key, value) per line instead of TextInputFormat's
        // default (byte offset, whole line).
        job.setInputFormatClass(KeyValueTextInputFormat.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Block until the job completes; exit 0 on success, 1 on failure.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
