package com.huan.partitioner_sort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver that configures and submits the flow-count sort MapReduce job.
 *
 * <p>The job reads the output of a previous flow-count job ({@code part-r-00000}),
 * re-keys it by {@code FlowBean} so the shuffle phase sorts records by the bean's
 * ordering (assumes {@code FlowBean} implements {@code WritableComparable} — TODO
 * confirm), partitions records with {@link ProvincePartitioner}, and writes one
 * output file per reducer.
 */
public class FlowCountSortDriver {

    /** Default input path used only when no command-line arguments are given. */
    private static final String DEFAULT_INPUT =
            "E:\\Project\\Hadoop\\Mapreduce\\data\\input\\part-r-00000";

    /** Default output path (fixed typo: was "outout1"). Must not already exist. */
    private static final String DEFAULT_OUTPUT =
            "E:\\Project\\Hadoop\\Mapreduce\\data\\output1";

    /**
     * Configures and runs the job, then exits with 0 on success or 1 on failure.
     *
     * @param args {@code args[0]} = input path, {@code args[1]} = output path;
     *             local development defaults are used when fewer than two
     *             arguments are supplied
     * @throws Exception if job setup or submission fails
     */
    public static void main(String[] args) throws Exception {
        // Bug fix: the original code unconditionally overwrote args with
        // hard-coded paths, making it impossible to pass real arguments.
        // Fall back to the development defaults only when none are provided.
        if (args == null || args.length < 2) {
            args = new String[] {DEFAULT_INPUT, DEFAULT_OUTPUT};
        }

        // Create the job configuration and the job itself.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // Locate the jar by this driver class so the cluster can ship it.
        job.setJarByClass(FlowCountSortDriver.class);

        // Mapper and reducer implementations.
        job.setMapperClass(FlowCountMapper.class);
        job.setReducerClass(FlowCountSortReducer.class);

        // Map output types: FlowBean as key so the shuffle sorts by it.
        job.setMapOutputKeyClass(FlowBean.class);
        job.setMapOutputValueClass(Text.class);

        // Final (reducer) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        // Partition by province; the reducer count must match the number of
        // partitions ProvincePartitioner produces (assumed to be 5 — confirm
        // against ProvincePartitioner.getPartition).
        job.setPartitionerClass(ProvincePartitioner.class);
        job.setNumReduceTasks(5);

        // Input and output locations.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Submit the job, block until completion, and report via exit code.
        boolean completed = job.waitForCompletion(true);
        System.exit(completed ? 0 : 1);
    }
}
