package org.example.mr1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * MapReduce driver that reads fruit records from HDFS text files and
 * writes them into an HBase table via a table reducer.
 *
 * <p>Usage: {@code FruitDriver <input path> <output table name>}
 *
 * <p>The mapper emits (file offset, raw line) pairs; the reducer
 * ({@code FruitReducer}) converts each line into HBase Puts.
 */
public class FruitDriver implements Tool {

    // Hadoop configuration; injected by ToolRunner through setConf().
    private Configuration conf = null;

    /**
     * Configures and submits the MapReduce job.
     *
     * @param args args[0] = HDFS input path, args[1] = target HBase table name
     * @return 0 on success, 1 on failure or bad arguments
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: FruitDriver <input path> <output table name>");
            return 1;
        }

        // 1. Create the job from the configuration supplied by ToolRunner.
        Job job = Job.getInstance(conf);

        // 2. Locate the jar containing this driver class.
        job.setJarByClass(FruitDriver.class);

        // 3. Mapper and its output key/value types (file offset -> raw line).
        job.setMapperClass(FruitMapper.class);
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);

        // 4. Table reducer: initTableReducerJob also wires the HBase output
        //    format and target table, so no setReducerClass call is needed.
        TableMapReduceUtil.initTableReducerJob(args[1], FruitReducer.class, job);

        // 5. Input path on HDFS.
        FileInputFormat.setInputPaths(job, new Path(args[0]));

        // 6. Submit and block until the job finishes.
        boolean result = job.waitForCompletion(true);
        return result ? 0 : 1;
    }

    @Override
    public void setConf(Configuration configuration) {
        this.conf = configuration;
    }

    @Override
    public Configuration getConf() {
        return conf;
    }

    /**
     * CLI entry point; delegates to {@link ToolRunner} so generic Hadoop
     * options (-D, -conf, ...) are parsed before {@link #run(String[])}.
     */
    public static void main(String[] args) {
        try {
            Configuration configuration = new Configuration();
            int exitCode = ToolRunner.run(configuration, new FruitDriver(), args);
            System.exit(exitCode);
        } catch (Exception e) {
            e.printStackTrace();
            // Previously fell through here and the JVM exited with status 0
            // even on failure; report a non-zero code to callers/schedulers.
            System.exit(1);
        }
    }

}
