package com.atguigu.guli.etl.map;

import com.atguigu.util.HdfsUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * GuliDriver — driver for the map-only Guli video ETL MapReduce job.
 *
 * @author 剧情再美终是戏
 * @version 1.0
 * @since 2019/11/30 10:22
 **/
public class GuliDriver {

    /** Default input directory, used when no CLI argument is supplied. */
    private static final String DEFAULT_INPUT =
            "F:\\file\\尚硅谷相关资料\\课件资料-ok\\Hive\\2.资料\\gulivideo\\video\\2008\\0222";

    /** Default output directory, used when no CLI argument is supplied. */
    private static final String DEFAULT_OUTPUT =
            "F:\\file\\尚硅谷相关资料\\课件资料-ok\\Hive\\2.资料\\gulivideo\\video\\2008\\0222output";

    /**
     * Configures and submits the map-only ETL job, then exits with the
     * job's status (0 on success, 1 on failure).
     *
     * @param args optional overrides: args[0] = input path, args[1] = output
     *             path; the hard-coded defaults are used when absent, so
     *             existing no-arg invocations keep working
     * @throws IOException            on filesystem or job-submission errors
     * @throws ClassNotFoundException if the mapper class cannot be resolved
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

        // Input/output locations — overridable from the command line so the
        // job is not tied to one developer's machine.
        Path inputPath = new Path(args.length > 0 ? args[0] : DEFAULT_INPUT);
        Path outputPath = new Path(args.length > 1 ? args[1] : DEFAULT_OUTPUT);

        // Remove any previous output directory up front; otherwise the job
        // fails with FileAlreadyExistsException.
        Configuration conf = new Configuration();
        FileSystem fs = HdfsUtil.getFileSystem(conf);
        HdfsUtil.delete(fs, outputPath, true);

        // Initialize the job and identify the driver jar — without
        // setJarByClass the job cannot locate its classes on a real cluster.
        Job job = Job.getInstance(conf, "guli-etl");
        job.setJarByClass(GuliDriver.class);

        // Map-only job: set the mapper and disable the reduce phase.
        job.setMapperClass(GuliMapper.class);
        job.setNumReduceTasks(0);

        // Wire up the input and output directories.
        FileInputFormat.setInputPaths(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        // Propagate the job's success/failure to the process exit code
        // instead of silently discarding the boolean result.
        boolean success = job.waitForCompletion(true);
        System.exit(success ? 0 : 1);
    }
}
