package com.company.recruit.clean.demo1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.springframework.stereotype.Component;

import java.awt.*;
import java.io.IOException;

/**
 * @author wanghengfeng
 * @date 2021年/12月/02日  12:20:56
 */
/**
 * Driver for the position data-cleaning MapReduce job.
 *
 * <p>Wires {@link MyPositionMapper} and {@link MyPositionReduce} into a job that
 * reads the recruit CSV and writes Text/Text output. Input/output paths may be
 * supplied as {@code args[0]}/{@code args[1]}; otherwise local dev defaults are used.
 *
 * @author wanghengfeng
 * @date 2021/12/02 12:20:56
 */
public class MyPositionDrives {

    /**
     * Configures and submits the job, then exits with 0 on success, 1 on failure.
     *
     * @param args optional: args[0] = input path, args[1] = output path
     * @throws IOException            on filesystem/job-setup failure
     * @throws ClassNotFoundException if a job class cannot be resolved at submit time
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // Fix: the original called Job.getInstance() with no arguments, so the
        // Configuration created above was never attached to the job.
        Job job = Job.getInstance(conf);
        job.setJarByClass(MyPositionDrives.class);
        job.setMapperClass(MyPositionMapper.class);
        job.setReducerClass(MyPositionReduce.class);
        setOutputTypes(job);
        Path outputPath = configurePaths(job, args);
        // Custom OutputFormat that writes results to MySQL (currently disabled):
        // job.setOutputFormatClass(MySQLTextOutputFormat.class);

        // Remove any pre-existing output directory so reruns don't fail with
        // FileAlreadyExistsException.
        outputPath.getFileSystem(conf).delete(outputPath, true);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Registers the input and output paths on the job.
     *
     * <p>Command-line paths take precedence; when absent, the hard-coded local
     * development paths are used (preserves the original default behavior).
     *
     * @param job  the job to configure
     * @param args optional CLI arguments: args[0] = input, args[1] = output
     * @return the output path (needed by the caller to delete stale output)
     * @throws IOException if the input path cannot be added
     */
    private static Path configurePaths(Job job, String[] args) throws IOException {
        String input = args.length > 0
                ? args[0]
                : "F:\\bigData\\bigdata\\demo\\input\\recruit_mongo.csv";
        String output = args.length > 1
                ? args[1]
                : "F:\\bigData\\bigdata\\demo\\output";

        FileInputFormat.addInputPath(job, new Path(input));
        Path outputPath = new Path(output);
        FileOutputFormat.setOutputPath(job, outputPath);
        return outputPath;
    }

    /**
     * Declares Text/Text key-value types for both the map and the final output.
     *
     * @param job the job to configure
     */
    private static void setOutputTypes(Job job) {
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
    }
}
