package org.example.mpareduce.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class MineWordCountJob {
    /**
     * Driver for the word-count MapReduce job: configures the HDFS
     * connection, wires the mapper/reducer classes and I/O paths, then
     * submits the job and blocks until it finishes.
     *
     * @param args unused; input/output paths are hard-coded below
     * @throws IOException            on HDFS access or job-submission failure
     * @throws ClassNotFoundException if a job class cannot be resolved at submission
     * @throws InterruptedException   if the wait for job completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // 1. Connect to HDFS.
        String hdfsUrl = "hdfs://gsy-cloud1";
        Configuration configuration = new Configuration();
        // BUG FIX: the property key is case-sensitive — it must be
        // "fs.defaultFS"; the original "fs.defaultFs" was silently ignored.
        configuration.set("fs.defaultFS", hdfsUrl);
        // Resolve datanodes by hostname — needed when the client sits
        // outside the cluster's internal network.
        configuration.set("dfs.client.use.datanode.hostname", "true");
        // Allow submission from a non-Linux client (e.g. a Windows dev box).
        configuration.set("mapreduce.app-submission.cross-platform", "true");

        // 2. Create the job (map/reduce core) with an identifiable name.
        Job job = Job.getInstance(configuration, "mine-word-count");
        job.setJarByClass(MineWordCountJob.class);

        // 3. Input data.
        FileInputFormat.addInputPath(job, new Path(hdfsUrl + "/buaa/moss"));

        // 4. Map phase.
        job.setMapperClass(MineWordCountMap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // 5. Reduce phase.
        job.setReducerClass(MineWordCountReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // 6. Output location. MapReduce refuses to start if the output
        //    directory already exists, so remove a stale one first.
        Path outputPath = new Path(hdfsUrl + "/test/output");
        FileSystem fs = FileSystem.get(configuration);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true); // recursive: output is a directory
        }
        FileOutputFormat.setOutputPath(job, outputPath);

        // 7. Submit and propagate success/failure via the exit code instead
        //    of silently discarding waitForCompletion's boolean result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
