package cn.dwj.hadoop.mr.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;


/**
 * Driver ("runner") for the word-count MapReduce job.
 *
 * <p>Describes the job to the framework: which Mapper/Reducer classes to use,
 * the key/value output types for the map and reduce phases, and where the
 * input data lives and the results should be written.
 */
public class WCRunner {

    /**
     * Configures and submits the word-count job, then exits with the job's status.
     *
     * @param args optional overrides: {@code args[0]} = input path,
     *             {@code args[1]} = output path; defaults to local test paths.
     * @throws IOException            on filesystem/submission errors
     * @throws ClassNotFoundException if a job class cannot be located
     * @throws InterruptedException   if the submitting thread is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Windows-only workaround: point Hadoop at a local install so winutils.exe resolves.
        System.setProperty("hadoop.home.dir", "E:/project/study/hadoop-2.6.5");

        Configuration conf = new Configuration();
        // To run against a remote HDFS instead of the local filesystem, set fs.defaultFS,
        // e.g. conf.set("fs.defaultFS", "hdfs://192.168.110.151:9000/");
        Job wcJob = Job.getInstance(conf, "word count");

        // Jar containing the job's classes (required when submitting to a cluster).
        wcJob.setJarByClass(WCRunner.class);

        // Mapper and reducer implementations used by this job.
        wcJob.setMapperClass(WCMapper.class);
        wcJob.setReducerClass(WCReduce.class);

        // Map-phase output key/value types (must match WCMapper's generic parameters).
        wcJob.setMapOutputKeyClass(Text.class);
        wcJob.setMapOutputValueClass(LongWritable.class);

        // Final (reducer) output key/value types.
        wcJob.setOutputKeyClass(Text.class);
        wcJob.setOutputValueClass(LongWritable.class);

        // Input may be one or more files or a directory; defaults preserved for
        // backward compatibility, overridable from the command line.
        String inputPath = args.length > 0 ? args[0] : "E:/hadoop/wc/input/wc.txt";
        String outputPath = args.length > 1 ? args[1] : "E:/hadoop/wc/output4/";
        FileInputFormat.setInputPaths(wcJob, new Path(inputPath));
        // NOTE: the output directory must not already exist, or the job fails at submit time.
        FileOutputFormat.setOutputPath(wcJob, new Path(outputPath));

        // Bug fix: propagate job success/failure through the process exit code instead of
        // discarding waitForCompletion's boolean result ('true' = print progress to console).
        System.exit(wcJob.waitForCompletion(true) ? 0 : 1);
    }
}
