package com.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Driver for a simple Hadoop MapReduce word-count job.
 *
 * @author wesley
 */
public class WordCount {

    /** Default input file used when no arguments are supplied. */
    private static final String DEFAULT_INPUT = "input/dream.txt";

    /** Default output directory used when no arguments are supplied. */
    private static final String DEFAULT_OUTPUT = "output/word";

    /**
     * Configures and submits the word-count MapReduce job, then exits with
     * status 0 on success or 1 on failure.
     *
     * <p>Accepts an optional argument pair {@code <in> <out>}. If exactly two
     * arguments are not provided, the built-in defaults are used instead.
     * (The original code ignored {@code args} entirely, which made its
     * length check unreachable dead code.)
     *
     * @param args optional {input path, output path} pair
     * @throws IOException            if job configuration or filesystem access fails
     * @throws ClassNotFoundException if a job class cannot be resolved at submit time
     * @throws InterruptedException   if interrupted while waiting for job completion
     */
    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {

        // Point Hadoop at a local installation directory (needed on Windows so
        // the native winutils binaries can be located).
        System.setProperty("hadoop.home.dir", "D:\\DEV_Tools\\hadoop-2.7.6");

        // Remove any previous output directory up front: Hadoop fails the job
        // if the output path already exists.
        FileUtil.deleteDir("output");

        Configuration conf = new Configuration();

        // Use the supplied <in> <out> arguments when present; otherwise fall
        // back to the defaults so the original no-argument invocation still works.
        String[] otherArgs = (args != null && args.length == 2)
                ? args
                : new String[]{DEFAULT_INPUT, DEFAULT_OUTPUT};

        // Job name as shown in the Hadoop UI.
        Job job = Job.getInstance(conf, "WordCount");

        job.setJarByClass(WordCount.class);
        job.setMapperClass(WordCountMapper.class);
        // NOTE(review): if WordCountReduce simply sums counts (associative and
        // commutative), it could also be registered as a combiner via
        // job.setCombinerClass(...) to reduce shuffle traffic — confirm before enabling.
        job.setReducerClass(WordCountReduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

        // Block until the job finishes; propagate success/failure as the exit code.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
