package com.bigdata;

import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.Calendar;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for a line-counting MapReduce job: each input line becomes a map
 * output key with a count of 1, and the reducer (also used as a combiner)
 * sums the occurrences of every distinct line.
 *
 * Prints the job start time, end time, and elapsed milliseconds, then exits
 * with status 0 on job success and 1 on failure.
 */
public class LogCountDriver {

	// Local-filesystem paths used for local testing on Windows.
	// When packaging the jar to run on a cluster, switch to HDFS paths such
	// as "/LogCount/input" and "/LogCount/output".
	private static final String INPUT_PATH = "D:\\test\\ip.txt";
	private static final String OUTPUT_PATH = "D:\\test\\output";

	public static void main(String[] args) throws Exception {

		// Job configuration holding all settings for this MapReduce run.
		Configuration conf = new Configuration();

		// Hadoop refuses to start a job whose output directory already
		// exists, so remove it first. 'true' = delete recursively even if
		// the directory is non-empty.
		Path outputPath = new Path(OUTPUT_PATH);
		FileSystem fileSystem = outputPath.getFileSystem(conf);
		if (fileSystem.exists(outputPath)) {
			fileSystem.delete(outputPath, true);
		}

		Instant begin = Instant.now();
		System.out.println(begin);

		// Setting "mapreduce.framework.name" to "local" is unnecessary for
		// local runs: mapred-default.xml bundled inside
		// hadoop-mapreduce-client-core-2.7.4.jar already defaults to "local".
		// conf.set("mapreduce.framework.name","local");
		Job job = Job.getInstance(conf);

		// Main class for the job jar.
		job.setJarByClass(LogCountDriver.class);

		// Mapper and reducer implementations for this job.
		job.setMapperClass(LogCountMapper.class);
		job.setReducerClass(LogCountReducer.class);

		// Map-phase output key/value types.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);

		// Final (reduce-phase) output key/value types.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		// job.setNumReduceTasks(3); // number of reduce tasks, if needed

		// Summation is associative and commutative, so the reducer can safely
		// double as a combiner to shrink shuffle traffic.
		job.setCombinerClass(LogCountReducer.class);

		// Input data location and output directory.
		// If error 0644 occurs or winutils.exe is not found, the Windows
		// Hadoop environment and support files must be set up first.
		FileInputFormat.setInputPaths(job, INPUT_PATH);
		FileOutputFormat.setOutputPath(job, outputPath);

		// job.submit(); // avoid: submits without waiting or progress output

		// Submit the job and print its progress until completion.
		boolean success = job.waitForCompletion(true);

		Instant end = Instant.now();
		System.out.println(end);
		System.out.println(Duration.between(begin, end).toMillis());

		System.exit(success ? 0 : 1);
	}

}

/**
 * Emits each input line verbatim as the output key with a count of 1, so the
 * reducer can total the occurrences of every distinct line.
 */
class LogCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

	// Reused constant writable: avoids allocating a new LongWritable per record.
	private final LongWritable one = new LongWritable(1);

	/**
	 * @param key     byte offset of the line within the input split (unused)
	 * @param value   the text of one input line
	 * @param context sink for the (line, 1) pair
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
		context.write(value, one);
	}

}

/**
 * Sums the 1-counts emitted by {@code LogCountMapper} (and any partial sums
 * produced when this class runs as a combiner) to yield the total number of
 * occurrences of each distinct input line.
 */
class LogCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

	/**
	 * @param key     one distinct input line
	 * @param values  counts for this key (1s from the mapper, or partial sums
	 *                from the combiner)
	 * @param context sink for the (line, total) pair
	 */
	@Override
	protected void reduce(Text key, Iterable<LongWritable> values, Context context)
			throws IOException, InterruptedException {

		long count = 0;
		for (LongWritable partial : values) {
			count += partial.get();
		}

		context.write(key, new LongWritable(count));
	}

}
