package job07;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import job01.TProperties;

public class job07 {
	/**
	 * Driver: configures and submits the "Dx_ProCountPV" MapReduce job.
	 * Input/output paths come from the command line when two args are given;
	 * otherwise the hard-coded local test paths are used.
	 * Exits 0 on job success, 1 on failure or setup error.
	 */
	public static void main(String[] args) {
		try {
			// Build the job configuration.
			Configuration conf = new Configuration();
			// Give each map/reduce task 5 GB of container memory.
			conf.set("mapreduce.map.memory.mb", "5120");
			conf.set("mapreduce.reduce.memory.mb", "5120");
			// Disable the task timeout: the cluster is unstable and tasks may stall
			// past the default; the job itself must contain no endless loops.
			// ("mapreduce.task.timeout" is the current key; "mapred.task.timeout"
			// is its deprecated alias.)
			conf.set("mapreduce.task.timeout", "0");
			// Small cluster: control datanode-replacement policy on write failure.
			conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
			conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
			// Prefer command-line paths; fall back to the local test defaults.
			String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
			if (otherArgs.length < 2) {
				otherArgs = new String[] {
						"F://sparkData//test//06.DxProMatch//part-r-00*",
						"F://sparkData//test//07.DxProCountPV"};
			}
			// Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
			Job job = Job.getInstance(conf, "Dx_ProCountPV");
			// Required when running from a jar so the jar is shipped to the cluster.
			job.setJarByClass(job07.class);
			job.setMapperClass(DxProCountPVMapper.class);
			job.setReducerClass(DxProCountPVReducer.class);
			// Map output types.
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(IntWritable.class);
			// Final (reducer) output types.
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(Text.class);
			// Input/output paths.
			FileInputFormat.setInputPaths(job, otherArgs[0]);
			FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
			// Submit and block: exit 0 on success, 1 on failure.
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		} catch (Exception e) {
			e.printStackTrace();
			// Previously fell through and exited 0 on setup failure — report failure.
			System.exit(1);
		}
	}
	public static class DxProCountPVMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
		private Text okey = new Text();
		private IntWritable ovalue = new IntWritable(1);
		
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			StringTokenizer itr = new StringTokenizer(value.toString());
			while (itr.hasMoreTokens()) {
				String str = itr.nextToken().toString();
				String[] splits = str.split(TProperties.getValue("fileoutsplit"));
				System.out.println(str.toString());
				okey.set(splits[2] + TProperties.getValue("outfilesplit") + splits[0] + TProperties.getValue("outfilesplit") + splits[1]);
				context.write(okey, ovalue);
			}
		}
	}
	
	/**
	 * Sums the per-record counts for each key and writes one line per key:
	 * fields[0], fields[1], total count, fields[2], joined by the
	 * "outfilesplit" separator (key written as NullWritable).
	 *
	 * NOTE(review): the mapper joins the key with "outfilesplit" but this
	 * reducer splits it with "fileoutsplit" — confirm the two property values
	 * are equivalent.
	 */
	public static class DxProCountPVReducer extends Reducer<Text, IntWritable, NullWritable, Text> {
		private IntWritable cnt = new IntWritable();
		private Text result = new Text();

		public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException{
			// Total page views for this key.
			int total = 0;
			for (IntWritable v : values) {
				total += v.get();
			}
			cnt.set(total);
			// Break the composite key back into its fields.
			String[] fields = key.toString().split(TProperties.getValue("fileoutsplit"));
			// Reassemble as: field0 | field1 | count | field2.
			StringBuilder line = new StringBuilder(fields[0]);
			line.append(TProperties.getValue("outfilesplit")).append(fields[1]);
			line.append(TProperties.getValue("outfilesplit")).append(cnt.toString());
			line.append(TProperties.getValue("outfilesplit")).append(fields[2]);
			result.set(line.toString());
			context.write(NullWritable.get(), result);
		}
	}
}
