package UserStatistic;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import UserStatistic.TProperties;


public class UserStatistic {
	/**
	 * Job driver: configures and submits the "Dx_PerCountPV" MapReduce job.
	 * args[0] = input path glob, args[1] = output directory; when no arguments
	 * are supplied, falls back to the local development paths.
	 * Exits 0 on success, 1 on job failure or any exception.
	 */
	public static void main(String[] args) throws IOException, InterruptedException{
		try {
			// Build the job configuration.
			Configuration conf = new Configuration();
			// Raise map/reduce container memory.
			conf.set("mapreduce.map.memory.mb", "5120");
			conf.set("mapreduce.reduce.memory.mb", "5120");
			// Disable the task timeout: the cluster is unstable and tasks were being
			// killed spuriously. Safe only because this job contains no infinite loops.
			conf.set("mapred.task.timeout", "0");
			// Small-cluster HDFS write settings: never replace a datanode on a write
			// failure (there may be no spare node to replace it with).
			conf.set("dfs.client.block.write.replace-datanode-on-failure.policy","NEVER"); 
			conf.set("dfs.client.block.write.replace-datanode-on-failure.enable","true"); 
			// Use command-line arguments when supplied; previously the paths were
			// hard-coded, which made the packaged jar unusable on the cluster.
			String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
			if (otherArgs.length < 2) {
				// Fall back to the local development paths.
				otherArgs = new String[] {
						"F://sparkData//test//02.DxFileMatch/part-m-00*",
						"F://sparkData//test//10.DxPerCountPV"};
			}
			// Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
			Job job = Job.getInstance(conf, "Dx_PerCountPV");
			// Required so Hadoop can locate the job jar when run from a packaged jar.
			job.setJarByClass(UserStatistic.class);
			// Mapper / reducer classes.
			job.setMapperClass(PerCountPVMapper.class);
			job.setReducerClass(PerCountPVReducer.class);
			// Map output key/value types.
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(IntWritable.class);
			// Final (reducer) output key/value types.
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(Text.class);
			// Input glob and output directory.
			FileInputFormat.setInputPaths(job, otherArgs[0]);
			FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
			// Submit and block; exit 0 on success, 1 on job failure.
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		} catch (Exception e) {
			e.printStackTrace();
			// Previously the JVM fell through and exited 0 here, hiding failures
			// from any calling scheduler; report the error via the exit code.
			System.exit(1);
		}
	}
	/**
	 * Splits each input line on the configured delimiter and, when the flag in
	 * column 4 is 1 or 2, emits (column 1, flag).
	 * Columns (0-based): [1] = user key, [4] = flag (1 or 2 emitted, else dropped).
	 */
	public static class PerCountPVMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
		// Reusable output objects — avoids allocating a Text/IntWritable per record.
		// (The old static IntWritable field named "flag" was shadowed by a local
		// variable in map() and never used; it has been removed.)
		private final Text outKey = new Text();
		private final IntWritable outValue = new IntWritable();
		// Field delimiter for the input lines, loaded once per task in setup()
		// instead of once per record.
		private String delimiter;

		@Override
		protected void setup(Context context) {
			delimiter = TProperties.getValue("fileoutsplit");
		}

		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String[] fields = value.toString().split(delimiter);
			int flag = Integer.parseInt(fields[4]);
			// Both original branches emitted num == flag, so they collapse to one.
			if (flag == 1 || flag == 2) {
				outKey.set(fields[1]);
				outValue.set(flag);
				context.write(outKey, outValue);
			}
		}
	}
	
	
	
	public static class PerCountPVReducer extends Reducer<Text, IntWritable, NullWritable, Text> {
		private Text result = new Text();
		
		public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException{
			Iterator<IntWritable> it=values.iterator();
	        int sum1=0;
	        int sum2=0;
	        int sum3=0;
	        String str1 = "";
	        String str2 = "";
	        String str = "";
	        while(it.hasNext()){
	            IntWritable value=it.next();
	            int num=value.get();
	            if(num == 1){
	                str1 = "1";
	                sum1 = sum1 + 1;
	            }
	            if(num == 2){
	                str2 = "2";
	                sum2 = sum2 + 1;
	            }
	            if(num == 0){
	                sum3 = sum3 + 1;
	            }
	        }
	        if(sum1 == 0){
	            str = key.toString() + "|"  +  str2 + ":" + sum2;
	        }
	        else if(sum2 == 0){
	            str = key.toString() + "|" + str1 + ":" + sum1;
	        }
	        else {
	            str = key.toString() + "|" + str1 + ":" + sum1 + "," +  str2 + ":" + sum2;
	        }
	        //str = key.toString() + "|" + str1 + ":" + sum1 + "," +  str2 + ":" + sum2;
	        context.write(NullWritable.get(), new Text(str));
		}
	}
}
