package com.sqk.task10;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import com.sqk.dxpro.utils.TProperties;

public class DxProBehave {
	/**
	 * Job driver: configures and submits the product-behavior join job.
	 * Exits with 0 on success, 1 on failure.
	 *
	 * @param args unused; input/output paths are currently hard-coded below
	 * @throws Exception if job setup or submission fails
	 */
	public static void main(String[] args) throws Exception{
		// Build the job configuration.
		Configuration conf = new Configuration();
		// Map-task memory (MB).
		conf.set("mapreduce.map.memory.mb", "3072");
		// Disable the task timeout: the cluster is unstable and tasks may look
		// stalled. Safe only because the job contains no infinite loops.
		conf.set("mapred.task.timeout", "0");
		// On small clusters, never swap in a replacement datanode on write failure.
		conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
		conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
		conf.set("mapreduce.reduce.memory.mb", "2048");
		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(conf, "Dx_ProBehave");
		// Required so the job jar is located when run on the cluster.
		job.setJarByClass(DxProBehave.class);
		// 1.1 Inputs: match results from the previous task plus the
		// product/address dictionary file.
		FileInputFormat.addInputPath(job, new Path("F:\\SparkTask\\DxMatch\\part-m-00*"));
		FileInputFormat.addInputPath(job, new Path("F:\\SparkTask\\data\\t_dx_product_msg_addr.txt"));
		// 1.2 Mapper and its intermediate key/value types.
		job.setMapperClass(DxPBMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		// Partition by the behavior id (the key's text before the comma).
		job.setPartitionerClass(KeyPartitioner.class);
		// Group by behavior id so dictionary and behavior records meet in one reduce call.
		job.setGroupingComparatorClass(KeyGroupingComparator.class);
		job.setReducerClass(DxPBReducer.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);
		// Single reducer so the join output lands in one file.
		job.setNumReduceTasks(1);
		FileOutputFormat.setOutputPath(job, new Path("F:\\SparkTask\\DxProBehave"));
		// Submit and block; exit code 0 = success, 1 = failure.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
	
	/**
	 * map端join
	 * */
	public static class DxPBMapper extends Mapper<LongWritable, Text, Text, Text> {
		private Text okey = new Text();
		private Text ovalue = new Text();

		protected void map(LongWritable key, Text value, Context context) 
				throws IOException, InterruptedException {
			
			FileSplit fileSplit = (FileSplit) context.getInputSplit();
			
			String path = fileSplit.getPath().toString();
			String[] values = value.toString().split(TProperties.getValue("fileoutsplit"));
			StringBuffer sb = new StringBuffer();
			
			if (path.indexOf(TProperties.getValue("proaddress")) >= 0) {
				//行为ID + 数据类型标识
				okey = new Text(values[0] + ",1");
				//1#（数据标识，用于区分数据）,产品类型、产品ID、产品名称、品牌/区域、价格、型号、车系、手\自
				sb.append("1#" + values[1])
						.append(TProperties.getValue("outfilesplit")).append(values[2])
						.append(TProperties.getValue("outfilesplit")).append(values[3])
						.append(TProperties.getValue("outfilesplit")).append(values[4])
						.append(TProperties.getValue("outfilesplit")).append(values[5])
						.append(TProperties.getValue("outfilesplit")).append(values[6])
						.append(TProperties.getValue("outfilesplit")).append(values[7])
						.append(TProperties.getValue("outfilesplit")).append(values[8]);
				ovalue = new Text(sb.toString());
				context.write(okey, ovalue);
				//行为ID，1 1#产品类型、产品ID、产品名称、品牌/区域、价格、型号、车系、手\自
			}
			else {
				if("1".equals(values[2])) {
					//行为ID + 数据类型标识
					okey = new Text(values[0] + ",2");
					//用户ID
					sb.append(values[1]);
					ovalue = new Text(sb.toString());
					context.write(okey, ovalue);
					//行为ID，2用户ID
				}
			}
		}
	}
	
	/**
	 * reduce端join
	 **/
	public static class DxPBReducer extends Reducer<Text, Text, NullWritable, Text>{
		private Text result = new Text();
		
		public void reduce(Text key, Iterable<Text> values, Context context) 
				throws IOException,InterruptedException {
			String promatch = "";
			int i = 1;
			//数据循环
			for(Text val:values) {
				//判断是否首条数据
				if (i==1) {
					//首次加载，取value数据，promatch是字典数据
					promatch = val.toString();
					//判断是否是配置数据，如果不是程序退出（无匹配结果）
					if(promatch.indexOf("1#") == -1) {
						return;
					}
					//更改首条数据标识
					i = 2;
				}
				else {
					//首条数据后的数据，关联配置数据
					//用户id，产品相关，行为id
					//第二条及之后的数据val 是任务2里的输出结果数据
					result = new Text(val.toString() + TProperties.getValue("outfilesplit") + promatch.substring(2)
							+ TProperties.getValue("outfilesplit") + key.toString().split(",")[0]);
					context.write(NullWritable.get(), result);
				}
			}
		}
	}
	
	public static abstract class KeyPartitioner extends Partitioner<Text, Text>{
		public int getPartition(Text key,Text value,int numPartitions) {
			System.err.println(key + ";" + value + ";");
			return Math.abs(key.toString().split(",")[0].hashCode() * 127) % numPartitions;
		}
	}
	
	public static class KeyGroupingComparator extends WritableComparator {
		protected KeyGroupingComparator() {
			super(Text.class,true);
		}
		public int compare(WritableComparable w1, WritableComparable w2) {
			Text ip1 = (Text) w1;
			Text ip2 = (Text) w2;
			String l = ip1.toString().split(",")[0];
			String r = ip2.toString().split(",")[0];
			return l.equals(r) ? 0 : 1;
		}
	}
}
