package job11;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.regex.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import job01.TProperties;

public class job11 {
	/**
	 * Entry point for the Dx_LiveTimeData MapReduce job.
	 *
	 * Expected arguments (after generic Hadoop options are stripped):
	 *   0 - cycle threshold in days, forwarded to the reducer via conf key "cyc"
	 *   1 - root directory containing per-date result folders (8-digit names)
	 *   2 - current execution date (yyyyMMdd)
	 *   3 - current cycle's per-user statistics input path
	 *   4 - output path for this job's live-time results
	 */
	public static void main(String[] args) throws IOException, InterruptedException{
		try {
			// Job configuration.
			Configuration conf = new Configuration();
			// Memory settings for map and reduce tasks.
			conf.set("mapreduce.map.memory.mb", "5120");
			conf.set("mapreduce.reduce.memory.mb", "5120");
			// Timeout checking stays enabled; uncomment only on unstable
			// clusters (the job must then be guaranteed free of infinite loops).
			//conf.set("mapred.task.timeout", "0");
			// For small clusters: datanode replacement policy on write failure.
			//conf.set("dfs.client.block.write.replace-datanode-on-failure.policy","NEVER"); 
			//conf.set("dfs.client.block.write.replace-datanode-on-failure.enable","true"); 
			// Parse real command-line arguments (previously hard-coded to local
			// F:// debug paths, which made cluster runs impossible).
			String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
			if (otherArgs.length < 5) {
				System.err.println("Usage: job11 <cyc> <resultRoot> <yyyyMMdd> <input> <output>");
				System.exit(2);
			}
			// Cycle threshold read back by LiveTimeDataReducer.setup().
			conf.set("cyc", otherArgs[0]);
			// Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
			Job job = Job.getInstance(conf, "Dx_LiveTimeData");
			// Required when running from a packaged jar.
			job.setJarByClass(job11.class);
			// Mapper/reducer classes and key/value types.
			job.setMapperClass(LiveTimeDataMapper.class);
			job.setReducerClass(LiveTimeDataReducer.class);
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(Text.class);
			// Optional custom partitioning/grouping, currently unused.
			//job.setPartitionerClass(KeyPartitioner.class);
			//job.setGroupingComparatorClass(KeyGroupingComparator.class);
			// Reuse the job's configuration instead of a fresh one so generic
			// options (e.g. fs.defaultFS) are honored when resolving paths.
			FileSystem fs = FileSystem.get(conf);
			// Scan the result root for the most recent cycle before the current one.
			FileStatus[] status = fs.listStatus(new Path(otherArgs[1]));
			String pathname = "";
			int datacyc = 0;
			int currentCyc = Integer.parseInt(otherArgs[2]);
			for (FileStatus file : status) {
				String pname = file.getPath().toString();
				// Folder name must be exactly 8 digits and not the current cycle.
				if (pname.matches(".*\\/(\\d{8})$") && !pname.endsWith(otherArgs[2])) {
					String[] str = pname.split("\\/");
					int folderCyc = Integer.parseInt(str[str.length - 1]);
					// Keep the largest date that is still before the current cycle.
					if (folderCyc > datacyc && folderCyc < currentCyc) {
						datacyc = folderCyc;
						pathname = pname;
					}
				}
			}
			// Add the previous cycle's live-time output as a second input, if found.
			if (!"".equals(pathname)) {
				FileInputFormat.addInputPath(job, new Path(pathname + "/11.DxLiveTime"));
			}
			// Current-cycle input and this job's output.
			FileInputFormat.addInputPath(job, new Path(otherArgs[3]));
			FileOutputFormat.setOutputPath(job, new Path(otherArgs[4]));
			// Submit and block; exit 0 on success, 1 on failure.
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		} catch (Exception e) {
			e.printStackTrace();
			// Previously fell through and exited 0 on error; report failure instead.
			System.exit(1);
		}
	}
	/**
	 * Tags each input record by its origin. Lines read from the previous
	 * cycle's "DxLiveTime" output are re-emitted with marker "0" followed by
	 * their full statistics; current-cycle lines get marker "1" followed by
	 * the feature string. The map key is always the record's first field
	 * (presumably a user id — TODO confirm against the upstream job).
	 */
	public static class LiveTimeDataMapper extends Mapper<LongWritable, Text, Text, Text>{
		// Reused output key/value instances (avoids per-record allocation).
		private Text okey = new Text();
		private Text ovalue = new Text();
		
		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String[] str = value.toString().split(TProperties.getValue("fileoutsplit"));
			// The input file path tells us which dataset this record came from.
			FileSplit filesplit = (FileSplit) context.getInputSplit();
			String path = filesplit.getPath().toString();
			// Output field separator, looked up once per record instead of five times.
			String sep = TProperties.getValue("outfilesplit");
			okey.set(str[0]);
			if (path.contains("DxLiveTime")) {
				// Previous-cycle record: marker "0" + live days, joined days,
				// consecutive no-visit days, liveness flag (0 alive / 1 dead /
				// 2 invalid) and the feature string.
				ovalue.set("0" + sep + str[1] + sep + str[2] + sep +
						str[3] + sep + str[4] + sep + str[5]);
			} else {
				// Current-cycle record: marker "1" + feature string.
				ovalue.set("1" + sep + str[1]);
			}
			context.write(okey, ovalue);
		}
	}
	
	/**
	 * Merges a user's previous-cycle liveness record with the current cycle's
	 * activity to produce updated live-time statistics.
	 *
	 * Liveness flag values: 0 = alive, 1 = dead, 2 = invalid (still inside the
	 * warm-up window of fewer than {@code cyc} joined days). Once joined for at
	 * least {@code cyc} days, a user with {@code cyc} or more consecutive days
	 * without a visit is declared dead; dead users never change again.
	 */
	public static class LiveTimeDataReducer extends Reducer<Text, Text, NullWritable, Text> {
		// Cycle threshold in days, supplied as job argument 0 via conf key "cyc".
		private int cyc;
		
		@Override
		protected void setup(Reducer<Text, Text, NullWritable, Text>.Context context) 
				throws IOException, InterruptedException {
			cyc = Integer.parseInt(context.getConfiguration().get("cyc"));
		}
		
		// Reused output value instance (avoids per-record allocation).
		private Text result = new Text();
		
		/**
		 * Combines the previous cycle's record (marker "0") with the current
		 * cycle's activity record (marker "1") for a single user key and emits
		 * one line: key|liveDays|joinDays|noVisitDays|flag|features.
		 */
		@Override
		public void reduce(Text key, Iterable<Text> values, Context context) 
				throws IOException, InterruptedException {
			int liveDays = 0;
			int joinDays = 0;
			int noVisitDays = 0;
			// Liveness flag: 0 alive, 1 dead, 2 invalid.
			int flag = 2;
			// Which record kinds were seen: +1 previous cycle, +2 current cycle.
			// NOTE(review): duplicate records of a kind push this past 3 and
			// the switch below silently emits all-zero defaults — confirm the
			// inputs are deduplicated upstream.
			int doflag = 0;
			// Feature strings keyed by record marker ("0" or "1").
			Map<String, String> map = new HashMap<>();
			// Feature string selected for the output row.
			String temp = "";
			// Raw previous-cycle record, empty if the user is new this cycle.
			String liveStr = "";
			for (Text val : values) {
				String[] str = val.toString().split(TProperties.getValue("fileoutsplit"));
				if ("0".equals(str[0])) {
					// Previous-cycle record: carry its liveness flag forward.
					flag = Integer.parseInt(str[4]);
					doflag = doflag + 1;
					liveStr = val.toString();
					map.put("0", str[5]);
				} else if ("1".equals(str[0])) {
					doflag = doflag + 2;
					map.put("1", str[1]);
				}
			}
			// Fields of the previous-cycle record (single empty slot if absent).
			String[] res = liveStr.split(TProperties.getValue("fileoutsplit"));
			// 1 = previous cycle only, 2 = current cycle only, 3 = both.
			switch (doflag) {
			case 1:
				// User existed before but was inactive this cycle.
				temp = map.get("0");
				if (flag == 1) {
					// Dead users never change.
					liveDays = Integer.parseInt(res[1]);
					joinDays = Integer.parseInt(res[2]);
					noVisitDays = Integer.parseInt(res[3]);
					flag = 1;
				} else {
					// Not dead: age both counters by one day.
					joinDays = Integer.parseInt(res[2]) + 1;
					noVisitDays = Integer.parseInt(res[3]) + 1;
					if (joinDays < cyc) {
						// Still inside the warm-up window: remains invalid.
						liveDays = Integer.parseInt(res[1]) + 1;
						flag = 2;
					} else if (noVisitDays >= cyc) {
						// Past warm-up and inactive too long: declare dead.
						liveDays = Integer.parseInt(res[1]);
						flag = 1;
					} else {
						// Past warm-up and recently active enough: alive.
						liveDays = Integer.parseInt(res[1]) + 1;
						flag = 0;
					}
				}
				break;
			case 2:
				// Brand-new user this cycle.
				temp = map.get("1");
				liveDays = 1;
				joinDays = 1;
				noVisitDays = 0;
				flag = 2;
				break;
			case 3:
				// Existing user, active this cycle.
				temp = map.get("1");
				if (flag == 1) {
					// Dead users never change.
					liveDays = Integer.parseInt(res[1]);
					joinDays = Integer.parseInt(res[2]);
					noVisitDays = Integer.parseInt(res[3]);
					flag = 1;
				} else {
					// Activity resets the no-visit counter.
					liveDays = Integer.parseInt(res[1]) + 1;
					joinDays = Integer.parseInt(res[2]) + 1;
					noVisitDays = 0;
					// Alive once past the warm-up window, otherwise still invalid.
					flag = (joinDays < cyc) ? 2 : 0;
				}
				break;
			}
			
			// Output: key|liveDays|joinDays|noVisitDays|flag|features.
			// StringBuilder replaces StringBuffer: no cross-thread sharing here.
			String sep = TProperties.getValue("outfilesplit");
			StringBuilder sb = new StringBuilder();
			sb.append(key.toString()).append(sep)
					.append(liveDays).append(sep)
					.append(joinDays).append(sep)
					.append(noVisitDays).append(sep)
					.append(flag).append(sep)
					.append(temp);
			
			// Reuse the Text instance instead of allocating per record.
			result.set(sb.toString());
			context.write(NullWritable.get(), result);
		}
	}
}
