package com.knight.hadoop.day12.zuoye;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import com.knight.hadoop.day10.preprocess.WebLogObject;

/**
 * Step 1: normalize the raw web-log file. Records with the same IP are grouped
 * together (via {@code GroupLogComparator}), sorted by access time, and tagged
 * with a session id.
 *
 * Session rule: two hits from the same IP that are less than 30 minutes apart
 * belong to the same session; a gap of 30 minutes or more starts a new one.
 *
 * Output rows are {@code \001}-separated with a fixed 9-field layout:
 * date, ip, remoteUser, sessionId, requestURL, referal, status, bodyBytesSend, userAgent.
 *
 * @author
 *
 */
public class LogParse {

	static NullWritable v = NullWritable.get();

	/**
	 * Parses each raw log line into a LogBean and emits it as both key and value.
	 * The bean is the key so that the custom sort/grouping comparators can
	 * cluster records by IP and order them by access time.
	 */
	static class LogParseMapper extends Mapper<LongWritable, Text, LogBean, LogBean> {

		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

			String line = value.toString();

			LogBean logBean = LogBeanParse.parse(line);

			// Guard against parser failure as well as invalid records; bad lines are dropped.
			if (logBean != null && logBean.isValid()) {
				context.write(logBean, logBean);
			}
		}
	}

	/**
	 * Receives all records for one IP (grouped by GroupLogComparator, already
	 * sorted by access time) and writes them out with a session id attached.
	 * A new session id is generated whenever two consecutive hits are 30
	 * minutes or more apart.
	 */
	static class LogParseReducer extends Reducer<LogBean, LogBean, Text, NullWritable> {

		static Text k = new Text();
		static NullWritable v = NullWritable.get();

		/**
		 * Formats one record as the fixed 9-field \001-separated output row.
		 * Centralizing this guarantees every branch emits the same layout.
		 */
		private static String format(LogBean2 bean, String sessionId) {
			return bean.getDate() + "\001" + bean.getIp() + "\001" + bean.getRemoteUser() + "\001"
					+ sessionId + "\001" + bean.getRequestURL() + "\001" + bean.getReferal() + "\001"
					+ bean.getStatus() + "\001" + bean.getBodyBytesSend() + "\001" + bean.getUserAgent();
		}

		@Override
		protected void reduce(LogBean key, Iterable<LogBean> values, Context context)
				throws IOException, InterruptedException {

			// Hadoop reuses the value object across iterations, so each record
			// must be deep-copied before being collected into the list.
			List<LogBean2> listBean = new ArrayList<>();
			for (LogBean bean : values) {
				LogBean2 logBean2 = new LogBean2();
				logBean2.set(bean);
				listBean.add(logBean2);
			}

			if (listBean.isEmpty()) {
				return;
			}

			String sessionId = UUID.randomUUID().toString();

			if (listBean.size() == 1) {
				// Single visit: emit the same full field set as the multi-record
				// branch (previously status/bodyBytesSend/userAgent were dropped
				// here, producing rows with a different column count).
				k.set(format(listBean.get(0), sessionId));
				context.write(k, v);
				return;
			}

			for (int i = 1; i < listBean.size(); i++) {
				LogBean2 currentBean = listBean.get(i);
				LogBean2 preBean = listBean.get(i - 1);

				// true when the gap between consecutive hits is >= 30 minutes,
				// i.e. the current hit starts a new session.
				boolean newSession = Utils.compareDate(currentBean.getDate(), preBean.getDate());

				// The previous record always belongs to the session that was
				// active at its time, so it is written before any id rotation.
				k.set(format(preBean, sessionId));
				context.write(k, v);

				if (newSession) {
					sessionId = UUID.randomUUID().toString();
				}

				if (i == listBean.size() - 1) {
					// Last record: emit it with whichever session id it ended up in.
					k.set(format(currentBean, sessionId));
					context.write(k, v);
				}
			}
		}
	}

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);
		// Must reference this driver class (was WebLogObject.class) so Hadoop
		// locates the correct job jar to ship to the cluster.
		job.setJarByClass(LogParse.class);
		job.setMapperClass(LogParseMapper.class);
		job.setReducerClass(LogParseReducer.class);
		// Group all records of the same IP into one reduce() call.
		job.setGroupingComparatorClass(GroupLogComparator.class);

		job.setMapOutputKeyClass(LogBean.class);
		job.setMapOutputValueClass(LogBean.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		FileInputFormat.setInputPaths(job, new Path("E:\\home\\webLog\\input"));
		FileOutputFormat.setOutputPath(job, new Path("E:\\home\\webLog\\output1"));
		//FileInputFormat.setInputPaths(job, new Path("C:\\application\\book\\Hadoop\\input\\weblog\\step1"));
		//FileOutputFormat.setOutputPath(job, new Path("C:\\application\\book\\Hadoop\\output\\weblog\\step1"));

		boolean res = job.waitForCompletion(true);

		System.exit(res ? 0 : 1);
	}
}
