package mr.flow.weblog.mr;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.UUID;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import mr.flow.weblog.bean.IpGroupingComparator;
import mr.flow.weblog.bean.WeblogBean;

/**
 * 将清洗过后的数据梳理出点击流pageViews模型数据
 * 输入的数据是经过预处理之后的数据
 * 
 * 区分每一次会话，给每一次会话打上sessionId
 * 梳理出每一次会话所访问的每个页面  (请求时间，url,停留时长，以及该页面在这次session中的序号)
 * 保留http_referer  body_bytes_sent   http_user_agent
 * 
 * @author 汤小萌
 *
 */
public class ClickStream {

	
	static class ClickStreamMapper extends Mapper<LongWritable, Text, WeblogBean, Text> {
		// Reused across map() calls to avoid per-record allocation (standard Hadoop pattern).
		private final WeblogBean k = new WeblogBean();
		private final Text v = new Text();

		/**
		 * Parses one pre-processed weblog line ('\001'-separated, at least 9 fields,
		 * field[0] = validity flag "true"/"false") into a {@link WeblogBean} key.
		 * Malformed lines and records not flagged valid are silently dropped.
		 */
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split("\001");
			if (fields.length < 9) {
				return; // malformed record — skip
			}
			// "true".equals(...) is null-safe and already yields the boolean directly.
			k.set("true".equals(fields[0]), fields[1], fields[2], fields[3], fields[4],
					fields[5], fields[6], fields[7], fields[8]);
			if (k.isValid()) { // only valid records enter the downstream processing
				context.write(k, v);
			}
		}
	}
	
	
	/**
	 * Groups cleaned weblog records by IP (see {@code IpGroupingComparator}) and,
	 * relying on records arriving time-ordered within each group, splits each IP's
	 * visits into sessions and emits one line per pageview:
	 *
	 * <pre>
	 * sessionId \001 ip \001 remote_user \001 url \001 time_local \001 step \001
	 * staySeconds \001 http_referer \001 http_user_agent \001 body_bytes_sent \001 status
	 * </pre>
	 *
	 * A gap of more than 30 minutes between two consecutive requests from the same
	 * IP closes the current session and starts a new one.
	 *
	 * @author 汤小萌
	 */
	static class ClickStreamReducer extends Reducer<WeblogBean, Text, Text, NullWritable> {

		/** Gap (ms) between two requests from the same IP that starts a new session. */
		private static final long SESSION_TIMEOUT_MS = 30 * 60 * 1000L;
		/** Stay time (seconds) assigned when there is no next request to diff against. */
		private static final long DEFAULT_STAY_SECONDS = 60L;

		// SimpleDateFormat is not thread-safe, but each reducer instance is driven by a
		// single task thread, so one cached instance is safe. The pattern is purely
		// numeric, so the Locale is irrelevant; US is now used consistently
		// (previously Locale.UK and Locale.US were mixed between two helpers).
		private final SimpleDateFormat timeFormat =
				new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);

		private final Text k = new Text();
		private final NullWritable v = NullWritable.get();

		@Override
		protected void reduce(WeblogBean beanKey, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			int step = 1; // position of the page within the current session
			String sessionId = UUID.randomUUID().toString();

			// State of the previous record: a pageview can only be emitted once the
			// NEXT request is known, because that timestamp determines the stay time.
			String lastTimeStr = null;
			String lastSaveStr = null;   // referer \001 userAgent \001 bytes \001 status
			String lastIpAndUser = null; // ip \001 remote_user
			String lastUrl = null;
			long stayTime = 0L;

			// Iterating advances the grouped key, so beanKey changes each turn;
			// the Text value itself carries no payload.
			for (Text ignored : values) {
				if (lastTimeStr != null) {
					try {
						stayTime = timeDiff(beanKey.getTime_local(), lastTimeStr);
					} catch (ParseException e) {
						// Should not happen on pre-cleaned data; the previous stayTime
						// is reused as a best-effort fallback.
						e.printStackTrace();
					}
					if (stayTime < SESSION_TIMEOUT_MS) {
						// Same session: emit the previous page with its real stay time.
						k.set(sessionId + "\001" + lastIpAndUser + "\001" + lastUrl + "\001"
								+ lastTimeStr + "\001" + step + "\001" + (stayTime / 1000)
								+ "\001" + lastSaveStr);
						context.write(k, v);
						step++;
					} else {
						// Gap exceeds the timeout: close the old session with the
						// default stay time, then start a fresh session.
						k.set(sessionId + "\001" + lastIpAndUser + "\001" + lastUrl + "\001"
								+ lastTimeStr + "\001" + step + "\001" + DEFAULT_STAY_SECONDS
								+ "\001" + lastSaveStr);
						context.write(k, v);
						step = 1;
						sessionId = UUID.randomUUID().toString();
					}
				}
				// Remember the current record; it is emitted on the next iteration
				// (or after the loop if it is this IP's last record).
				lastTimeStr = beanKey.getTime_local();
				lastSaveStr = beanKey.getHttp_referer() + "\001" + beanKey.getHttp_user_agent()
						+ "\001" + beanKey.getBody_bytes_sent() + "\001" + beanKey.getStatus();
				lastUrl = beanKey.getRequest();
				lastIpAndUser = beanKey.getRemote_addr() + "\001" + beanKey.getRemote_user();
			}

			// Flush the final pending record of this IP with the default stay time.
			// (Guarded so an empty group cannot emit "null" fields.)
			if (lastTimeStr != null) {
				k.set(sessionId + "\001" + lastIpAndUser + "\001" + lastUrl + "\001"
						+ lastTimeStr + "\001" + step + "\001" + DEFAULT_STAY_SECONDS
						+ "\001" + lastSaveStr);
				context.write(k, v);
			}
		}

		// ********************** helpers **********************

		/** Parses a "yyyy-MM-dd HH:mm:ss" timestamp. */
		private Date toDate(String timeStr) throws ParseException {
			return timeFormat.parse(timeStr);
		}

		/** Returns {@code time1 - time2} in milliseconds. */
		private long timeDiff(String time1, String time2) throws ParseException {
			return toDate(time1).getTime() - toDate(time2).getTime();
		}
	}
	
	
	/**
	 * Job driver. Input/output directories default to the original hard-coded
	 * local test paths but may now be overridden on the command line:
	 * {@code args[0]} = input dir, {@code args[1]} = output dir.
	 * Exits with status 1 if the job fails (previously the result was ignored).
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// Backward-compatible defaults: running with no arguments behaves as before.
		String inputDir = args.length > 0 ? args[0] : "f:/weblog_2/output";
		String outputDir = args.length > 1 ? args[1] : "f:/weblog_2/pageviews";

		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);

		job.setJarByClass(ClickStream.class);
		job.setMapperClass(ClickStreamMapper.class);
		job.setReducerClass(ClickStreamReducer.class);

		job.setMapOutputKeyClass(WeblogBean.class);
		job.setMapOutputValueClass(Text.class);

		// Final (reducer) output types.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		// Group all records of one IP into a single reduce() call.
		job.setGroupingComparatorClass(IpGroupingComparator.class);

		FileInputFormat.setInputPaths(job, new Path(inputDir));
		Path outputPath = new Path(outputDir);
		FileOutputFormat.setOutputPath(job, outputPath);

		// MapReduce refuses to start if the output directory already exists,
		// so remove any leftovers from a previous run.
		FileSystem fs = FileSystem.get(conf);
		if (fs.exists(outputPath)) {
			fs.delete(outputPath, true);
		}

		// Propagate job success/failure to the process exit code.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
