package com.liu.hadoop.clickStreamETL.mapreduce;

import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import com.liu.hadoop.clickStreamETL.entity.AccessLogBean;
import com.liu.hadoop.clickStreamETL.utils.CodeFormatter;
import com.liu.hadoop.clickStreamETL.utils.IpParser;

/**
 * MapReduce Mapper that parses raw Apache access_log lines into structured
 * click-stream records.
 *
 * <p>Input: one log line per record (offset key is ignored). Output key is the
 * request timestamp in epoch milliseconds; output value is the parsed fields
 * joined by {@link #SPLIT_CHAR}. Lines that do not match the expected format,
 * or whose timestamp cannot be parsed, are logged to stderr and skipped.
 *
 * @author 刘章程
 */
public class ClickStreamMapper extends Mapper<LongWritable, Text, Text, Text> {
	// Example input line. NOTE(review): lines like this one, which lack the
	// trailing quoted referer and user-agent fields, will NOT match
	// APACHE_LOG_REGEX and are therefore skipped — confirm this is intended.
	// 10.19.251.84 - - [07/Dec/2017:09:30:36 +0800] "POST /lzmh_operate_web/o2o/unSettlement/list HTTP/1.1" 302 -

	/**
	 * Regex recognizing an Apache access_log line in combined log format:
	 * ip, ident, user, [timestamp], "request", status, bytes, "referer", "user-agent".
	 */
	public static final String APACHE_LOG_REGEX = "^([\\d.]+|-) (\\S+|-) (\\S+|-) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(.+?)\" (\\d{3}|-) (\\d+|-) \"([^\"]+)\" \"([^\"]+)\"";

	/**
	 * A well-formed access_log line yields exactly 9 capture groups.
	 */
	public static final int NUM_FIELDS = 9;

	/**
	 * Field separator used when serializing the parsed record into the map
	 * output value.
	 */
	public static final String SPLIT_CHAR = "\t";

	// Pattern is immutable and thread-safe; compile it once instead of once
	// per input record (the original recompiled inside map()).
	private static final Pattern LOG_PATTERN = Pattern.compile(APACHE_LOG_REGEX);

	// SimpleDateFormat is NOT thread-safe, but each Mapper instance is driven
	// by a single thread, so one per-mapper instance is safe and avoids a
	// per-record allocation.
	private final DateFormat dateFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z", Locale.US);

	// IP-to-location resolver; reused across records for the same reason.
	private final IpParser ipParser = new IpParser();

	// Reusable output holders — standard Hadoop idiom to reduce GC pressure.
	private final Text outKey = new Text();
	private final Text outValue = new Text();

	/**
	 * Parses one access_log line and emits (receive-timestamp, joined-fields).
	 * Unparseable lines are reported on stderr and dropped so a single bad
	 * record cannot fail the task.
	 */
	@Override
	protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
			throws IOException, InterruptedException {

		AccessLogBean logBean = new AccessLogBean("nothing");

		// Lines may carry a virtual-host/domain prefix; strip it before
		// matching. (Adjust the literal for other deployments.)
		String logEntryLine = CodeFormatter.transformTextToUTF8(value, "UTF-8").toString().replace("bbs.moonseo.cn ",
				"");

		Matcher matcher = LOG_PATTERN.matcher(logEntryLine);

		// Skip lines that do not match the expected combined-log format.
		// (groupCount() is a property of the pattern, so the second clause is
		// a sanity check against accidental regex edits.)
		if (!matcher.matches() || NUM_FIELDS != matcher.groupCount()) {
			System.err.println("正则解析日志失败。");
			System.err.println(logEntryLine);
			return;
		}

		// Populate the bean from the capture groups.
		logBean.setIpAddress(matcher.group(1));      // client IP
		logBean.setDateTime(matcher.group(4));       // raw timestamp text
		logBean.setRequests(matcher.group(5));       // request line (method + URL + protocol)
		logBean.setResponseStats(matcher.group(6));  // HTTP status code
		logBean.setBytesSent(matcher.group(7));      // response size in bytes
		if (!matcher.group(8).equals("-")) {
			logBean.setReferer(matcher.group(8));    // referer, only when present
		}
		logBean.setBrowser(matcher.group(9));        // user-agent string

		// Geo-resolution is best-effort: a lookup failure must not drop the
		// record, so only log it.
		try {
			String[] address = ipParser.parse(logBean.getIpAddress()).split(" ");
			logBean.setAreaAddress(address[0]);
			logBean.setLoaclAddress(address[1]);
		} catch (Exception e) {
			e.printStackTrace();
		}

		// The timestamp becomes the output key; if it cannot be parsed the
		// record is unusable downstream, so skip it. (The original fell
		// through here and could emit a null key, crashing the task with an
		// NPE inside new Text(null).)
		try {
			Date date = dateFormat.parse(logBean.getDateTime());
			logBean.setReceiveTime(Long.toString(date.getTime()));
		} catch (ParseException e) {
			System.err.println("时间解析日志失败。");
			System.err.println(logEntryLine);
			return;
		}

		// Emit (epoch-millis, tab-joined record), reusing the Text holders.
		outKey.set(logBean.getReceiveTime());
		outValue.set(logBean.combineString(SPLIT_CHAR));
		context.write(outKey, outValue);
	}

}
