package etllog;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * @program: WCount
 * @description: Mapper stage for log cleaning (filters malformed log lines)
 * @author: Qiang.Ye
 * @create: 2019-05-22 08:00
 **/
public class LogMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    /** Reusable output key — avoids allocating a new Text for every record. */
    private final Text k = new Text();

    /**
     * Emits the raw log line as the output key (with a NullWritable value)
     * when it is considered valid, i.e. it splits into more than 11
     * space-separated fields. Invalid lines are silently dropped; both
     * outcomes are tallied through task counters in {@link #parseLog}.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one raw access-log line
     * @param context task context used for counters and for emitting output
     * @throws IOException          if writing the output fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();

        // parseLog updates the "map" counter group for both valid and
        // invalid lines; drop lines with too few fields.
        if (!parseLog(line, context)) {
            return;
        }

        k.set(line);
        context.write(k, NullWritable.get());
    }

    /**
     * Validates a log line by field count and records the result in the
     * "map" counter group ("true" = kept, "false" = dropped).
     *
     * Example line:
     * 60.208.6.156 - - [18/Sep/2013:06:49:48 +0000] "GET /wp-content/... HTTP/1.0" 200 185524 "..." "Mozilla/5.0 ..."
     *
     * @param line    raw log line
     * @param context task context providing the counters
     * @return true if the line has more than 11 space-separated fields
     */
    private boolean parseLog(String line, Context context) {
        String[] fields = line.split(" ");
        if (fields.length > 11) {
            context.getCounter("map", "true").increment(1);
            return true;
        }
        context.getCounter("map", "false").increment(1);
        return false;
    }
}
