package com.example.hadoop.mapreduce.logenhance;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * Log-enhance MapReduce job: appends knowledge-base tags to matching log
 * lines and routes urls that are missing from the knowledge base to a
 * separate "tocrawl" output, so different urls land in different paths.
 *
 * @author Brian
 * @since 2020/04/28 22:42
 */
public class LogEnhance {


    /**
     * Map-only mapper that enhances each log line with a content tag from the
     * knowledge base, or emits {@code "<url>\ttocrawl"} when the url is unknown.
     * Output value is always {@link NullWritable}; routing to different files is
     * done by the custom output format configured in the driver.
     */
    static class LogEnhanceMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        /** url -> content tag, loaded once per task attempt in {@link #setup}. */
        Map<String, String> ruleMap = new HashMap<>();
        /** Reused output key to avoid a per-record allocation. */
        Text k = new Text();

        /**
         * Loads the knowledge base (rule map) from the database.
         * <p>
         * Fails fast: the previous version only called {@code printStackTrace()},
         * so a failed load left {@code ruleMap} empty and silently flagged every
         * record as "tocrawl". Aborting the task lets the framework retry and
         * surfaces the real error.
         *
         * @throws IOException if the rule map cannot be loaded
         */
        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            try {
                DBLoader.dbLoader(ruleMap);
            } catch (Exception e) {
                throw new IOException("Failed to load rule map from database", e);
            }
        }

        /**
         * Processes one log line: field 26 (tab-separated) is the url.
         * Known urls get the whole line plus the tag; unknown urls are emitted
         * with a "tocrawl" flag. Any malformed line (too few fields, etc.) is
         * counted under the "malformed"/"malformedline" counter and skipped.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            Counter counter = context.getCounter("malformed", "malformedline");
            String line = value.toString();
            String[] splits = StringUtils.split(line, "\t");
            try {
                String url = splits[26];
                String content_tag = ruleMap.get(url);
                if (content_tag != null) {
                    // In the knowledge base: enhance the line with the extra tag.
                    k.set(line + "\t" + content_tag + "\n");
                    context.write(k, NullWritable.get());
                } else {
                    // Not in the knowledge base: flag the url for crawling.
                    k.set(url + "\ttocrawl");
                    context.write(k, NullWritable.get());
                }
            } catch (Exception e) {
                // Malformed record (e.g. fewer than 27 fields) — count and skip.
                counter.increment(1);
            }
        }
    }

    /**
     * Job driver: configures and submits the map-only log-enhance job.
     *
     * @param args optional {@code [inputPath, outputPath]}; defaults to fixed
     *             HDFS paths when fewer than two are supplied
     * @throws Exception if job setup or submission fails
     */
    public static void main(String[] args) throws Exception {
        // Fall back to default HDFS paths when fewer than TWO paths are given.
        // (The old check only covered null/empty args, so a single argument
        // crashed with ArrayIndexOutOfBoundsException at args[1] below.)
        if (args == null || args.length < 2) {
            args = new String[]{
                    "hdfs://shizhan:9000/logenhance/input",
                    "hdfs://shizhan:9000/logenhance/output"
            };
        }
        // Load default Hadoop configuration (core-site.xml etc. on classpath).
        Configuration conf = new Configuration();

        Job job = Job.getInstance(conf);
        // Locate the jar to ship to the cluster via this class's classloader.
        job.setJarByClass(LogEnhance.class);
        job.setMapperClass(LogEnhanceMapper.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);

        // Map-only job: the custom output format routes records to files.
        job.setNumReduceTasks(0);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        job.setOutputFormatClass(LogEnhanceOutputFormat.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));

        // Resolve the filesystem FROM the output path. FileSystem.get(conf)
        // returns the *default* filesystem, which need not match an hdfs://
        // output URI — exists()/delete() would then hit the wrong filesystem
        // (or throw "Wrong FS"). Path.getFileSystem(conf) always matches.
        Path outputPath = new Path(args[1]);
        FileSystem fs = outputPath.getFileSystem(conf);
        if (fs.exists(outputPath)) {
            // Remove a stale output directory so the job can start cleanly.
            fs.delete(outputPath, true);
        }
        FileOutputFormat.setOutputPath(job, outputPath);

        // Submit the job (with its configuration and jar) and block until done.
        boolean successfully = job.waitForCompletion(true);
        System.out.println("Successfully? -->" + successfully);
        System.exit(successfully ? 0 : 1);
    }

}
