package com.huang.etl.mr.ald;
import com.huang.common.EventLogConstants;
import com.huang.common.GlobalConstants;
import com.huang.util.TimeUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

import java.io.IOException;

/**
 * Driver (runner) class for the log-parsing MapReduce ETL job: resolves the
 * run date from command-line arguments, configures the map-only job, and
 * wires its output to the HBase event-logs table.
 * @creat_user: c_huangzhijun
 * creat_date: 2018/2/8
 * creat_time: 15:35
 **/
public class AnalyserLogDataRunner implements Tool {
    private static final Logger logger = Logger.getLogger(AnalyserLogDataRunner.class);

    /** Job configuration; populated by {@link #setConf(Configuration)} before {@link #run(String[])}. */
    private Configuration conf = null;

    /**
     * Entry point. Delegates to {@link ToolRunner} and propagates the job's
     * exit status to the JVM so schedulers can detect failure.
     *
     * @param args command-line arguments; supports {@code -d yyyy-MM-dd}
     */
    public static void main(String[] args) {
        try {
            // Previously the return value was ignored, so a failed job still
            // exited the JVM with status 0; propagate it explicitly.
            int status = ToolRunner.run(new Configuration(), new AnalyserLogDataRunner(), args);
            System.exit(status);
        } catch (Exception e) {
            logger.error("执行日志解析job异常", e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds and submits the map-only log-parsing job.
     *
     * @param args command-line arguments, forwarded to {@link #processArgs}
     * @return 0 on success, -1 on job failure
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = this.getConf();
        this.processArgs(conf, args);

        Job job = Job.getInstance(conf, "analyser_logdata");
        job.setJarByClass(AnalyserLogDataRunner.class);
        job.setMapperClass(AnalyserLogDataMapper.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Put.class);

        // Map-only job: the mapper emits Puts directly into the HBase
        // event-logs table, so no reduce phase is needed.
        TableMapReduceUtil.initTableReducerJob(EventLogConstants.HBASE_NAME_EVENT_LOGS, null, job);
        job.setNumReduceTasks(0);

        // Configure the HDFS input path for the resolved run date.
        this.setJobInputPaths(job);
        return job.waitForCompletion(true) ? 0 : -1;
    }

    @Override
    public void setConf(Configuration conf) {
        // Merge HBase client settings (hbase-site.xml etc.) into the Hadoop conf.
        this.conf = HBaseConfiguration.create(conf);
    }

    @Override
    public Configuration getConf() {
        return this.conf;
    }

    /**
     * Resolves the run date from the arguments and stores it in the
     * configuration under {@link GlobalConstants#RUNNING_DATE_PARAMES}.
     * Expected format is {@code -d yyyy-MM-dd}; a missing or invalid date
     * falls back to yesterday.
     *
     * @param conf configuration to receive the resolved date
     * @param args command-line arguments to scan
     */
    private void processArgs(Configuration conf, String[] args) {
        String date = null;
        for (int i = 0; i < args.length; i++) {
            if ("-d".equals(args[i])) {
                if (i + 1 < args.length) {
                    date = args[++i];
                    break;
                }
            }
        }
        logger.info("date:" + date);
        // Required date format: yyyy-MM-dd
        if (StringUtils.isBlank(date) || !TimeUtil.isValidateRunningDate(date)) {
            // Invalid or absent date: default to yesterday.
            date = TimeUtil.getYesterday();
        }
        conf.set(GlobalConstants.RUNNING_DATE_PARAMES, date);
    }

    /**
     * Adds the date-partitioned HDFS input directory to the job, failing fast
     * if it does not exist.
     *
     * @param job the job whose input paths are configured
     * @throws RuntimeException if the path is missing or an I/O error occurs
     */
    private void setJobInputPaths(Job job) {
        Configuration conf = job.getConfiguration();
        try {
            // NOTE: FileSystem.get(conf) returns a JVM-wide cached, shared
            // instance. It must NOT be closed here: closing it before
            // job.waitForCompletion() makes job submission fail with
            // "Filesystem closed". (The previous finally { fs.close(); }
            // was a bug and has been removed.)
            FileSystem fs = FileSystem.get(conf);
            String date = conf.get(GlobalConstants.RUNNING_DATE_PARAMES);
            logger.info("setJobInputPaths中的date:" + date);
            // Input layout: /cpic/bigdata/huang/MM/dd/
            Path inputPath = new Path("/cpic/bigdata/huang/"
                    + TimeUtil.parseLong2String(TimeUtil.parseString2Long(date), "MM/dd/"));
            if (fs.exists(inputPath)) {
                FileInputFormat.addInputPath(job, inputPath);
            } else {
                throw new RuntimeException("文件不存在" + inputPath);
            }
        } catch (IOException e) {
            throw new RuntimeException("设置job的mapreduce输入路径出现异常", e);
        }
    }

}
