package com.zyh.demo2;

import com.zyh.WordCountJob;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Driver for a map-only data-cleaning job: reads raw log lines from HDFS,
 * keeps only well-formed records (see {@code WordCountMapper}), and writes
 * the surviving lines to the output directory.
 *
 * Implements {@link Tool} so that {@link ToolRunner} can parse generic
 * Hadoop options ({@code -D}, {@code -fs}, ...) before {@link #run} executes.
 */
public class FilterData extends Configured implements Tool {

    /**
     * Entry point. Propagates the job's exit code to the shell
     * (0 = success, 1 = failure) instead of discarding it.
     */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new FilterData(), args));
    }

    @Override
    public int run(String[] strings) throws Exception {
        // Hard-coded paths for this demo; input survives from an earlier run
        // ("/mr/demo1/user-pay-2021-9-9.log" was a previous variant).
        String input = "/mr/demo1/dataclean.log";
        String output = "/mr/out/demo4/";

        // Use the configuration injected by ToolRunner (classpath config files
        // plus any -D overrides) rather than discarding it with a fresh one.
        Configuration conf = getConf();
        conf.set("fs.defaultFS", "hdfs://192.168.193.10:9000");

        Job job = Job.getInstance(conf, FilterData.class.getSimpleName());
        job.setJarByClass(FilterData.class);

        // Input
        FileInputFormat.setInputPaths(job, new Path(input));

        // Mapper. NOTE(review): the job declares Text map output values, while
        // the mapper emits no value at all — harmless with zero reducers (these
        // settings are only consulted on the shuffle path), but worth aligning.
        job.setMapperClass(WordCountMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Map-only job: mapper output goes straight to the output format.
        job.setNumReduceTasks(0);

        // Output path must not pre-exist; delete it recursively if it does.
        Path outputPath = new Path(output);
        FileOutputFormat.setOutputPath(job, outputPath);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        // Submit and block until completion; 0 = success, 1 = failure.
        return job.waitForCompletion(true) ? 0 : 1;
    }
}

/**
 * Map-only cleaning mapper: passes a whole log line through unchanged when it
 * validates, drops it otherwise, and tracks pass/fail/total counters.
 *
 * Types:
 *   in  key   LongWritable — byte offset of the line within the split
 *   in  value Text         — the raw line
 *   out key   Text         — the unmodified valid line
 *   out value NullWritable — no value; the job runs with zero reducers, so
 *                            records go directly to the output format
 *                            (previously declared IntWritable but a null was
 *                            written — the declaration was never honored).
 */
class WordCountMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Split on any run of whitespace; a valid record has exactly 9 fields.
        String[] fields = value.toString().split("\\s+");

        if (check(fields)) {
            // Emit the original line untouched; NullWritable suppresses the value.
            context.write(value, NullWritable.get());
            context.getCounter("g1", "成功").increment(1L); // "success"
        } else {
            context.getCounter("g1", "失败").increment(1L); // "failure"
        }
        context.getCounter("g1", "总共").increment(1L); // "total"
    }

    /**
     * A record is valid when it has exactly 9 whitespace-separated fields
     * and none of them is the literal string "null".
     */
    private boolean check(String[] fields) {
        if (fields.length != 9) {
            return false;
        }
        for (String field : fields) {
            if ("null".equals(field)) {
                return false;
            }
        }
        return true;
    }
}

