package com.galeno.htmlcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
/**
 * @author galeno
 * @Title: HtmlCount
 * @Description: Two-job MapReduce chain: job 1 counts word occurrences per
 *               file, job 2 regroups those counts into an inverted index.
 * @date 2021/7/27 11:54
 */
public class HtmlCount {
    static class Index01HtmlMap extends Mapper<LongWritable, Text, Text, IntWritable> {
        /** Name of the file this split belongs to; resolved once per task in setup(). */
        private String fileName = null;

        private final Text k = new Text();
        private final IntWritable v = new IntWritable(1);

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // The input split knows which file it came from; cache the name for map().
            FileSplit inputSplit = (FileSplit) context.getInputSplit();
            fileName = inputSplit.getPath().getName();
        }

        /**
         * Emits ("word-fileName", 1) for every whitespace-separated token in the line.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            for (String word : line.split("\\s+")) {
                // split("\\s+") yields a leading empty token for lines that start
                // with whitespace (and a lone one for blank lines); skip it instead
                // of emitting a bogus "-fileName" key.
                if (word.isEmpty()) {
                    continue;
                }
                k.set(word + "-" + fileName);
                context.write(k, v);
            }
        }
    }
    static class Index01Reducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable v = new IntWritable();

        /**
         * Sums the partial counts for one "word-fileName" key and emits the total.
         * Summing value.get() (rather than counting elements, as the original did)
         * stays correct if a combiner is ever configured, because a combiner
         * pre-aggregates values above 1.
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            v.set(count);
            context.write(key, v);
        }
    }
    static class Index02Map extends Mapper<LongWritable, Text, Text, Text> {
        private final Text k = new Text();
        private final Text v = new Text();

        /**
         * Re-keys job 1 output. Input lines look like "word-fileName\tcount";
         * emits (word, "fileName-count") so the reducer can merge per word.
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            // Split only on the FIRST '-' so file names containing '-' survive
            // intact (the original split("-")[1] kept only the text up to the
            // second dash). Splitting once also avoids the duplicated split call.
            String[] parts = line.split("-", 2);
            if (parts.length < 2) {
                // Malformed line with no separator: skip it instead of throwing
                // ArrayIndexOutOfBoundsException as the original did.
                return;
            }
            k.set(parts[0]);
            v.set(parts[1].replace("\t", "-"));
            context.write(k, v);
        }
    }
    static class Index02Reducer extends Reducer<Text, Text, Text, Text> {
        private final Text v = new Text();

        /**
         * Concatenates all "fileName-count" fragments for one word into a single
         * space-separated line, e.g. "a.html-3 b.html-1".
         * The original also parsed each fragment into unused locals; that dead
         * Integer.parseInt could throw NumberFormatException for no benefit and
         * was removed.
         */
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            StringBuilder sb = new StringBuilder();
            for (Text value : values) {
                // Prepend the separator instead of appending-then-trim.
                if (sb.length() > 0) {
                    sb.append(' ');
                }
                sb.append(value.toString());
            }
            v.set(sb.toString());
            context.write(key, v);
        }
    }

    /**
     * Driver entry point: runs the two chained jobs in order. Job 1 produces
     * per-file word counts; job 2 turns them into the inverted index.
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        init1(); // word-per-file counts -> index_out01
        init2(); // inverted index      -> index_res
    }
    /**
     * Job 1: reads every file under the input folder and writes
     * "word-fileName\tcount" lines to index_out01.
     *
     * @throws IOException if the job cannot be submitted or does not succeed
     */
    static void init1() throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Run against the local file system rather than HDFS.
        conf.set("fs.defaultFS", "file:/");
        Job job = Job.getInstance(conf, "index01");
        job.setMapperClass(Index01HtmlMap.class);
        job.setReducerClass(Index01Reducer.class);
        // Mapper and reducer emit the same types, so one key/value pair covers both.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // The input path is a folder: all files under it are loaded.
        FileInputFormat.setInputPaths(job, new Path("D:\\mrdata\\index\\input"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\mrdata\\index\\index_out01"));
        // Fail fast: job 2 consumes this job's output, so silently ignoring the
        // result (as the original did) would let the chain run on missing data.
        if (!job.waitForCompletion(true)) {
            throw new IOException("Job index01 failed");
        }
    }
    /**
     * Job 2: reads job 1's "word-fileName\tcount" output and writes one
     * inverted-index line per word ("word\tfileA-n fileB-m ...") to index_res.
     *
     * @throws IOException if the job cannot be submitted or does not succeed
     */
    static void init2() throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Run against the local file system rather than HDFS.
        conf.set("fs.defaultFS", "file:/");
        // Was misleadingly named "index01" like the first job; fixed so the two
        // jobs are distinguishable in logs and the job tracker.
        Job job = Job.getInstance(conf, "index02");
        job.setMapperClass(Index02Map.class);
        job.setReducerClass(Index02Reducer.class);
        // Mapper and reducer emit the same types, so one key/value pair covers both.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // Consumes the folder produced by init1().
        FileInputFormat.setInputPaths(job, new Path("D:\\mrdata\\index\\index_out01"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\mrdata\\index\\index_res"));
        // Fail fast instead of ignoring the job result as the original did.
        if (!job.waitForCompletion(true)) {
            throw new IOException("Job index02 failed");
        }
    }
}
