package com.allen.mapreduce.wordcount;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public  class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    String regex = "[.,\"!--;:?'\\]]"; //remove all punctuation
    Text word = new Text();
    final static IntWritable one = new IntWritable(1);
    HashSet<String> stopWordSet = new HashSet<String>();

    /**
     * 将停词从文件读到hashSet中
     * */
    private void parseStopWordFile(String path){
        try {
            String word = null;
            BufferedReader reader = new BufferedReader(new FileReader(path));
            while((word = reader.readLine()) != null){
                stopWordSet.add(word);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * 完毕map初始化工作
     * 主要是读取停词文件
     * */
    public void setup(Context context) {

        Path[] patternsFiles = new Path[0];
        try {
            patternsFiles = DistributedCache.getLocalCacheFiles(context.getConfiguration());
        } catch (IOException e) {
            e.printStackTrace();
        }
        if(patternsFiles == null){
            System.out.println("have no stopfile\n");
            return;
        }

        //read stop-words into HashSet
        for (Path patternsFile : patternsFiles) {
            parseStopWordFile(patternsFile.toString());
        }
    }

    /**
     *  map
     * */
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {

        String s = null;
        String line = value.toString().toLowerCase();
        line = line.replaceAll(regex, " "); //remove all punctuation

        //split all words of line
        StringTokenizer tokenizer = new StringTokenizer(line);
        while (tokenizer.hasMoreTokens()) {
            s = tokenizer.nextToken();
            if(!stopWordSet.contains(s)){
                word.set(s);
                context.write(word, one);
            }
        }
    }
}
