package TFIDF;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Mapper that emits per-term TF-IDF records.
 *
 * <p>Expects two files in the distributed cache (see {@code setup}):
 * <ol>
 *   <li>cache file 0: tab-separated {@code key\tcount} lines holding the total
 *       review/document counts — must contain a {@code "count"} entry with the
 *       grand total used as the IDF numerator;</li>
 *   <li>cache file 1: tab-separated {@code key\ttf} lines holding the TF value
 *       per composite key (keys look like {@code word_docId} — presumably; TODO
 *       confirm against the upstream job that writes this file).</li>
 * </ol>
 *
 * <p>Each input line is {@code compositeKey\tdocFrequency}; the mapper computes
 * {@code idf = log10(totalCount / (docFrequency + 1))} and writes a
 * {@link ResultData} carrying (word, docId, tf, idf, tf*idf).
 */
public class MapTest03 extends Mapper<LongWritable, Text, NullWritable, ResultData> {
    // Reused output value object — standard Hadoop practice to avoid per-record allocation.
    ResultData k = new ResultData();
    // Total review/document counts; the "count" entry holds the grand total.
    Map<String, Double> countMap = new HashMap<String, Double>();
    // TF value per composite "word_docId" key.
    Map<String, Double> tfcount = new HashMap<String, Double>();

    /**
     * Loads the two distributed-cache side files into {@link #countMap} and
     * {@link #tfcount} before any records are mapped.
     *
     * @throws IOException if a cache file cannot be read
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        URI[] uris = context.getCacheFiles();
        // Cache file 0: total counts; cache file 1: TF values.
        loadTsv(uris[0], countMap);
        loadTsv(uris[1], tfcount);
    }

    /**
     * Reads a tab-separated {@code key\tvalue} file into {@code target},
     * parsing the second column as a double. The reader is closed even when
     * parsing fails (try-with-resources).
     *
     * @param uri    location of the cache file (must be a file: URI)
     * @param target map to populate; later lines overwrite earlier duplicates
     * @throws IOException if the file cannot be opened or read
     */
    private static void loadTsv(URI uri, Map<String, Double> target) throws IOException {
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(new File(uri)), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                String[] fields = line.split("\t");
                target.put(fields[0], Double.parseDouble(fields[1]));
            }
        }
    }

    /**
     * Emits one TF-IDF record per input line ({@code compositeKey\tdocFrequency}).
     *
     * <p>NOTE(review): {@code countMap.get("count")} and
     * {@code tfcount.get(datas[0])} will NPE if the cache files are missing the
     * expected entries — the upstream jobs are assumed to guarantee them.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] datas = value.toString().split("\t");
        // IDF = log10(totalDocs / (docsContainingTerm + 1)); the +1 smooths and
        // guards against division by zero.
        double idf = Math.log10(countMap.get("count") / (Double.parseDouble(datas[1]) + 1));
        double tf = tfcount.get(datas[0]);
        // Composite key is "word_docId"; split it back into its two parts.
        String[] parts = datas[0].split("_");
        k.set(parts[0], parts[1], tf, idf, tf * idf);
        context.write(NullWritable.get(), k);
    }
}
