package cn.lsh.mapper;

import cn.lsh.main.HotCommentDriver;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;

/**
 * Third stage of the tf-idf job: combines per-document term frequency (the map
 * input) with the global document total and per-word document frequency (both
 * loaded from distributed-cache files in {@link #setup}) and emits
 * {@code "<docId>_<tfidf>" -> word} pairs.
 */
public class HotComment3Mapper extends Mapper<Text, Text, Text, Text> {
	// Total number of documents, read from the cached "total" file.
	// Stays 0 if the file is missing — setup now fails fast in that case.
	private double fileTotal = 0;
	// word -> number of documents containing the word (document frequency).
	private Map<String, Double> wordFileCount = new HashMap<>();
	private final Text mkey = new Text();
	private final Text mval = new Text();
	private final NumberFormat nf = NumberFormat.getInstance();

	/**
	 * Loads the document total and the per-word document counts from the
	 * distributed-cache files before any map() call.
	 *
	 * @throws IOException if a cache file cannot be opened or read; propagating
	 *                     (instead of swallowing) prevents fileTotal staying 0,
	 *                     which would make every idf log(0/df) = -Infinity
	 */
	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		// Format emitted tf-idf values with at most 5 fraction digits.
		nf.setMaximumFractionDigits(5);
		// Cache files (e.g. part-r-00003 / part-r-00004) registered by the driver.
		URI[] uris = context.getCacheFiles();
		if (uris == null || uris.length == 0) {
			return;
		}
		for (URI uri : uris) {
			String file = uri.getPath();
			boolean isFileTotal;
			if (file.endsWith(HotCommentDriver.TOTAL_FILE)) {
				isFileTotal = true;
			} else if (file.endsWith(HotCommentDriver.WORD_COUNT_FILE)) {
				isFileTotal = false;
			} else {
				continue;
			}
			// NOTE(review): hard-coded path for running locally outside the
			// cluster; strips the first 5 chars of the cache path. This breaks
			// real cluster runs — TODO make it configurable via the job conf.
			file = "G:\\学习\\大数据\\hadoop\\项目\\tf-idf" + file.substring(5);
			// try-with-resources guarantees the reader is closed.
			// Files.newBufferedReader defaults to UTF-8; the previous FileReader
			// used the platform charset and could corrupt non-ASCII words.
			try (BufferedReader reader = Files.newBufferedReader(Paths.get(file))) {
				if (isFileTotal) {
					// Single line: "<label>\t<totalDocumentCount>".
					String line = reader.readLine();
					if (line != null) {
						StringTokenizer st = new StringTokenizer(line, "\t");
						st.nextToken();
						fileTotal = Double.parseDouble(st.nextToken());
					}
				} else {
					// One "<word>\t<documentCount>" pair per line.
					// readLine() != null is the correct EOF test; the original
					// ready() only reports whether a read would block, not
					// end-of-stream.
					String line;
					while ((line = reader.readLine()) != null) {
						StringTokenizer st = new StringTokenizer(line, "\t");
						if (st.countTokens() >= 2) {
							wordFileCount.put(st.nextToken(), Double.parseDouble(st.nextToken()));
						}
					}
				}
			} catch (NumberFormatException e) {
				// Best-effort: report a malformed numeric field and keep going.
				e.printStackTrace();
			}
		}
	}

	/**
	 * Computes tf-idf for one "word_docId" -> tf record and emits
	 * "docId_tfidf" -> word so a later stage can sort by score.
	 *
	 * Sample input line: {@code 好听_10033640\t0.14285714285714285}
	 */
	@Override
	protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
		// Skip the cache files themselves if they show up as input splits.
		FileSplit fs = (FileSplit) context.getInputSplit();
		String filename = fs.getPath().getName();
		if (filename.contains(HotCommentDriver.TOTAL_FILE) || filename.contains(HotCommentDriver.WORD_COUNT_FILE)) {
			return;
		}
		double tf = Double.parseDouble(value.toString());
		// key is "word_docId": first token is the word, the LAST token is taken
		// as the id — a word containing '_' would therefore be truncated;
		// assumed not to occur in this data set (TODO confirm).
		StringTokenizer st = new StringTokenizer(key.toString(), "_");
		String word = st.nextToken();
		String id = "";
		while (st.hasMoreTokens()) {
			id = st.nextToken();
		}
		// Unknown words default to document frequency 1 to avoid log of a
		// division by zero.
		double wfc = wordFileCount.getOrDefault(word, 1.0);
		// idf = ln(totalDocs / docsContainingWord)
		double idf = Math.log(fileTotal / wfc);
		double tfIdf = tf * idf;
		mkey.set(id + "_" + nf.format(tfIdf));
		mval.set(word);
		context.write(mkey, mval);
	}
}
