package com.bigdata.bookinfo.mapper;

import org.ansj.domain.Result;
import org.ansj.splitWord.analysis.BaseAnalysis;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * Mapper that extracts noun keywords from book titles.
 *
 * <p>Input value: a tab-separated record whose first field is the book title.
 * The title is tokenized with ansj {@link BaseAnalysis}; every noun token
 * (part-of-speech tag starting with "/n") that is not a stop word is emitted
 * as {@code (keyword, 1)} for a downstream word-count reducer.
 */
public class TitleKeyWordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    /**
     * Generic noun tokens (exactly as ansj emits them, with the "/n" tag) that
     * carry no signal for book-title keywords and are skipped.
     */
    private static final Set<String> STOP_WORDS = new HashSet<>(Arrays.asList(
            "版/n", "年级/n", "书/n", "教材/n", "题/n", "上册/n", "中册/n", "下册/n", "人/n"));

    // Hadoop serializes writables immediately inside context.write(), so reusing
    // these instances avoids one allocation pair per emitted keyword.
    private static final IntWritable ONE = new IntWritable(1);
    private final Text outKey = new Text();

    /**
     * Tokenizes the title field and emits each retained noun with a count of 1.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   tab-separated input record; field 0 is the title
     * @param context Hadoop context used to emit {@code (keyword, 1)} pairs
     * @throws IOException          if the write to the context fails
     * @throws InterruptedException if the task is interrupted during write
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // split("\t") always yields at least one element, so fields[0] is safe;
        // skip blank/malformed lines rather than analyzing an empty title.
        String[] fields = value.toString().split("\t");
        String title = fields[0];
        if (title.isEmpty()) {
            return;
        }

        Result analysisedResult = BaseAnalysis.parse(title);
        // Result.toString() joins tokens as "word/pos,word/pos,...".
        String[] keywords = analysisedResult.toString().split(",");
        for (String keyword : keywords) {
            if (STOP_WORDS.contains(keyword)) {
                continue;
            }

            // Keep noun-family tokens (/n, /nr, /ns, ...). Strip the whole
            // "/tag" suffix from the separator onward: the old
            // replace("/n", "") left a trailing letter on multi-char tags
            // (e.g. "张三/nr" became "张三r").
            if (keyword.contains("/n")) {
                int sep = keyword.indexOf('/');
                outKey.set(keyword.substring(0, sep));
                context.write(outKey, ONE);
            }
        }
    }
}
