package com.company.MovieAnalyse.analyse1;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
/*
  The four generic type parameters of Reducer:
    KEYIN:    type of K2 (intermediate key from the mapper)
    VALUEIN:  type of V2 (intermediate value from the mapper)

    KEYOUT:   type of K3 (output key)
    VALUEOUT: type of V3 (output value)
 */

public class WordCountReducer extends Reducer<Text, Text, Text, Text> {

    /**
     * Emits one output record per input movie record: the movie name and its
     * introduction joined by {@code "_____"}, keyed by the incoming key.
     *
     * <p>Each value is expected to be a tab-separated record where index 2 is
     * the movie name and index 6 is the introduction. Records with fewer than
     * 7 fields or an empty name are skipped.
     *
     * @param key     grouping key, passed through unchanged
     * @param values  tab-separated movie records grouped under this key
     * @param context used to emit {@code (key, name_____introduction)} pairs
     * @throws IOException          if the underlying write fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        for (Text value : values) {
            String[] fields = value.toString().split("\t");
            // Guard against malformed records: without this, fields[6] below
            // would throw ArrayIndexOutOfBoundsException for short rows.
            if (fields.length <= 6) {
                continue;
            }
            // Movie name (column 2) and introduction (column 6).
            String name = fields[2];
            String introduce = fields[6];
            // BUG FIX: the original check `!split[2].equals(null)` was always
            // true — equals(null) returns false by contract for any non-null
            // receiver — so it filtered nothing. Filter empty names instead.
            if (!name.isEmpty()) {
                context.write(key, new Text(name + "_____" + introduce));
            }
        }
    }
}

