package TFIDF;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MRDriver {
    /**
     * First-pass mapper over the raw CSV input.
     *
     * Input line (comma-separated): column 1 is the film name, column 5 the comment
     * text, e.g. "肖申克的救赎, ... ,希望让人自由".
     * Emits <"count", 1> once per comment (for the total-comment tally) and
     * <word_film_len, 1> for every segmented word, where len is the comment's
     * total token count (needed later to normalize into a TF).
     */
    public static class MapTest01 extends Mapper<LongWritable, Text, Text, DoubleWritable> {
        // Column positions in the input CSV.
        private static final int FILM_COL = 1;
        private static final int COMMENT_COL = 5;

        DoubleWritable v = new DoubleWritable(1);
        Text k = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] datas = value.toString().split(",");
            // Skip malformed rows and the "none" placeholder comment.
            if (datas.length <= COMMENT_COL || "none".equalsIgnoreCase(datas[COMMENT_COL])) {
                return;
            }
            // Segment the comment ONCE and keep the tokens; the original ran the
            // segmenter twice (once only to count tokens), doubling the work.
            List<String> words = new ArrayList<>();
            IKSegmenter ik = new IKSegmenter(new StringReader(datas[COMMENT_COL]), true);
            Lexeme lex;
            while ((lex = ik.next()) != null) {
                words.add(lex.getLexemeText());
            }
            int count = words.size();
            for (String word : words) {
                // Key carries word, film and comment length so the reducer can compute TF.
                k.set(word + "_" + datas[FILM_COL] + "_" + count);
                context.write(k, v);
            }
            // One "count" record per comment; partition 0 totals these.
            context.write(new Text("count"), v);
        }
    }

    /**
     * Second-pass mapper: reads job 1's TF output lines ("film_word \t TF") and
     * re-emits the key column with a 1, so job 2 can total, per word, how many
     * films contain it (the document frequency).
     */
    public static class MapTest02 extends Mapper<LongWritable, Text, Text, IntWritable> {
        IntWritable v = new IntWritable(1);
        Text k = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Keep only the key column; the TF value itself is re-read from the
            // cache files in job 3, so it is dropped here.
            String keyColumn = value.toString().split("\t")[0];
            k.set(keyColumn);
            context.write(k, v);
        }
    }

    /**
     * Third-pass mapper: joins the DF lines (job 2 output) against the two
     * cached job-1 outputs — the total comment count and the per-(film,word) TF —
     * and emits one ResultData row holding TF, IDF and TF*IDF.
     */
    public static class MapTest03 extends Mapper<LongWritable, Text, NullWritable, ResultData> {
        ResultData k = new ResultData();
        // "count" -> total number of comments (cache file 0).
        Map<String, Double> countMap = new HashMap<String, Double>();
        // film_word -> TF (cache file 1).
        Map<String, Double> tfcount = new HashMap<String, Double>();

        /**
         * Loads one tab-separated "key \t value" cache file into {@code target}.
         * Uses try-with-resources — the original leaked both the HDFS stream
         * and the reader.
         * NOTE(review): reader uses the platform default charset, as the original
         * did — confirm the cluster default is UTF-8 for the Chinese keys.
         */
        private void loadCacheFile(URI uri, Configuration conf, Map<String, Double> target) throws IOException {
            FileSystem fs = FileSystem.get(uri, conf);
            try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(uri))))) {
                String line;
                while ((line = br.readLine()) != null) {
                    String[] datas = line.split("\t");
                    target.put(datas[0], Double.parseDouble(datas[1]));
                }
            }
        }

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            URI[] uris = context.getCacheFiles();
            loadCacheFile(uris[0], context.getConfiguration(), countMap);  // total comments
            loadCacheFile(uris[1], context.getConfiguration(), tfcount);   // TF per film_word
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Input line: "film_word \t DF".
            String[] datas = value.toString().split("\t");
            // IDF = log10(totalComments / (DF + 1)); +1 smoothing avoids division by zero.
            double idf = Math.log10(countMap.get("count") / (Double.parseDouble(datas[1]) + 1));
            Double tf = tfcount.get(datas[0]);
            if (tf == null) {
                // No TF recorded for this key; the original auto-unboxed and threw NPE.
                return;
            }
            String[] parts = datas[0].split("_");  // [0]=film, [1]=word
            k.set(parts[0], parts[1], tf, idf, tf * idf);
            context.write(NullWritable.get(), k);
        }
    }

    /**
     * Grouping comparator for job 2: two "film_word" keys land in the same
     * reduce group when their word part (second split token) matches, so the
     * reducer sums one 1 per film containing the word — the document frequency.
     */
    public static class MyGroup extends WritableComparator {
        public MyGroup() {
            // true -> have the comparator instantiate Text key objects.
            super(Text.class, true);
        }

        @Override
        public int compare(WritableComparable a, WritableComparable b) {
            String wordA = ((Text) a).toString().split("_")[1];
            String wordB = ((Text) b).toString().split("_")[1];
            return wordA.compareTo(wordB);
        }
    }

    /**
     * Job-1 partitioner: routes the single "count" key to reducer 0 and every
     * word_film_len key to reducer 1, so total-comment and TF records land in
     * separate output files (part-r-00000 / part-r-00001).
     */
    public static class MyPartition extends Partitioner<Text, DoubleWritable> {
        @Override
        public int getPartition(Text text, DoubleWritable value, int numPartitions) {
            if ("count".equals(text.toString())) {
                return 0;
            }
            // Guard: the original hard-coded 1, which is an illegal partition
            // whenever the job runs with fewer than two reducers.
            return numPartitions > 1 ? 1 : 0;
        }
    }

    /**
     * First-pass reducer.
     * For the "count" key it emits <count, total number of comments>.
     * For word_film_len keys it emits <film_word, occurrences / len> — the TF.
     */
    public static class RedTest01 extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
        Text k = new Text();
        DoubleWritable v = new DoubleWritable();

        @Override
        protected void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
            // Sum the 1s: total comments, or occurrences of a word in one comment.
            double total = 0;
            for (DoubleWritable one : values) {
                total += one.get();
            }
            if ("count".equals(key.toString())) {
                // Emit the overall comment count under its original key.
                v.set(total);
                context.write(key, v);
            } else {
                // Key layout: word_film_commentLength.
                String[] parts = key.toString().split("_");
                double commentLength = Double.parseDouble(parts[2]);
                v.set(total / commentLength);
                k.set(parts[1] + "_" + parts[0]);  // re-key as film_word
                context.write(k, v);
            }
        }
    }

    /**
     * Second-pass reducer: totals the 1s grouped per word (see MyGroup),
     * yielding the number of films whose comments contain that word.
     */
    public static class RedTest02 extends Reducer<Text, IntWritable, Text, IntWritable> {
        IntWritable v = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int total = 0;
            for (IntWritable one : values) {
                total += one.get();
            }
            v.set(total);
            // Note: the written key is the first key of the group chosen by MyGroup.
            context.write(key, v);
        }
    }

    /**
     * Final reducer: every ResultData record shares the single NullWritable key;
     * write each one out as a line (ResultData.toString supplies the columns).
     */
    public static class RedTest03 extends Reducer<NullWritable, ResultData, ResultData, NullWritable> {
        @Override
        protected void reduce(NullWritable key, Iterable<ResultData> values, Context context) throws IOException, InterruptedException {
            for (ResultData data : values) {
                context.write(data, NullWritable.get());
            }
        }
    }

    /**
     * Writable record for one row of the final TF-IDF table.
     * Serialization order in write()/readFields() must stay
     * film, word, TF, IDF, TF_IDF — both methods walk the fields identically.
     */
    public static class ResultData implements Writable {
        private String FilmName;
        private String word;
        private double TF;
        private double IDF;
        private double TF_IDF;

        @Override
        public void write(DataOutput dataOutput) throws IOException {
            dataOutput.writeUTF(FilmName);
            dataOutput.writeUTF(word);
            dataOutput.writeDouble(TF);
            dataOutput.writeDouble(IDF);
            dataOutput.writeDouble(TF_IDF);
        }

        @Override
        public void readFields(DataInput dataInput) throws IOException {
            FilmName = dataInput.readUTF();
            word = dataInput.readUTF();
            TF = dataInput.readDouble();
            IDF = dataInput.readDouble();
            TF_IDF = dataInput.readDouble();
        }

        /** Convenience setter for all five fields at once (used by MapTest03). */
        public void set(String filmName, String word, double TF, double IDF, double TF_IDF) {
            FilmName = filmName;
            this.word = word;
            this.TF = TF;
            this.IDF = IDF;
            this.TF_IDF = TF_IDF;
        }

        public String getFilmName() {
            return FilmName;
        }

        public void setFilmName(String filmName) {
            FilmName = filmName;
        }

        public String getWord() {
            return word;
        }

        public void setWord(String word) {
            this.word = word;
        }

        public double getTF() {
            return TF;
        }

        public void setTF(double TF) {
            this.TF = TF;
        }

        public double getIDF() {
            return IDF;
        }

        public void setIDF(double IDF) {
            this.IDF = IDF;
        }

        public double getTF_IDF() {
            return TF_IDF;
        }

        public void setTF_IDF(double TF_IDF) {
            this.TF_IDF = TF_IDF;
        }

        /** Tab-separated output row. (Original had a stray unary plus: "+ +TF".) */
        @Override
        public String toString() {
            return FilmName + '\t' + word + '\t' + TF + '\t' + IDF + '\t' + TF_IDF;
        }
    }

    /**
     * Writable pairing a film name with one of its comments.
     * (Not referenced by the jobs in this driver; kept for compatibility.)
     */
    public static class receiveData implements Writable {
        private String FilmName;
        private String common;

        /** Sets both fields in one call. */
        public void set(String filmName, String common) {
            this.FilmName = filmName;
            this.common = common;
        }

        @Override
        public void write(DataOutput dataOutput) throws IOException {
            dataOutput.writeUTF(FilmName);
            dataOutput.writeUTF(common);
        }

        @Override
        public void readFields(DataInput dataInput) throws IOException {
            this.FilmName = dataInput.readUTF();
            this.common = dataInput.readUTF();
        }

        public String getFilmName() {
            return FilmName;
        }

        public void setFilmName(String filmName) {
            this.FilmName = filmName;
        }

        public String getCommon() {
            return common;
        }

        public void setCommon(String common) {
            this.common = common;
        }

        /** Tab-separated "film \t comment". */
        @Override
        public String toString() {
            return FilmName + "\t" + common;
        }
    }


    /** Builds a Configuration pointing at the cluster's HDFS. */
    private static Configuration newClusterConf() {
        Configuration conf = new Configuration();
        // The original set the nonexistent key "fs.default"; the property that
        // actually selects the default filesystem is "fs.defaultFS".
        conf.set("fs.defaultFS", "hdfs://192.168.0.155:9000/");
        return conf;
    }

    /**
     * Driver: chains three jobs.
     * Job 1 — tokenize comments; partition 0 = total comment count, partition 1 = TF.
     * Job 2 — document frequency per word.
     * Job 3 — join DF with the cached job-1 outputs to produce TF-IDF rows.
     */
    public static void main(String[] args) throws Exception {
        BasicConfigurator.configure();

        // ---- Job 1 ----
        Job job = Job.getInstance(newClusterConf());
        // Original referenced DriTest.class (copy-paste from another driver);
        // this driver class is MRDriver.
        job.setJarByClass(MRDriver.class);
        job.setMapperClass(MapTest01.class);
        job.setReducerClass(RedTest01.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DoubleWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        job.setPartitionerClass(MyPartition.class);
        job.setNumReduceTasks(2);  // MyPartition expects two partitions
        FileInputFormat.setInputPaths(job, "/TFIDF/input");
        FileOutputFormat.setOutputPath(job, new Path("/TFIDF/output1"));
        if (!job.waitForCompletion(true)) {
            System.exit(1);  // original fell through and exited 0 on failure
        }

        // ---- Job 2 ----
        Job job2 = Job.getInstance(newClusterConf());
        job2.setJarByClass(MRDriver.class);
        job2.setMapperClass(MapTest02.class);
        job2.setReducerClass(RedTest02.class);
        job2.setMapOutputKeyClass(Text.class);
        job2.setMapOutputValueClass(IntWritable.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(IntWritable.class);
        job2.setGroupingComparatorClass(MyGroup.class);
        // Partition 1 of job 1 holds the film_word TF records.
        FileInputFormat.setInputPaths(job2, "/TFIDF/output1/part-r-00001");
        FileOutputFormat.setOutputPath(job2, new Path("/TFIDF/output2"));
        if (!job2.waitForCompletion(true)) {
            System.exit(1);
        }

        // ---- Job 3 ----
        Job job3 = Job.getInstance(newClusterConf());
        job3.setJarByClass(MRDriver.class);
        job3.setMapperClass(MapTest03.class);
        job3.setReducerClass(RedTest03.class);
        job3.setMapOutputKeyClass(NullWritable.class);
        job3.setMapOutputValueClass(ResultData.class);
        job3.setOutputKeyClass(ResultData.class);
        job3.setOutputValueClass(NullWritable.class);
        // Cache file 0: total comment count; cache file 1: TF per film_word.
        URI[] uri = new URI[2];
        uri[0] = new URI("hdfs://192.168.0.155:9000/TFIDF/output1/part-r-00000");
        uri[1] = new URI("hdfs://192.168.0.155:9000/TFIDF/output1/part-r-00001");
        job3.setCacheFiles(uri);
        FileInputFormat.setInputPaths(job3, "/TFIDF/output2/part-r-00000");
        FileOutputFormat.setOutputPath(job3, new Path("/TFIDF/output3"));
        System.exit(job3.waitForCompletion(true) ? 0 : 1);
    }
}
