package com.hadoop.mr.pagerank;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * <p>Driver for the iterative PageRank MapReduce computation: builds the
 * initial rank file, then runs map/reduce rounds until convergence.</p>
 *
 * @author Andy
 * @date 2017/10/26
 */
public class PageRankMain {

    /**
     * Entry point.
     *
     * @param args args[0] = path of the input adjacency-list file,
     *             args[1] = working/output directory for the iterations
     */
    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        // Fail with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: PageRankMain <input> <output>");
            System.exit(1);
        }
        iterate(args[0], args[1]);
    }

    /**
     * Counts the nodes in the graph, assuming one node (one adjacency line) per line.
     * NOTE(review): blank lines are counted too — confirm the input never contains them.
     *
     * @param conf Hadoop configuration used to resolve the file system
     * @param file the adjacency-list file
     * @return the number of lines (= nodes) in {@code file}
     * @throws IOException if the file cannot be opened or read
     */
    public static int getNumNodes(Configuration conf, Path file) throws IOException {
        FileSystem fs = file.getFileSystem(conf);
        // try-with-resources: the original leaked the open HDFS stream.
        try (InputStream in = fs.open(file)) {
            return IOUtils.readLines(in, StandardCharsets.UTF_8).size();
        }
    }

    /**
     * Writes the initial PageRank input: one line per node of the form
     * {@code <nodeName>\t<Node#toString()>}, each node starting with rank 1/N.
     *
     * @param conf       Hadoop configuration used to resolve the file systems
     * @param file       source adjacency-list file ("node neighbor1 neighbor2 ...")
     * @param targetFile destination file for the seeded ranks
     * @return the number of nodes in the graph
     * @throws IOException on any read/write failure
     */
    public static int createInputFile(Configuration conf, Path file, Path targetFile) throws IOException {
        FileSystem fs = file.getFileSystem(conf);

        int numNodes = getNumNodes(conf, file);
        double initialPageRank = 1.0 / numNodes;

        // Both streams are now closed even if a write fails (the original closed
        // the output only on the happy path and never closed the input).
        try (OutputStream os = fs.create(targetFile);
             InputStream in = fs.open(file)) {
            LineIterator lineIterator = IOUtils.lineIterator(in, StandardCharsets.UTF_8);
            try {
                while (lineIterator.hasNext()) {
                    String line = lineIterator.nextLine();
                    System.out.println("createInputFile is method ==>" + line);

                    // StringUtils.split collapses runs of whitespace; parts[0] is the
                    // node name, the rest are its outgoing neighbors.
                    String[] parts = StringUtils.split(line);
                    if (parts.length == 0) {
                        // Skip blank lines instead of throwing AIOOBE on parts[0].
                        continue;
                    }
                    Node node = new Node()
                            .setPageRank(initialPageRank)
                            .setAdjacentNodeNames(Arrays.copyOfRange(parts, 1, parts.length));
                    // Write explicitly as UTF-8: the charset-less overload used the
                    // platform default, inconsistent with the UTF-8 reads above.
                    IOUtils.write(parts[0] + '\t' + node.toString() + '\n', os, StandardCharsets.UTF_8);
                }
            } finally {
                LineIterator.closeQuietly(lineIterator);
            }
        }
        return numNodes;
    }

    /**
     * Runs PageRank rounds until the summed per-node rank delta drops below the
     * convergence threshold. Each round reads the previous round's output.
     *
     * @param input  path of the raw adjacency-list file
     * @param output working directory; iteration i writes to {@code <output>/<i>}
     */
    public static void iterate(String input, String output) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Path outPath = new Path(output);
        // Start from a clean working directory.
        outPath.getFileSystem(conf).delete(outPath, true);
        outPath.getFileSystem(conf).mkdirs(outPath);

        Path inputPath = new Path(outPath, "input.txt");

        int numNodes = createInputFile(conf, new Path(input), inputPath);

        int iterate = 0;
        double desiredConvergence = 0.01;
        while (true) {
            Path jobOutputPath = new Path(outPath, String.valueOf(iterate));
            System.out.println("======================================");
            System.out.println("=  Iteration:    " + iterate);
            System.out.println("=  Input path:   " + inputPath);
            System.out.println("=  Output path:  " + jobOutputPath);
            System.out.println("======================================");

            if (calcPageRank(conf, inputPath, jobOutputPath, numNodes) < desiredConvergence) {
                System.out.println("Convergence is below " + desiredConvergence + ", we're done");
                break;
            }
            // Feed this round's output into the next round.
            inputPath = jobOutputPath;
            iterate++;
        }
    }

    /**
     * Runs one PageRank MapReduce round and returns the average convergence delta.
     *
     * @param conf       shared job configuration (node count is injected here)
     * @param inputPath  ranks from the previous round (tab-separated key/value lines)
     * @param outputPath where this round writes its updated ranks
     * @param numNodes   total node count, used to normalize the summed deltas
     * @return the average per-node rank change of this round
     * @throws RuntimeException if the MapReduce job fails
     */
    public static double calcPageRank(Configuration conf, Path inputPath, Path outputPath, int numNodes) throws IOException, ClassNotFoundException, InterruptedException {
        conf.setInt(PageRankReduce.CONF_NUM_NODES_GRAPH, numNodes);
        Job job = Job.getInstance(conf);

        job.setJarByClass(PageRankMain.class);
        job.setMapperClass(PageRankMap.class);
        job.setReducerClass(PageRankReduce.class);

        // Input lines are "<node>\t<Node state>" — split on the first tab.
        job.setInputFormatClass(KeyValueTextInputFormat.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        if (!job.waitForCompletion(true)) {
            throw new RuntimeException("Job failed");
        }

        // Reducers accumulate scaled rank deltas in a counter; un-scale and average.
        // NOTE(review): assumes CONVERGENCE_SCALING_FACTOR is a double (otherwise the
        // first division would truncate) — confirm its declaration in PageRankReduce.
        long summedConvergence = job.getCounters().findCounter(PageRankReduce.Counter.CONV_DELTAS).getValue();
        double convergence = summedConvergence / PageRankReduce.CONVERGENCE_SCALING_FACTOR / numNodes;
        System.out.println("======================================");
        System.out.println("=  Num nodes:           " + numNodes);
        System.out.println("=  Summed convergence:  " + summedConvergence);
        System.out.println("=  Convergence:         " + convergence);
        System.out.println("======================================");
        return convergence;
    }
}
