/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.bff.gaia.examples.java.graph;

import com.bff.gaia.api.common.functions.*;
import com.bff.gaia.api.common.restartstrategy.RestartStrategies;
import com.bff.gaia.api.common.time.Time;
import com.bff.gaia.api.java.DataSet;
import com.bff.gaia.api.java.ExecutionEnvironment;
import com.bff.gaia.api.java.functions.FunctionAnnotation;
import com.bff.gaia.api.java.operators.IterativeDataSet;
import com.bff.gaia.api.java.tuple.Tuple1;
import com.bff.gaia.api.java.tuple.Tuple2;
import com.bff.gaia.api.java.utils.ParameterTool;
import com.bff.gaia.core.AbstractCompensationFunc;
import com.bff.gaia.core.fs.FileSystem;
import com.bff.gaia.examples.java.graph.util.PageRankCompensationFunc;
import com.bff.gaia.examples.java.graph.util.PageRankData;
import com.bff.gaia.util.Collector;

import java.util.ArrayList;
import java.util.concurrent.TimeUnit;

import static com.bff.gaia.api.java.aggregation.Aggregations.SUM;

/**
 * A basic implementation of the Page Rank algorithm using a bulk iteration.
 *
 * <p>This implementation requires a set of pages and a set of directed links as input and works as follows. <br>
 * In each iteration, the rank of every page is evenly distributed to all pages it points to.
 * Each page collects the partial ranks of all pages that point to it, sums them up, and applies a dampening factor to the sum.
 * The result is the new rank of the page. A new iteration is started with the new ranks of all pages.
 * This implementation terminates after a fixed number of iterations.<br>
 * This is the Wikipedia entry for the <a href="http://en.wikipedia.org/wiki/Page_rank">Page Rank algorithm</a>.
 *
 * <p>Input files are plain text files and must be formatted as follows:
 * <ul>
 * <li>Pages represented as an (long) ID separated by new-line characters.<br>
 * For example <code>"1\n2\n12\n42\n63"</code> gives five pages with IDs 1, 2, 12, 42, and 63.
 * <li>Links are represented as pairs of page IDs which are separated by space
 * characters. Links are separated by new-line characters.<br>
 * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (directed) links (1)-&gt;(2), (2)-&gt;(12), (1)-&gt;(12), and (42)-&gt;(63).<br>
 * For this simple implementation it is required that each page has at least one incoming and one outgoing link (a page can point to itself).
 * </ul>
 *
 * <p>Usage: <code>PageRankWithoutCompensation --pages &lt;path&gt; --links &lt;path&gt; --output &lt;path&gt; --numPages &lt;n&gt; --iterations &lt;n&gt;</code><br>
 * If no parameters are provided, the program is run with default data from {@link PageRankData} and at most 1000 iterations.
 *
 * <p>This example shows how to use:
 * <ul>
 * <li>Bulk Iterations
 * <li>Default Join
 * <li>Configure user-defined functions using constructor parameters.
 * </ul>
 */
@SuppressWarnings("serial")
public class PageRankWithoutCompensation {

	/** Dampening factor d of the PageRank formula: rank' = d * rank + (1 - d) / numPages (see {@link Dampener}). */
	private static final double DAMPENING_FACTOR = 0.85;
//	private static final double EPSILON = 0.0001;

	// *************************************************************************
	//     PROGRAM
	// *************************************************************************

	/**
	 * Assembles and runs the bulk-iterative PageRank job.
	 *
	 * <p>Recognized parameters: {@code --pages}, {@code --links}, {@code --output},
	 * {@code --numPages}, {@code --iterations}, {@code --EPSILON}, plus the
	 * compensation-related {@code --killIterations}, {@code --killNodes} and
	 * {@code --compensationDataPath}.
	 *
	 * @param args command-line arguments, parsed with {@link ParameterTool}
	 * @throws Exception if job construction or execution fails
	 */
	public static void main(String[] args) throws Exception {

		ParameterTool params = ParameterTool.fromArgs(args);

		final int numPages = params.getInt("numPages", PageRankData.getNumberOfPages());
		final int maxIterations = params.getInt("iterations", 1000);

		//task2.3_iteration
		// Convergence threshold for the termination criterion (see EpsilonFilter below).
		final double EPSILON = params.getDouble("EPSILON", 1.0E-20);
		final int killIterations = params.getInt("killIterations", -1); // iteration at which to kill nodes (failure injection); -1 presumably disables it — TODO confirm against the runtime
		final String killNodes = params.get("killNodes", null); // names of the nodes to kill
		final String compensationDataPath = params.get("compensationDataPath", "hdfs://master:9000/iterate1.0"); // data collected after a failure, used for compensation
		//end

		// set up execution environment
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// make the parameters available to the web ui
		env.getConfig().setGlobalJobParameters(params);

		// retry the job up to 3 times with a 5-second delay between attempts
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.of(5, TimeUnit.SECONDS)));

		//task2.3_iteration
		// Configure the compensation mechanism: when/where to inject failures and
		// where the recovery data lives.
		env.getConfig().setCompensationKillIterations(killIterations);
		env.getConfig().setCompensationKillNodes(killNodes);
		env.getConfig().enableCompentionFunc();
		env.getConfig().setCompensationDataPath(compensationDataPath);
//		env.getConfig().isEnableDeltaIteration(); // enable delta (incremental) iteration
		//end

		env.setParallelism(20);

		// get input data
		DataSet<Long> pagesInput = getPagesDataSet(env, params);
		DataSet<Tuple2<Long, Long>> linksInput = getLinksDataSet(env, params);

		//task2.3_iteration
		// Pair every page with the synthetic key -1 so that rank accumulated under -1
		// (mass emitted for pages without an adjacency list, see JoinVertexWithEdgesMatch)
		// can later be re-distributed onto every real page via a join on that key.
		final DataSet<Tuple2<Long, Long>> mapPages = (DataSet<Tuple2<Long, Long>>)pagesInput.map(new MapFunction<Long, Tuple2<Long, Long>>() { // (-1, page)
			@Override
			public Tuple2<Long, Long> map(Long value) throws Exception {
				return new Tuple2<Long, Long>(-1L, value);
			}
		});
		//end

		// assign initial rank to pages
		DataSet<Tuple2<Long, Double>> pagesWithRanks = pagesInput.
			map(new RankAssigner((1.0d / numPages)));

		// build adjacency list from link input
		DataSet<Tuple2<Long, Long[]>> adjacencyListInput =
			linksInput.groupBy(0).reduceGroup(new BuildOutgoingEdgeList());
		// set iterative data set
		IterativeDataSet<Tuple2<Long, Double>> iteration = pagesWithRanks.iterate(maxIterations);

		//task2.3_iteration
		DataSet<Tuple2<Long, Double>> newRanks = iteration
			// join pages with outgoing edges and distribute rank
			// (each page hands its rank out evenly to its neighbor pages)
			.leftOuterJoin(adjacencyListInput).where(0).equalTo(0).with(new JoinFunction<Tuple2<Long, Double>, Tuple2<Long, Long[]>, Tuple2<Tuple2<Long, Double>, Tuple2<Long, Long[]>>>() {
				@Override
				public Tuple2<Tuple2<Long, Double>, Tuple2<Long, Long[]>> join(final Tuple2<Long, Double> first, final Tuple2<Long, Long[]> second) throws Exception {
					// Pass the (rank, adjacency) pair through; second is null for pages
					// without outgoing edges (left outer join).
					return new Tuple2<Tuple2<Long, Double>, Tuple2<Long, Long[]>>(first, second);
				}
			}).flatMap(new JoinVertexWithEdgesMatch(numPages)).groupBy(0).aggregate(SUM, 1).leftOuterJoin(mapPages).where(0).equalTo(0).with(new JoinFunction<Tuple2<Long, Double>, Tuple2<Long, Long>, Tuple2<Long, Double>>() {
				@Override
				public Tuple2<Long, Double> join(final Tuple2<Long, Double> first, final Tuple2<Long, Long> second) throws Exception {
					// Re-key the rank accumulated under -1 to each real page ID;
					// regular per-page records (second == null) pass through unchanged.
					if (second != null) {
						return new Tuple2<Long, Double>(second.f1, first.f1);
					}
					return first;
				}
			})
			// collect and sum ranks
			.groupBy(0).aggregate(SUM, 1)
			// apply dampening factor
			.map(new Dampener(DAMPENING_FACTOR, numPages));
		//end

		// Close the iteration: feed newRanks back in; the job stops early once the
		// termination-criterion data set (pages whose rank moved by more than EPSILON)
		// becomes empty, or after maxIterations.
		DataSet<Tuple2<Long, Double>> finalPageRanks = iteration.closeWith(
			newRanks,
			newRanks.join(iteration).where(0).equalTo(0)
				// termination condition
				.filter(new EpsilonFilter(EPSILON)));

		//task2.3_iteration
		AbstractCompensationFunc compensationFunc = new PageRankCompensationFunc(args);
		String jobName = "Basic Page Rank";
		//end

		// emit result
		if (params.has("output")) {
			finalPageRanks.writeAsCsv(params.get("output"), "\n", ",", FileSystem.WriteMode.OVERWRITE);
			// execute program
//			env.execute("Basic Page Rank Example");
			env.execute(jobName, compensationFunc);
		} else {
			System.out.println("Printing result to stdout. Use --output to specify output path.");
			finalPageRanks.print();
		}
	}

	// *************************************************************************
	//     USER FUNCTIONS
	// *************************************************************************

	/**
	 * Maps each page ID to a (pageId, initialRank) pair.
	 *
	 * <p>The output tuple is allocated once and reused across invocations
	 * (object-reuse pattern) to avoid a per-record allocation; only the page-ID
	 * field changes per call.
	 */
	public static final class RankAssigner implements MapFunction<Long, Tuple2<Long, Double>> {
		// Reused output tuple; f1 carries the fixed initial rank.
		Tuple2<Long, Double> outPageWithRank;

		public RankAssigner(double rank) {
			this.outPageWithRank = new Tuple2<Long, Double>(-1L, rank);
		}

		@Override
		public Tuple2<Long, Double> map(Long page) {
			this.outPageWithRank.f0 = page;
			return this.outPageWithRank;
		}
	}

	/**
	 * Collapses all (source, target) links of one source page into a single
	 * (source, Long[] targets) adjacency-list record. Run once as a
	 * pre-processing step before the iteration.
	 */
	@FunctionAnnotation.ForwardedFields("0")
	public static final class BuildOutgoingEdgeList implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long[]>> {

		// Reused buffer holding the neighbor IDs of the current group; cleared per call.
		private final ArrayList<Long> neighbors = new ArrayList<Long>();

		@Override
		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long[]>> out) {
			neighbors.clear();
			Long sourceId = 0L;

			// All records in the group share the same source ID (group key, field 0).
			for (Tuple2<Long, Long> link : values) {
				sourceId = link.f0;
				neighbors.add(link.f1);
			}
			out.collect(new Tuple2<Long, Long[]>(sourceId, neighbors.toArray(new Long[0])));
		}
	}

	/**
	 * Distributes a page's rank evenly across its outgoing neighbors.
	 *
	 * <p>Pages that arrived without an adjacency list (null right side of the
	 * preceding left outer join) emit their whole rank divided by the total page
	 * count under the synthetic key -1, to be redistributed downstream.
	 */
	public static final class JoinVertexWithEdgesMatch implements FlatMapFunction<Tuple2<Tuple2<Long, Double>, Tuple2<Long, Long[]>>, Tuple2<Long, Double>> {

		private long numPages;

		public JoinVertexWithEdgesMatch(final long num) {
			this.numPages = num;
		}

		@Override
		public void flatMap(final Tuple2<Tuple2<Long, Double>, Tuple2<Long, Long[]>> value, final Collector<Tuple2<Long, Double>> out) {
			final Tuple2<Long, Double> pageWithRank = value.f0;
			final Tuple2<Long, Long[]> adjacency = value.f1;

			if (adjacency == null) {
				// No outgoing edges known for this page: park its rank under key -1.
				out.collect(new Tuple2<Long, Double>(-1L, pageWithRank.f1 / this.numPages));
				return;
			}

			final Long[] targets = adjacency.f1;
			// NOTE(review): divides by targets.length — assumes every adjacency list
			// is non-empty (the class Javadoc requires at least one outgoing link).
			final double share = pageWithRank.f1 / targets.length;
			for (final Long target : targets) {
				out.collect(new Tuple2<Long, Double>(target, share));
			}
			// Emit a zero contribution for the page itself so it survives the
			// following aggregation even when nothing points to it.
			out.collect(new Tuple2<Long, Double>(pageWithRank.f0, 0.0));
		}
	}

	/**
	 * Applies the PageRank dampening formula:
	 * {@code rank' = dampening * rank + (1 - dampening) / numVertices}.
	 */
	@FunctionAnnotation.ForwardedFields("0")
	public static final class Dampener implements MapFunction<Tuple2<Long, Double>, Tuple2<Long, Double>> {

		private final double dampening;
		private final double randomJump;

		public Dampener(double dampening, double numVertices) {
			this.dampening = dampening;
			// Precompute the constant "random jump" term once per instance.
			this.randomJump = (1 - dampening) / numVertices;
		}

		@Override
		public Tuple2<Long, Double> map(Tuple2<Long, Double> value) {
			// Mutates the incoming tuple in place (object reuse) and returns it.
			value.f1 = value.f1 * dampening + randomJump;
			return value;
		}
	}

	/**
	 * Termination-criterion filter: keeps only the vertices whose rank changed by
	 * more than {@code epsilon} between two consecutive iterations. The bulk
	 * iteration terminates once this filter's output is empty.
	 */
	public static final class EpsilonFilter implements FilterFunction<Tuple2<Tuple2<Long, Double>, Tuple2<Long, Double>>> {

		// Convergence threshold. Made final and renamed: the original was a mutable
		// instance field named in UPPER_SNAKE_CASE as if it were a class constant.
		private final double epsilon;

		public EpsilonFilter(final double epsilon) {
			this.epsilon = epsilon;
		}

		@Override
		public boolean filter(Tuple2<Tuple2<Long, Double>, Tuple2<Long, Double>> value) {
			// value.f0 and value.f1 are the new and previous (page, rank) pairs of
			// the same page (joined on the page ID).
			return Math.abs(value.f0.f1 - value.f1.f1) > epsilon;
		}
	}

	// *************************************************************************
	//     UTIL METHODS
	// *************************************************************************

	/**
	 * Reads the page data set from the CSV path given via {@code --pages}, or
	 * falls back to the bundled default data set when the parameter is absent.
	 */
	private static DataSet<Long> getPagesDataSet(ExecutionEnvironment env, ParameterTool params) {
		if (!params.has("pages")) {
			System.out.println("Executing PageRank example with default pages data set.");
			System.out.println("Use --pages to specify file input.");
			return PageRankData.getDefaultPagesDataSet(env);
		}
		// One Long page ID per line; unwrap the Tuple1 produced by the CSV reader.
		return env.readCsvFile(params.get("pages"))
			.fieldDelimiter(",")
			.lineDelimiter("\n")
			.types(Long.class)
			.map(new MapFunction<Tuple1<Long>, Long>() {
				@Override
				public Long map(Tuple1<Long> pageId) {
					return pageId.f0;
				}
			});
	}

	/**
	 * Reads the directed-link data set (source, target pairs) from the CSV path
	 * given via {@code --links}, or falls back to the bundled default edges.
	 */
	private static DataSet<Tuple2<Long, Long>> getLinksDataSet(ExecutionEnvironment env, ParameterTool params) {
		if (!params.has("links")) {
			System.out.println("Executing PageRank example with default links data set.");
			System.out.println("Use --links to specify file input.");
			return PageRankData.getDefaultEdgeDataSet(env);
		}
		return env.readCsvFile(params.get("links"))
			.fieldDelimiter(",")
			.lineDelimiter("\n")
			.types(Long.class, Long.class);
	}
}