package org.hit.burkun.swalk.lap.norm;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.math3.optim.PointValuePair;
import org.hit.burkun.network.EdgeInfo;
import org.hit.burkun.network.SerializableGraph;
import org.hit.burkun.network.tester.ValidData;
import org.hit.burkun.swalk.FeatureExtracter;
import org.hit.burkun.swalk.RandomWalkGraph;

import edu.stanford.nlp.optimization.DiffFunction;
import edu.stanford.nlp.optimization.QNMinimizer;

/**
 * Main entry point: loads the train/test edge splits, builds the random-walk
 * training graphs, and runs the optimization — a quasi-Newton warm start
 * followed by gradient-descent refinement, with random restarts.
 */
public class LapcLinkpredictionOptimization {

	/**
	 * Runs the full link-prediction training pipeline and prints the best
	 * parameter vector and its cost.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		double alpha = 0.25;
		double eps = 1e-3; //sum
		//double interlayer = 0.2; // interlayer jump coeffiecient
		double b = 1;// e-6; // WMW function parameter
		double lambda = 1; // regularization parameter
		double learningRate = 1; // learningRate

		// Load the edge splits and build the graph with true test edges removed.
		HashMap<String, HashSet<EdgeInfo>> testTrueTrain = ValidData
				.readTestTrueTrainObj();
		HashMap<String, HashSet<EdgeInfo>> testFalseTrain = ValidData
				.readTestFalseTrainObj();
		HashMap<String, HashSet<EdgeInfo>> testTrue = ValidData
				.readTestTrueObj();
		SerializableGraph sg = ValidData.getRemovedSg(false, testTrue);

		HashMap<String, RandomWalkGraph> nets = SeribaleGraphAdpater.getRandomWalkTrainGraphs(testTrueTrain,
				testFalseTrain, testTrue, sg);

		//---- flatten the per-network map into an array for the trainer
		RandomWalkGraph[] rgs = new RandomWalkGraph[nets.size()];
		int idx = 0;
		for (RandomWalkGraph net : nets.values()) {
			rgs[idx++] = net;
		}
		//------
		System.out.println("build all network done! size:" + nets.size());
		//mem = 200
		QNMinimizer qn = new QNMinimizer(200, true);
		//qn.useOWLQN(true, 0.6);
		//qn.shutUp();
		qn.useMinPackSearch();
		qn.terminateOnAverageImprovement(false);
		double convergenceTolerance = 1e-5;
		double[] initialGuess = new double[FeatureExtracter.getFeatureNum()];
		int maxFunctionEvaluations = 25;
		LinkPredictionTrainer lp = new LinkPredictionTrainer(
				rgs, FeatureExtracter.getFeatureNum(), alpha, lambda, b,
				learningRate, eps);
		DiffFunction dfunction = new OptimizationFunction(lp);
		int restarts = 6;
		double[] optimum = null;
		double[] currentOptimum;
		double optimalValue = Double.MAX_VALUE;

		GradientDescent gd = new GradientDescent(lp, 25, eps, 6);
		PointValuePair opt = null;
		//----
		System.out.println("start....");
		// OPTIMIZATION START
		long start = System.nanoTime();

		while (restarts-- > 0) {
			System.out.println("iter...." + restarts);
			// Random restart: initial parameters drawn uniformly from [-6, 6).
			for (int i = 0; i < initialGuess.length; i++)
				initialGuess[i] = Math.random() * 12 - 6;

			// Coarse phase: smooth objective (large b), full learning rate.
			lp.setB(1);
			lp.setLearningRate(1);
			currentOptimum = qn.minimize(dfunction, convergenceTolerance,
					initialGuess, maxFunctionEvaluations);

			System.out.println("qn solver done!");
			// Refinement phase: take the current QN solution, sharpen the WMW
			// loss (small b) and use gradient descent with a small learning
			// rate to search for a better minimum in its neighborhood.
			lp.setB(1e-6);
			lp.setLearningRate(0.003);
			try {
				opt = gd.optimize(currentOptimum);
			} catch (InterruptedException e) {
				// Restore the interrupt status and stop optimizing instead of
				// falling through and dereferencing a possibly-null result.
				Thread.currentThread().interrupt();
				e.printStackTrace();
				break;
			}

			if (opt != null && opt.getSecond() < optimalValue) {
				optimalValue = opt.getSecond();
				optimum = opt.getFirst();
			}

			System.out.println("Cost: " + optimalValue);
			// optimum stays null until some restart improves on
			// Double.MAX_VALUE (it never does if the cost is always NaN).
			if (optimum != null) {
				printParameters(optimum);
			}
			if (qn.wasSuccessful()) {
				break;
			}
		}

		long end = System.nanoTime();
		// OPTIMIZATION END

		System.out.println();
		System.out.println("\nResults in " + (end - start) / 60E9
				+ " minutes.");
		System.out.println("Cost: " + optimalValue);

		if (optimum != null) {
			printParameters(optimum);
		} else {
			System.out.println("No optimum found.");
		}

	}

	/** Prints the predicted parameter vector on a single space-separated line. */
	private static void printParameters(double[] params) {
		System.out.println("PREDICTED PARAMETERS");
		for (double p : params)
			System.out.print(p + " ");
		System.out.println();
	}

}

/**
 * Adapter exposing a {@code LinkPredictionTrainer} as a {@code DiffFunction}
 * so that Stanford's {@code QNMinimizer} can optimize its cost.
 */
class OptimizationFunction implements DiffFunction {
	public LinkPredictionTrainer lp;

	public OptimizationFunction(LinkPredictionTrainer lp) {
		this.lp = lp;
	}

	/** Objective value (training cost) at parameter vector {@code x}. */
	@Override
	public double valueAt(double[] x) {
		return lp.getCost(x);
	}

	/** Number of parameters being optimized. */
	@Override
	public int domainDimension() {
		return lp.getParametersNumber();
	}

	/**
	 * Gradient of the cost at {@code x}.
	 *
	 * <p>The original implementation swallowed {@code InterruptedException}
	 * and returned {@code null}, which made the minimizer fail later with an
	 * opaque NPE. Instead, restore the interrupt status and fail fast with
	 * the cause attached.
	 *
	 * @throws IllegalStateException if gradient computation is interrupted
	 */
	@Override
	public double[] derivativeAt(double[] x) {
		try {
			return lp.getGradient(x);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
			throw new IllegalStateException(
					"gradient computation interrupted", e);
		}
	}

}