/**
 *  Copyright (C) 2013  Piotr Szczepański
 *
 *   This program is free software: you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, either version 3 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
package edu.pw.elka.gtsna.weka_interface;


import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;

import org.forester.archaeopteryx.Archaeopteryx;
import org.forester.io.parsers.PhylogenyParser;
import org.forester.io.parsers.util.ParserUtils;
import org.forester.phylogeny.Phylogeny;
import org.forester.phylogeny.PhylogenyMethods;


import edu.pw.elka.gtsna.community_detection.CommunityStructure;
import edu.pw.elka.gtsna.graph.Graph;
import edu.pw.elka.gtsna.graph_evaluators.Coverage;
import edu.pw.elka.gtsna.graph_evaluators.Modularity;
import edu.pw.elka.gtsna.weka_interface.HierarchicalClusterer;

import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SelectedTag;

/**
 * Hierarchical clusterer specialised for graph-node interaction data.
 *
 * <p>Uses a {@code NodeDistance} function (built over graph interaction indices)
 * to drive Weka's agglomerative hierarchical clustering, records modularity and
 * coverage at every merge step, and can pick the cluster count that maximises
 * modularity. The resulting dendrogram(s) can be exported to Newick files and
 * displayed with Archaeopteryx.
 *
 * @author Aleksy Barcz
 */
public class InteractionClusterer extends HierarchicalClusterer {

	private static final long serialVersionUID = 6339931857297642279L;

	/** Default number of steps passed to the {@code NodeDistance} measure. */
	public static final int defaultStepsNumber = 10;

	/** Phylogenetic trees built from the clustering result, one per community. */
	protected List<Phylogeny> trees;

	/** Modularity recorded at each merge step of the first clustering pass. */
	protected List<Double> modularities;

	/** Coverage recorded at each merge step of the first clustering pass. */
	protected List<Double> coverages;

	/** When false, {@link #calculateStats(int)} is a no-op (used for the final re-clustering pass). */
	protected boolean doCalculateStats = true;

	/**
	 * Creates a clusterer using the given linkage type, Newick output enabled
	 * and a target of a single cluster (full dendrogram).
	 *
	 * @param linkage one of Weka's {@code TAGS_LINK_TYPE} identifiers
	 */
	public InteractionClusterer(String linkage) {
		setLinkType(new SelectedTag(linkage, HierarchicalClusterer.TAGS_LINK_TYPE));
		setPrintNewick(true);
		setNumClusters(1);
	}

	/**
	 * Builds the clusterer with an interaction-index distance function.
	 *
	 * @param instances   node instances to cluster
	 * @param stepsNumber steps parameter forwarded to {@code NodeDistance}
	 * @param iiType      interaction-index type identifier (e.g. "KSTEPS")
	 * @throws Exception propagated from the Weka superclass
	 */
	public void buildClusterer(Instances instances, int stepsNumber, String iiType) throws Exception {
		m_DistanceFunction = new NodeDistance(instances, stepsNumber, iiType);
		if (doCalculateStats) {
			// Fresh stat lists for this pass; calculateStats() appends to them.
			modularities = new ArrayList<Double>();
			coverages = new ArrayList<Double>();
		}
		calculatePosValue(instances);
		super.buildClusterer(instances);
	}

	/**
	 * Records modularity and coverage for the current partition into
	 * {@code nClusters} communities. No-op while {@code doCalculateStats} is false.
	 */
	@Override
	void calculateStats(int nClusters) {
		if (!doCalculateStats) {
			return;
		}
		buildHierarchy(nClusters);
		CommunityStructure communities = getCommunities();
		Graph graph = ((NodeInstances) ((NodeDistance) m_DistanceFunction).getInstances()).getGraph();
		modularities.add(new Modularity(graph, communities).evaluate());
		coverages.add(new Coverage(graph, communities).evaluate());
	}

	/**
	 * Clusters once to collect modularity for every merge step, then selects the
	 * cluster count with maximum modularity and (optionally) re-clusters with it.
	 *
	 * @param buildBest when true, a second clustering pass is run with the
	 *                  optimal cluster count (stats collection disabled)
	 * @throws Exception propagated from {@link #buildClusterer}
	 */
	public void buildBestClusterer(Instances instances, int stepsNumber,
			String iiType, boolean buildBest) throws Exception {
		// First pass: merge all the way down to one cluster, recording all modularities.
		buildClusterer(instances, stepsNumber, iiType);
		// BUG FIX (naming): this is the index of the MAXIMUM modularity — the
		// original called it "minIndex", which contradicted Collections.max().
		int maxIndex = modularities.indexOf(Collections.max(modularities));
		// Stats are appended while merging, so entry i corresponds to (size - i) clusters.
		int optimalClustersNumber = modularities.size() - maxIndex;
		setNumClusters(optimalClustersNumber);
		if (buildBest) {
			doCalculateStats = false;
			try {
				buildClusterer(instances, stepsNumber, iiType); // final pass
			} finally {
				// BUG FIX: restore the flag even if the final pass throws,
				// otherwise stats stay silently disabled for later calls.
				doCalculateStats = true;
			}
		}
	}

	/** Convenience overload: always rebuilds with the optimal cluster count. */
	public void buildBestClusterer(Instances instances, int stepsNumber,
			String iiType) throws Exception {
		buildBestClusterer(instances, stepsNumber, iiType, true);
	}

	/** @return modularity recorded at each merge step of the first pass */
	public List<Double> getModularities() {
		return modularities;
	}

	/** @return coverage recorded at each merge step of the first pass */
	public List<Double> getCoverages() {
		return coverages;
	}

	/** @return modularity of the partition with the current number of clusters */
	public double getModularity() {
		// Entry i of the stats corresponds to (size - i) clusters — see buildBestClusterer.
		return modularities.get(modularities.size() - getNumClusters());
	}

	/** @return coverage of the partition with the current number of clusters */
	public double getCoverage() {
		return coverages.get(coverages.size() - getNumClusters());
	}

	/**
	 * Computes a positive offset for the distance function so that all pairwise
	 * distances become non-negative: the offset is ceil(|min pairwise distance|).
	 * Self-pairs are included on purpose, contributing a distance of at most 0.
	 */
	protected void calculatePosValue(Instances instances) {
		((NodeDistance) m_DistanceFunction).hideWarnings();
		double minDistance = Double.MAX_VALUE;
		for (Instance first : instances) {
			for (Instance second : instances) {
				double distance = m_DistanceFunction.distance(first, second);
				minDistance = Math.min(distance, minDistance);
			}
		}
		// NOTE(review): hideWarnings() is invoked a second time here; it was
		// possibly meant to RE-ENABLE warnings after the scan — confirm against
		// the NodeDistance API before changing.
		((NodeDistance) m_DistanceFunction).hideWarnings();
		double posValue = Math.ceil(Math.abs(minDistance));
		((NodeDistance) m_DistanceFunction).setPosValue(posValue);
	}

	/** Builds with default steps and the "KSTEPS" interaction-index type. */
	@Override
	public void buildClusterer(Instances instances) throws Exception {
		this.buildClusterer(instances, defaultStepsNumber, "KSTEPS");
	}

	/** Sets the maximum merge distance used by the superclass. */
	public void setMaxDistance(double distance) {
		m_maxDistance = distance;
	}

	/** @return the (offset-adjusted) distance between instances i and j */
	public double getDistance(int i, int j) {
		return ((NodeDistance) m_DistanceFunction).distance(i, j);
	}

	/** @return merge distances recorded by the superclass */
	public List<Double> getDistances() {
		return m_distances;
	}

	/**
	 * Writes one Newick file per community, named {@code filename0, filename1, ...}.
	 *
	 * @param filename common file-name prefix
	 * @return the number of files written
	 * @throws FileNotFoundException if a file cannot be created
	 */
	public int toFiles(String filename) throws FileNotFoundException {
		// Switch locale to ENGLISH so floats are printed with dots, not commas.
		Locale lastLocale = Locale.getDefault();
		Locale.setDefault(Locale.ENGLISH);
		int counter = 0;
		try {
			this.setPrintNewick(true);
			String str = this.toString();
			for (String line : str.split("\n")) {
				// Newick tree descriptions start with an opening parenthesis.
				if (line.startsWith("(")) {
					String currFilename = filename + counter;
					// BUG FIX: try-with-resources — the original leaked the
					// PrintWriter if print() threw before close().
					try (PrintWriter writer = new PrintWriter(currFilename)) {
						writer.print(line + " " + "Cluster " + (counter + 1));
					}
					counter++;
				}
			}
		} finally {
			// BUG FIX: restore the original locale even when an exception escapes;
			// the original left the JVM default locale changed on failure.
			Locale.setDefault(lastLocale);
		}
		return counter;
	}

	/**
	 * Converts the clustered communities into forester {@code Phylogeny} trees
	 * by writing temporary Newick files and parsing them back.
	 *
	 * @throws FileNotFoundException if a temporary file cannot be created
	 */
	protected void convertToPhylogenies() throws FileNotFoundException {
		String filename = "tree.newick.temp";
		int treesCount = this.toFiles(filename);
		trees = new ArrayList<Phylogeny>();
		for (int i = 0; i < treesCount; i++) {
			final File treefile = new File(filename + i);
			// Clean up the temporary Newick file when the JVM exits.
			treefile.deleteOnExit();
			PhylogenyParser parser = null;
			try {
				parser = ParserUtils.createParserDependingOnFileType(treefile, true);
			}
			catch (final IOException e) {
				e.printStackTrace();
			}
			if (parser == null) {
				// BUG FIX: the original fell through with a null parser and hit a
				// NullPointerException inside readPhylogenies().
				continue;
			}
			try {
				Phylogeny[] phys = PhylogenyMethods.readPhylogenies(parser, treefile);
				// BUG FIX: guard against an empty result before indexing phys[0].
				if (phys != null && phys.length > 0) {
					trees.add(phys[0]);
				}
			}
			catch (final IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Converts the Weka cluster forest into a {@code CommunityStructure}:
	 * every dendrogram root becomes a community, and every instance not covered
	 * by any cluster becomes its own singleton community.
	 */
	public CommunityStructure getCommunities() {
		CommunityStructure communities = new CommunityStructure();
		Set<Integer> nodeNumbers = new TreeSet<Integer>();
		for (Node cluster : m_clusters) {
			if (cluster != null) {
				Set<Integer> clusterNodes =
						((NodeDistance) m_DistanceFunction).instancesToNodes(cluster.getInstances());
				communities.addCommunity(clusterNodes);
				nodeNumbers.addAll(clusterNodes);
			}
		}
		// Instances absent from every cluster form singleton communities.
		for (Instance instance : m_instances) {
			int nodeNumber = ((NodeDistance) m_DistanceFunction).getNode(((DenseInstance) instance));
			if (!nodeNumbers.contains(nodeNumber)) {
				communities.addCommunity(new LinkedHashSet<Integer>(Arrays.asList(nodeNumber)));
			}
		}
		return communities;
	}

	/**
	 * Displays the community dendrograms with the Archaeopteryx GUI.
	 *
	 * @throws FileNotFoundException if the intermediate Newick files cannot be written
	 */
	public void plot() throws FileNotFoundException {
		convertToPhylogenies();
		Archaeopteryx.createApplication(trees.toArray(new Phylogeny[trees.size()]));
	}

}
