/**
 * 
 */
package edu.arizona.bio5.onto.modularity;

import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.Collections;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;

import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.modularity.OntologySegmenter;
import org.semanticweb.owlapi.util.DLExpressivityChecker;

import uk.ac.manchester.cs.owlapi.modularity.ModuleType;
import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor;
import uk.ac.manchester.cs.util.SetUtils;
import edu.arizona.bio5.onto.decomposition.ADAlgorithm;
import edu.arizona.bio5.onto.decomposition.Atom;
import edu.arizona.bio5.onto.decomposition.AtomLabelingAlgorithm;
import edu.arizona.bio5.onto.decomposition.AtomicDecomposition;
import edu.arizona.bio5.onto.decomposition.ChiaraDecompositionAlgorithm;
import edu.arizona.bio5.onto.decomposition.OntologyProcessingTaskRunner;
import edu.arizona.bio5.onto.decomposition.MSSLabel;
import edu.arizona.bio5.onto.decomposition.MSSLabelingAlgorithm;
import edu.arizona.bio5.onto.util.OntologyUtils;

/**
 * Runs the fast module extraction evaluation on each ontology
 * 
 * @author Pavel Klinov
 *
 */
public class DecomposeAndExtractRunner implements OntologyProcessingTaskRunner {

	static final ModuleType MODULE_TYPE = ModuleType.BOT;
	
	private ADAlgorithm m_adAlgo = new ChiaraDecompositionAlgorithm(MODULE_TYPE);
	private AtomLabelingAlgorithm m_labAlgo = new MSSLabelingAlgorithm();
	private final SeedSigBasedModuleExtractor m_extractor = new SeedSigBasedModuleExtractor();
	
	/* 
	 */
	@Override
	public void process(OWLOntology ontology, OWLOntologyManager mgr, Writer csvWriter) throws IOException {
		//Remove ABox, etc.
		OntologyUtils.prepareForSyntacticModularity(ontology);		
		System.out.println(ontology.getLogicalAxiomCount() + " logical axioms");	
		//Write ontology data
		DLExpressivityChecker exprChecker = new DLExpressivityChecker(Collections.singleton(ontology));
		
		csvWriter.write(exprChecker.getDescriptionLogicName() + ",");
		csvWriter.write(ontology.getLogicalAxiomCount() + ",");
		//Skip too big ontologies
		if (ontology.getLogicalAxiomCount() > 20000) {
			
			csvWriter.write("Too big, skipping");
			
			return;
		}
		//Decompose
		AtomicDecomposition ad = m_adAlgo.decompose(mgr, ontology);
		System.out.println(ad.stats());
		csvWriter.write(ad.stats().toCommaSeparatedString());
		csvWriter.write(",");
		//Labeling
		m_labAlgo.compute(ad);
		
		printLabelingInfo(ad);

		Random rnd = new Random(System.nanoTime());
		OntologySegmenter extractor = new SyntacticLocalityModuleExtractor(mgr, ontology, MODULE_TYPE);
		//Generate random signatures of varying size
		for (int sigSize = 2; sigSize <= 10; sigSize += 1) {
			
			System.out.println("FME evaluation, base seed signatures of size " + sigSize);
			
			//int totalSeedSigSize = 0;
			double redRatio = 0d;
			int atomsPerModule = 0;
			int axPerModule = 0;
			final int REPEATS = 100;
			long totalFMEtime = 0;
			long totalMEtime = 0;
			
			for (int i = 0; i < REPEATS; i++) {
				
				Set<OWLEntity> rndSig = generateRandomQuerySignature(ontology, sigSize, rnd);
				ModuleStats mStats = testModuleExtraction(ontology, mgr, ad, rndSig, extractor);
				
				//totalSeedSigSize += rndSig.size();
				redRatio += mStats.mRedundancy;
				atomsPerModule += mStats.mAtomsPerModule;
				axPerModule += mStats.mAxiomsPerModule;
				totalFMEtime += mStats.mADTime;
				totalMEtime += mStats.mStandardTime;
			}
			
			//System.out.println("Avegare seed signature size: " + 1d * totalSeedSigSize / REPEATS);
			System.out.println("Average relative speed-up: " + ((1d * totalMEtime) / totalFMEtime));
			System.out.println("FME: " + ((1d * totalFMEtime) / (REPEATS * 1000000)));
			System.out.println("ME: " + ((1d * totalMEtime) / (REPEATS * 1000000)));
			
			csvWriter.write(/*(1d * totalSeedSigSize / REPEATS) + "," + */(redRatio*100d)/REPEATS + "," + (1d*atomsPerModule)/REPEATS + ","
					+ (1d*axPerModule)/REPEATS + "," + ((1d * totalFMEtime) / (REPEATS * 1000000)) + "," + ((1d * totalMEtime) / (REPEATS * 1000000)) + ",");
		}
	}
	
	private void printLabelingInfo(AtomicDecomposition ad) {
		//Evaluate the number of MSS (temp)
		int totalMSS = 0;
		int largestMSS = 0;
		int incomplete = 0;
		
		for (Atom atom : ad.getAtoms()) {
			
			MSSLabel label = (MSSLabel) atom.getLabel(MSSLabel.TYPE);
			int mssSize = label.getSeedSignatures().size();
			
			incomplete += label.isComplete() ? 0 : 1;
			totalMSS += mssSize;
			largestMSS = Math.max(largestMSS, mssSize);
		}
		
		double avgMSS = 1d*totalMSS / ad.getAtoms().size();
		
		System.out.println("Total number of atoms: " + ad.getAtoms().size());
		System.out.println("Total number of MSSes: " + totalMSS);
		System.out.println("Avg number of MSSes per atom: " + avgMSS);
		System.out.println("Largest MSS: " + largestMSS);
		System.out.println("Number of dirty atoms: " + incomplete);
	}

	private Set<OWLEntity> generateRandomQuerySignature(OWLOntology ontology, int sigSize, Random rnd) {

		Set<OWLEntity> rndSig = new HashSet<OWLEntity>(sigSize);
		Set<OWLClass> classes = ontology.getClassesInSignature(false);

		rndSig.addAll(SetUtils.pickRandomSubset(classes, sigSize, rnd));
		
		return rndSig;
		/*Set<OWLEntity> rndSig = new HashSet<OWLEntity>(sigSize);
		Set<OWLClass> classes = SetUtils.pickRandomSubset(ontology.getClassesInSignature(false), sigSize, rnd);

		rndSig.addAll(classes);
		
		for (OWLAxiom axiom : ontology.getAxioms()) {
			
			if (axiom.isLogicalAxiom()) {
				
				if (!SetUtils.intersection(axiom.getClassesInSignature(), classes).isEmpty()) {
					rndSig.addAll(axiom.getClassesInSignature());
				}
			}
		}
		
		return rndSig;*/
	}

	private ModuleStats testModuleExtraction(	OWLOntology ontology,
												OWLOntologyManager manager,
												AtomicDecomposition ad,
												Set<OWLEntity> rndSig,
												OntologySegmenter extractor) {		
		//Extract the LAD-based module module
		m_extractor.setDecomposition(ad);
		Signature copySig = new Signature(rndSig);
		long adTime =  System.nanoTime();
		Set<OWLAxiom> adModule = m_extractor.extract(copySig, false);
		
		adTime = System.nanoTime() -  adTime;

		long standardTime = System.nanoTime();
		Set<OWLAxiom> module = extractor.extract(rndSig);
		standardTime = System.nanoTime() - standardTime;
		
		//System.out.println("Filter time:" + 1d * SeedSigBasedModuleExtractor.FILTER_TIME / 1000000);
		//System.out.println("FME: " + ((double)adTime / 1000000) + ", ME: " + (double)standardTime / 1000000);
		
		//Check completeness
		/*for (OWLAxiom axiom : module) {
			
			if (axiom.isLogicalAxiom() && !adModule.contains(axiom)) {
				
				System.err.println("Incorrect module");
				System.err.println("Missing axiom: " + axiom);
			}
		}*/
		
		//Evaluate redundancy
		int redCnt = 0;
		
		for (OWLAxiom axiom : adModule) {
			
			if (!module.contains(axiom)) redCnt++;
		}
		
		double redRatio = !(redCnt == 0d && module.isEmpty()) ? (1d * redCnt) / module.size() : 0d;
		
		return new ModuleStats(redRatio, m_extractor.getRelevantAtomNumber(), adModule.size(), adTime, standardTime); 
	}

	@Override
	public void setOntologyFile(File file) {
		// TODO Auto-generated method stub
	}
}

/**
 * Immutable value holder for the statistics collected from a single module
 * extraction comparison run (AD-based vs. standard extractor).
 * 
 * Fields are package-private and final: the original declared them mutable
 * with redundant initializers that the constructor immediately overwrote.
 */
class ModuleStats {
	
	//Fraction of AD-based module axioms not present in the standard module
	final double mRedundancy;
	//Number of atoms contributing to the AD-based module
	final int mAtomsPerModule;
	//Number of axioms in the AD-based module
	final int mAxiomsPerModule;
	//AD-based (fast) extraction time, in nanoseconds
	final long mADTime;
	//Standard locality-based extraction time, in nanoseconds
	final long mStandardTime;
	
	/**
	 * @param red redundancy ratio of the AD-based module
	 * @param apermod number of atoms per module
	 * @param axpermod number of axioms per module
	 * @param adTime AD-based extraction time (ns)
	 * @param standardTime standard extraction time (ns)
	 */
	ModuleStats(double red, int apermod, int axpermod, long adTime, long standardTime) {
		
		mRedundancy = red;
		mAtomsPerModule = apermod;
		mAxiomsPerModule = axpermod;
		mADTime = adTime;
		mStandardTime = standardTime;
	}	
}
