package de.learnlib.algorithms.ttt.dfa;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import de.learnlib.oracle.equivalence.*;
import net.automatalib.automata.fsa.DFA;
import net.automatalib.serialization.dot.GraphDOT;
import net.automatalib.util.automata.Automata;
import net.automatalib.words.Alphabet;
import de.learnlib.algorithms.ttt.dfa.TTTExperiment.Config;
//import de.learnlib.algorithms.ttt.dfa.eq.PCTraceEQOracle;
//import de.learnlib.api.EquivalenceOracle;
//import de.learnlib.api.LearningAlgorithm;
import de.learnlib.api.oracle.MembershipOracle;
import de.learnlib.api.oracle.MembershipOracle.DFAMembershipOracle;
import de.learnlib.examples.LearningExample.DFALearningExample;
import de.learnlib.api.query.DefaultQuery;
import de.learnlib.oracle.membership.SimulatorOracle.DFASimulatorOracle;
import de.learnlib.filter.statistic.oracle.DFACounterOracle;
import de.learnlib.filter.statistic.oracle.CounterSymbolQueryOracle;
import de.learnlib.api.algorithm.LearningAlgorithm;
import de.learnlib.filter.cache.dfa.DFACacheOracle;
import de.learnlib.filter.cache.dfa.DFACaches;
import net.automatalib.incremental.dfa.tree.IncrementalDFATreeBuilder;
import net.automatalib.visualization.Visualization;
import net.automatalib.serialization.dot.GraphDOT;

public class ExperimentRunner {

	/** Profiling key under which learning effort is reported. */
	public static final String LEARNING_PROFILE_KEY = "Learning";
	/** Profiling key under which counterexample-search effort is reported. */
	public static final String COUNTEREXAMPLE_PROFILE_KEY = "Searching for counterexample";

	/**
	 * One learning run of a single learner against a single target system,
	 * executed on the shared thread pool. On success, appends one result line
	 * to the shared output stream.
	 *
	 * NOTE(review): {@code call()} retries forever on any {@link Throwable};
	 * this appears intentional (tolerating flaky runs) but means a
	 * deterministically-failing test never terminates — confirm this is desired.
	 *
	 * @param <I> input-symbol type of the target automaton
	 */
	private static final class RunTest<I> implements Callable<Void> {
		private final int testId;           // index of the target system
		private final int index;            // index of the learner (also written as first result column)
		private final PrintStream ps;       // shared result stream; writes are synchronized on it
		private final DFALearningExample<I> example;
		private final LearnerCreator learner;
		private final long seed;

		public RunTest(int testId, int index, long seed, PrintStream ps, DFALearningExample<I> example, LearnerCreator learner) {
			this.testId = testId;
			this.index = index;
			this.ps = ps;
			this.example = example;
			this.learner = learner;
			this.seed = seed;
		}

		@Override
		public Void call() throws Exception {
			// Retry until the run completes without throwing (see class note).
			for (;;) {
				try {
					System.err.println("Running " + learner.getName() + " test " + testId + " on " + example.toString() + ", index = " + index);
					Result res = runTest(example.getAlphabet(), example.getReferenceAutomaton(), learner, index, seed);
					// Multiple workers share one stream: serialize writes and flush
					// immediately so a line is never interleaved or lost on crash.
					synchronized (ps) {
						ps.println(String.format("%d %d %d %f %d", index, res.totalQueries, res.totalQueriesSymbols, res.ceLength, res.totalRounds));
						ps.flush();
					}
					System.err.println(learner.getName() + " test " + testId + " on " + example.toString() + " finished");
					return null;
				} catch (Throwable ex) {
					ex.printStackTrace();
				}
			}
		}
	}

	private final Config config;

	public ExperimentRunner(Config config) {
		this.config = config;
	}

	/**
	 * Runs every configured learner against every configured target system on a
	 * fixed-size thread pool and writes one result line per run to
	 * {@code results/<outputName>/result.dat}. The configuration itself is
	 * dumped to {@code results/<outputName>/config}.
	 *
	 * <p>All learners for one target run concurrently; the method waits for them
	 * to finish before moving on to the next target.
	 *
	 * @throws FileNotFoundException if an output file cannot be created
	 * @throws IOException           if writing the configuration fails
	 */
	public void run() throws FileNotFoundException, IOException {
		ExecutorService exec = Executors.newFixedThreadPool(config.numThreads);

		File resultDir = new File("results");
		File outputDir = new File(resultDir, config.outputName);
		outputDir.mkdirs();

		File configFile = new File(outputDir, "config");
		try (PrintStream ps = new PrintStream(configFile)) {
			config.print(ps);
		}

		File outputFile = new File(outputDir, "result.dat");
		Random random = new Random(config.seed);
		LearnerCreator[] learners = LearnerCreators.getLearners(config.learners);

		// try-with-resources: previously the result stream leaked if anything
		// between its creation and the final close() threw.
		try (PrintStream stream = new PrintStream(outputFile)) {
			for (int i = 0; i < config.targetSystem.length; i++) {
				RealisticSystem target = new RealisticSystem(config.targetSystem[i]);
				// One seed per target (drawn in the same order as before), shared
				// by all learners so they face identical randomness.
				long localSeed = random.nextLong();

				// Scoped per target: the old code accumulated futures across
				// iterations and re-awaited already-completed ones every round.
				List<Future<?>> futures = new ArrayList<>();
				for (int k = 0; k < learners.length; k++) {
					RunTest<Integer> rt = new RunTest<>(i, k, localSeed, stream, target, learners[k]);
					futures.add(exec.submit(rt));
				}

				// Barrier: wait for all learners on this target before starting the next.
				for (Future<?> f : futures) {
					try {
						f.get();
					} catch (Exception ex) {
						ex.printStackTrace();
					}
				}
			}
			System.err.println("Closing streams");
		} finally {
			// Previously skipped when an exception escaped the loop above.
			exec.shutdown();
		}
	}

	/**
	 * Learns {@code model} with the learner produced by {@code learner}, using a
	 * tree-cache-backed simulator oracle for membership queries and a refining
	 * Wp-method oracle for equivalence queries.
	 *
	 * <p>Learning stops as soon as the hypothesis reaches the size of the target
	 * (the target is minimal, so equal size plus a final separating-word check
	 * implies equivalence) or no counterexample is found. If the final
	 * hypothesis is not equivalent to the target, the JVM exits with status 1.
	 *
	 * @param alphabet the input alphabet
	 * @param model    the (minimal) reference automaton to learn
	 * @param learner  factory for the learning algorithm under test
	 * @param ceLength index of the learner configuration (passed through from the caller)
	 * @param seed     random seed for reproducibility (currently unused here)
	 * @param <I>      input-symbol type
	 * @return statistics of the run: query counts, symbol counts, rounds, and
	 *         the equivalence-oracle cost reported by {@code getSum()}
	 */
	public static <I> Result runTest(Alphabet<I> alphabet, DFA<?, I> model,
			LearnerCreator learner, int ceLength, long seed) {
		DFASimulatorOracle<I> simOracle = new DFASimulatorOracle<>(model);
		DFACounterOracle<I> simOracleStats = new DFACounterOracle<>(simOracle, "MembershipQuery");

		// Cache filters duplicate membership queries before they hit the counter.
		DFACacheOracle<I> cacheOracle = DFACaches.createTreeCache(alphabet, simOracleStats);

		// Depth bounds (0, model.size()) are a stand-in: the true target size is
		// treated as unknown; termination is driven by the hypothesis-size check
		// below plus the final Automata.findSeparatingWord verification.
		DFARefineWpMethodEQOracle<I> wMethod = new DFARefineWpMethodEQOracle<>(cacheOracle, 0, model.size());

		LearningAlgorithm<DFA<?, I>, I, Boolean> dfaLearner
				= learner.createLearner(alphabet, cacheOracle);

		dfaLearner.startLearning();

		long rounds = 0L;
		while (true) {
			final DFA<?, I> hyp = dfaLearner.getHypothesisModel();

			// The target is assumed minimal: once the hypothesis has as many
			// states, it cannot be refined further — skip the (expensive) EQ query.
			if (hyp.size() == model.size())
				break;

			DefaultQuery<I, Boolean> ce = wMethod.findCounterExample(hyp, alphabet);
			if (ce == null) {
				break;
			}

			System.out.println("counterexample:" + ce.getInput().toString());
			rounds++;

			// A genuine counterexample must refine the hypothesis; a false one
			// indicates a broken learner or oracle.
			final boolean refined = dfaLearner.refineHypothesis(ce);
			assert refined;
		}

		Result res = new Result(learner.getName());
		DFA<?, I> res_h = dfaLearner.getHypothesisModel();
		// Counterexample cost as accumulated by the EQ oracle itself.
		res.ceLength = wMethod.getSum();
		res.totalQueries = simOracleStats.getCount();
		res.totalQueriesSymbols = simOracleStats.getSymbolCount();
		res.totalRounds = rounds;

		// Final sanity check: the learned hypothesis must be equivalent to the target.
		if (Automata.findSeparatingWord(model, res_h, alphabet) == null)
			System.out.println("Learning algorithm is correct");
		else {
			System.out.println("Learning algorithm is error");
			System.exit(1);
		}
		return res;
	}

}
