package wsi.input.feedback;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;

import org.rygh.semantic.func.RdfFunctionalityController;
import org.rygh.semantic.gui.swing.input.Input;
import org.rygh.semantic.model.rdf.Entity;
import org.rygh.semantic.model.rdf.Fact;
import org.rygh.semantic.model.rdf.Predicate;
import org.rygh.semantic.sparql.RecursiveSparQL;
import org.rygh.semantic.sparql.SparQL;

import wsi.function.DirManager;
import wsi.input.TypeBasedSelector;

/**
 * Input selector that explores a type's entities online, iteratively adding
 * predicates discovered from sampled tuples to narrow the SPARQL query, and
 * reverting (pruning) a predicate when it shrinks the result set too much.
 * Collected tuples are sorted and their inputs handed to the detector.
 *
 * <p>NOTE(review): not thread-safe — all state is mutated from the single
 * {@link #compute()} call.
 */
public class FeedbackSelector extends TypeBasedSelector {

	/**
	 * Result-size threshold: a query returning fewer entities than this is
	 * considered over-constrained, and the most recently picked predicate is
	 * reverted and pruned (see {@link #computeOnline}).
	 */
	private static final int QUERY_RESULT_LIMIT = 500;

	// Predicates currently constraining the query (mirrors predicateStack).
	private final Set<Predicate> predicatePickedSet = new HashSet<>();
	// Predicates that narrowed the result set too much and were discarded.
	// NOTE(review): written but never read here — kept for bookkeeping or
	// possible use by callers/subclasses; confirm before removing.
	private final Set<Predicate> prunedPredicates = new HashSet<>();
	// Every predicate ever encountered, so each one is explored at most once.
	private final Set<Predicate> allPredicates = new HashSet<>();

	// LIFO of picked predicates so the latest addition is reverted first.
	// ArrayDeque replaces the legacy synchronized java.util.Stack; push/pop/
	// isEmpty/size semantics are identical for single-threaded LIFO use.
	private final Deque<Predicate> predicateStack = new ArrayDeque<>();

	// Tuples gathered during online exploration; sorted in compute().
	private final List<FeedbackTuple> tuples = new ArrayList<>();

	/**
	 * @param dir directory manager forwarded to the superclass; the second
	 *            superclass argument ({@code false}) presumably disables some
	 *            default behavior — see {@link TypeBasedSelector}.
	 */
	public FeedbackSelector(DirManager dir) {
		super(dir, false);

		// The name (and optional link) predicates come from the superclass
		// and must never be re-picked as "new" predicates while exploring.
		allPredicates.add(namePredicate);

		if (linkPredicate != null)
			allPredicates.add(linkPredicate);
	}

	/**
	 * Entry point: runs the online exploration for the configured type, sorts
	 * the collected tuples, and feeds each input to the detector exactly once
	 * (skipping inputs the detector already holds).
	 */
	@Override
	protected void compute() {
		if (type == null)
			return;

		computeOnline(0, 0, getNewQuery(null));

		Collections.sort(tuples);

		// Seed the seen-set with the detector's current inputs so only
		// genuinely new inputs get added below.
		Set<Input> seen = new HashSet<>();

		for (Input i : detector.getInputs())
			seen.add(i);

		for (FeedbackTuple t : tuples) {
			Input input = t.getInput();

			// Set.add returns false for duplicates — one lookup instead of
			// a contains() followed by add().
			if (!seen.add(input))
				continue;

			detector.addInput(input);
		}
	}

	/**
	 * Recursive online exploration step.
	 *
	 * @param x     predicate depth (how many predicates have been pushed on
	 *              the current path); used only for logging
	 * @param y     recursive-query level counter; used only for logging
	 * @param query current SPARQL query to evaluate
	 */
	private void computeOnline(int x, int y, SparQL query) {
		System.out.printf(
				"(pred %d, recur %d), input size %d / %d, web call %d\n", x, y,
				detector.inputSize(), detector.getExpectedSize(),
				detector.getWebcalls());

		List<Input> entities = select(query, false);

		if (entities.size() < QUERY_RESULT_LIMIT) {
			/*
			 * If the stack is empty, the type itself only yields a limited
			 * number of entities: process them first, then loosen the query
			 * (recursively, below). Otherwise the last predicate made the
			 * query too restrictive — revert and prune it.
			 */
			if (!predicateStack.isEmpty()) {
				Predicate p = predicateStack.pop();

				System.out.printf("revert pred %s, stack size %d, "
						+ "counter size %d, input size %d\n", p.toString(),
						predicateStack.size(), entities.size(),
						detector.inputSize());

				prunedPredicates.add(p);
				predicatePickedSet.remove(p);

				// Must recurse after reverting so the loosened query runs.
				computeOnline(x - 1, y, getNewQuery(query));
				return;
			}
		}

		int c = entities.size();

		for (int i = 0; i < c && !detector.isEnough(); i++) {
			Input input = entities.get(i);

			FeedbackTuple tuple = new FeedbackTuple(input);

			if (!detect(input, -1, tuple))
				continue;

			tuple.load();
			tuples.add(tuple);

			// If this tuple carries predicates we have never seen, try each
			// one as an additional query constraint (depth-first).
			if (!allPredicates.containsAll(tuple.getPredicateSet())) {
				Predicate[] preds = tuple.getSortedPredicates();

				for (Predicate p : preds) {
					if (detector.isEnough())
						break;

					// Temporary and type predicates never constrain queries.
					if (p.isTemp() || p.isType())
						continue;

					if (allPredicates.contains(p))
						continue;

					predicateStack.push(p);
					predicatePickedSet.add(p);
					allPredicates.add(p);

					System.out.printf("add new pred %s (%.2f)\n", p.getValue(),
							RdfFunctionalityController.shared()
									.getInverseFunctionality(p));

					computeOnline(x + 1, y, getNewQuery(query));
				}
			}
		}

		// Still not enough inputs and nothing left to revert: loosen a
		// recursive (Yago-style) query by one type-hierarchy level, up to
		// TYPE_RECURSIVE_HOP levels.
		if (!detector.isEnough()) {
			if (predicateStack.isEmpty()) {
				if (query instanceof RecursiveSparQL) {
					RecursiveSparQL rq = (RecursiveSparQL) query;

					if (rq.getLevel() < TYPE_RECURSIVE_HOP) {
						rq.setLevel(rq.getLevel() + 1);

						computeOnline(x, y + 1, query);
					}
				}
			}
		}
	}

	/**
	 * Builds a fresh query from the type fact, the name-predicate facts and
	 * every currently picked predicate (as {@code ?x <p> ?eN} facts).
	 *
	 * <p>Predicates are added gradually because some are wrong, e.g.
	 * {@code ?x w:type y:album. ?x y:created ?y.} — when a predicate shrinks
	 * the sampled size dramatically it gets pruned by the caller.
	 *
	 * @param q previous query, or {@code null} on the first call; its
	 *          recursion level is carried over for Yago-style queries
	 * @return a new query reflecting the current predicate selection
	 */
	private SparQL getNewQuery(SparQL q) {
		SparQL query;

		if (isYago) {
			query = new RecursiveSparQL();

			// Preserve the recursion level reached so far.
			if (q != null) {
				int level = ((RecursiveSparQL) q).getLevel();
				((RecursiveSparQL) query).setLevel(level);

				System.out.println("recursive level " + level);
			}
		} else
			query = new SparQL();

		query.addFact(getTypeFact());
		query.addFacts(getNamePredicateFacts());

		int id = 0;
		Entity ex = new Entity("?x");

		for (Predicate p : predicatePickedSet) {
			Fact fact = new Fact(ex, p, new Entity("?e" + (id++)));
			query.addFact(fact);
		}

		/*
		 * Intentionally no GROUP BY: grouping by ?y here would make ?x
		 * unavailable in the selection.
		 */
		return query;
	}

}
