package wsi.input.random;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

import org.json.JSONArray;
import org.rygh.core.file.FileLineCounter;
import org.rygh.core.file.FileLineReader;
import org.rygh.semantic.gui.swing.input.Input;
import org.rygh.semantic.sparql.RecursiveSparQL;
import org.rygh.semantic.sparql.SparQL;

import wsi.function.DirManager;
import wsi.input.InputFileTuple;
import wsi.input.SimpleCandidatePool;
import wsi.input.TypeBasedSelector;

/**
 * Selector that picks candidate {@link Input}s at random, either from a
 * local offline file of pre-extracted tuples or from a live SPARQL endpoint.
 *
 * Offline mode samples random line indices from the input file in batches
 * until the detector has enough candidates; online mode shuffles the query
 * results and feeds them to the detector one by one.
 */
public class RandomSelector extends TypeBasedSelector {

	// set by readFile() whenever a new input is added to the shared
	// candidate pool, so we know the pool must be persisted afterwards
	private boolean candidatePoolUpdated = false;

	public RandomSelector(DirManager dir) {
		// NOTE(review): second argument presumably enables offline support
		// in TypeBasedSelector — confirm against superclass
		super(dir, true);
	}

	@Override
	protected void compute() {
		// nothing to select without a target type
		if (type == null)
			return;

		if (offline)
			computeOffline();
		else
			computeOnline();
	}

	/**
	 * Offline selection: first replays the shared candidate pool through the
	 * detector, then draws random lines from the offline input file until the
	 * detector is satisfied (or the file is exhausted). Any new inputs
	 * discovered in the file are appended to the shared pool and persisted.
	 */
	private void computeOffline() {
		Set<Input> cache = new HashSet<>();
		List<Input> candidateList = new ArrayList<>();

		/*
		 * process common input pool first (deduplicated via the cache)
		 */
		for (Input input : candidatePool) {
			// Set.add returns false for duplicates
			if (cache.add(input))
				candidateList.add(input);
		}

		detect(candidateList);

		/*
		 * read from file
		 */
		File file = getFile(false);

		if (file == null) {
			// FIX: corrected typo in error message ("fine" -> "find")
			System.out.println("[error] cannot find input file.");
			return;
		}
		System.out.printf("random offline file %s\n", file.getName());

		FileLineCounter counter = new FileLineCounter(file);
		final int n = counter.count();

		Set<Integer> indices = new HashSet<>();

		if (detector.getExpectedSize() > n) {
			// the file has fewer lines than we need: read everything once
			System.out.println("small random pool " + n);

			for (int i = 0; i < n; i++)
				indices.add(i);

			readFile(file, indices, cache, candidateList);
			detect(candidateList);
		} else {
			Random rnd = new Random();

			// sample fresh batches of distinct line indices until the
			// detector is satisfied or every line has been seen
			while (!detector.isEnough() && cache.size() < n) {
				indices.clear();
				candidateList.clear();

				// HashSet guarantees distinct indices; guarded by the
				// enclosing branch, expectedSize <= n so this terminates
				while (indices.size() < detector.getExpectedSize())
					indices.add(rnd.nextInt(n));

				System.out.println("append new random " + indices.size() + ": "
						+ indices);

				readFile(file, indices, cache, candidateList);
				detect(candidateList);
			}
		}

		// FIX: persist pool updates from BOTH branches above — previously the
		// save was inside the sampling branch only, so inputs added during a
		// small-pool whole-file read were never written back
		if (candidatePoolUpdated)
			SimpleCandidatePool.shared().save(type, namePredicate);
	}

	/**
	 * Online selection: builds a type-constrained SPARQL query (recursive for
	 * YAGO, expanding the type hierarchy one level at a time) and feeds the
	 * shuffled results to the detector.
	 */
	private void computeOnline() {

		if (!isYago) {
			SparQL query = new SparQL();
			query.addFact(getTypeFact());

			computeOnline(query);
		} else {
			RecursiveSparQL query = new RecursiveSparQL();
			query.addFact(getTypeFact());
			query.addFacts(getNamePredicateFacts());

			// widen the recursion level until enough candidates are found
			// or the hop limit is reached
			for (int i = 0; i < TYPE_RECURSIVE_HOP && !detector.isEnough(); i++) {
				query.setLevel(i);
				computeOnline(query);
			}
		}
	}

	/**
	 * Runs one query, randomizes the result order, and detects inputs until
	 * the detector reports it has enough.
	 */
	private void computeOnline(SparQL query) {
		List<Input> inputs = select(query, true);

		// shuffle so repeated runs sample different entities first
		Collections.shuffle(inputs);

		for (int i = 0; i < inputs.size() && !detector.isEnough(); i++) {
			Input input = inputs.get(i);
			detect(input);
		}
	}

	// convenience overload: feed every candidate to the detector
	private void detect(List<Input> inputList) {
		for (Input i : inputList)
			detect(i);
	}

	/**
	 * Reads the given line indices from the offline file (each line is a JSON
	 * array encoding an entity/value tuple), converts them to {@link Input}s,
	 * and appends unseen ones to both the local candidate list and the shared
	 * candidate pool.
	 *
	 * @param file          offline input file, one JSON array per line
	 * @param indices       zero-based line indices to read
	 * @param cache         deduplication set of all inputs seen so far
	 * @param candidateList receives the newly discovered inputs
	 */
	private void readFile(File file, Set<Integer> indices, Set<Input> cache,
			List<Input> candidateList) {
		FileLineReader reader = new FileLineReader(file, indices);
		List<String> values = reader.read();

		for (String v : values) {
			InputFileTuple tuple = new InputFileTuple(new JSONArray(v));

			Input input = new Input(tuple.getEntity(), namePredicate,
					tuple.getValue());

			// skip inputs already seen (from the pool or earlier batches)
			if (!cache.add(input))
				continue;

			candidateList.add(input);

			// remember to persist the enlarged pool later
			candidatePoolUpdated = true;
			candidatePool.add(input);
		}
	}

}
