package wsi.subgraph;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import java.util.logging.Logger;

import org.rygh.core.face.IDone;
import org.rygh.core.html.Html;
import org.rygh.core.html.element.HtmlAnchor;
import org.rygh.core.html.element.HtmlOl;
import org.rygh.core.html.element.HtmlWrapper;
import org.rygh.core.html.table.HtmlTable;
import org.rygh.core.html.table.TableData;
import org.rygh.core.html.table.TableTuple;
import org.rygh.core.util.UIo;
import org.rygh.core.util.counter.SimpleCounter;
import org.rygh.gui.swing.component.SWorker;
import org.rygh.semantic.gui.swing.input.Input;
import org.rygh.semantic.jena.SDataAccess;
import org.rygh.semantic.model.ex.FactSet;
import org.rygh.semantic.model.ex.SampleWorker;
import org.rygh.semantic.model.rdf.Entity;
import org.rygh.semantic.model.rdf.Fact;
import org.rygh.semantic.model.rdf.Prefix;

import wsi.dataguide.XmlTableDocument;
import wsi.dataguide.XmlTableNode;
import wsi.function.DirManager;
import wsi.function.Function;
import wsi.function.Function.Phrase;
import wsi.script.AppConfig;
import wsi.script.AppXml;

/**
 * Background worker that extracts, for every {@link Input} of a function,
 * a bounded subgraph (as a {@link FactSet}) around the input's entity via a
 * hop-limited BFS over {@link SDataAccess}, then persists each subgraph as a
 * JSON file (plus optional per-input debug XML tables when
 * {@code AppXml.__DEBUG} is set).
 *
 * Runs on a SwingWorker-style thread; {@link #doInBackground()} drives
 * {@link #process()} and returns an HTML summary of the fact counter.
 */
public class ExtractWorker extends SWorker<String, Void> {
	// Memo of entities whose degree was found too large. Only read/written
	// by the deprecated probe* methods below; the active extraction path
	// (expand/extract) no longer consults it.
	@Deprecated
	private final static Set<Entity> LARGE_DEGREE_ENTITY_SET = new HashSet<>();

	private final Logger log = Logger.getLogger(getClass().getName());

	// Functionality threshold forwarded to every SDataAccess query.
	private final double threshold;
	// Number of BFS hops to expand from each input entity (see constructor
	// for how it is resolved).
	private final int hop;

	// Directory/function context; may be null (see constructor).
	private final DirManager dir;
	// Per-input debug tables; only populated when AppXml.__DEBUG is set.
	private final Map<Input, TableData> tableDataMap = new HashMap<>();
	// Per-input extracted fact sets; filled by compute(), written out by save().
	private final Map<Input, FactSet> tripleMap = new HashMap<>();
	// Counts extracted facts per input entity; rendered as HTML for the result.
	private final SimpleCounter<Entity> counter = new SimpleCounter<>();

	/**
	 * Creates the worker.
	 *
	 * Hop resolution: the configured hop wins; a configured value of -1 means
	 * "unset", in which case the function's own hop is used, defaulting to 1
	 * when no {@code DirManager} is available.
	 *
	 * @param dir directory/function context; may be null (hop then defaults to 1)
	 * @param master completion callback handed to the SWorker superclass
	 */
	public ExtractWorker(DirManager dir, IDone master) {
		super(master);

		this.dir = dir;
		counter.setName("# of Facts");
		threshold = AppConfig.shared().getFunctionalityThreshold();

		int hop = AppConfig.shared().getHop();

		if (hop == -1) {
			if (dir != null)
				hop = dir.getFunction().getHop();
			else
				hop = 1;
		}

		this.hop = hop;
	}

	/**
	 * Merges {@code resultSet} into {@code facts} and pushes every non-literal
	 * object entity onto {@code entities} (the next BFS frontier) when a
	 * frontier list is supplied.
	 *
	 * The commented-out code is the retired "skip too-large fact sets"
	 * behavior that used LARGE_DEGREE_ENTITY_SET; kept for reference.
	 *
	 * @param entity    the entity the facts were fetched for (only used by the
	 *                  retired code path)
	 * @param facts     accumulator the result set is appended to
	 * @param entities  BFS frontier to extend, or null to skip frontier tracking
	 * @param resultSet facts fetched from the data source; null is a no-op
	 */
	private void addResultSet(Entity entity, FactSet facts,
			List<Entity> entities, FactSet resultSet) {
		// if (LARGE_DEGREE_ENTITY_SET.contains(entity))
		// return true;

		if (resultSet == null)
			return;

		// if (resultSet.size() < SampleWorker.getDegreeLimit()) {
		facts.add(resultSet);

		for (Fact f : resultSet) {
			Entity e = f.getObject();

			if (entities != null && !e.isLiteral()) {
				entities.add(e);
			}
		}

		// return false;
		// } else {
		// log.info(String.format(
		// "[warn] too large fact set %d for entity %s",
		// resultSet.size(), entity));
		//
		// LARGE_DEGREE_ENTITY_SET.add(entity);
		// return true;
		// }
	}

	/**
	 * Extracts a subgraph for every input of the current function, updating
	 * the worker's progress bar as it goes.
	 *
	 * For each input: resolves its entity (skipping inputs without one),
	 * runs {@link #extract(Entity)}, flattens the per-entity fact sets into
	 * one group, records its size in {@code counter}, and stores it in
	 * {@code tripleMap} (a duplicate input overwrites the earlier entry, with
	 * a warning). Debug tables are built only when AppXml.__DEBUG is set.
	 * Stops early when the function marks the REST phrase as skipped
	 * (trial mode).
	 */
	private void compute() {
		Map<Entity, FactSet> map;
		TableData valueList;
		FactSet group;
		Entity entity;
		Input input;

		Function function = dir.getFunction();
		Input[] inputs = dir.getInputs();

		for (int i = 0; i < inputs.length; i++) {
			setProgress(100 * i / inputs.length);

			input = inputs[i];

			entity = input.getEntity();

			if (entity == null) {
				System.out
						.printf("[warn] no entity for %s\n", input.getValue());
				continue;
			}

			// File file = getFile(input.getEntity().getValue());
			// if (file.exists())
			// continue;

			map = extract(entity);

			// flatten the per-entity fact sets into a single group
			group = new FactSet();
			for (FactSet j : map.values())
				group.add(j);

			counter.count(input.getEntity(), group.size());

			// NOTE(review): this warning has no trailing newline, so the next
			// console line runs on — confirm whether that is intentional.
			if (tripleMap.containsKey(input)) {
				System.out.printf("[warn] duplicated input value %s, "
						+ "will override the previous one.", input);
			}

			tripleMap.put(input, group);

			if (AppXml.__DEBUG) {
				valueList = print(entity, map);
				// print2(input, map);
				tableDataMap.put(input, valueList);
			}

			if (function.isSkipped(Phrase.REST)) {
				log.info("break for trial.");
				break;
			}
		}
	}

	/**
	 * Worker entry point: runs the full extract-and-save pipeline and returns
	 * the fact counter rendered as an HTML table (the worker's result string).
	 */
	@Override
	protected String doInBackground() throws Exception {
		process();
		return counter.toHtmlTable().toString();
	}

	/**
	 * Performs one BFS expansion step from {@code entity}.
	 *
	 * Fetches forward facts (unless the entity is a literal) and then inverse
	 * facts (unless {@code onlyLiteral}), both bounded by the configured
	 * degree limit — the inverse query's limit is reduced by however many
	 * forward facts were already fetched, so the two together respect one
	 * shared budget. Collected facts are stored under {@code entity} in
	 * {@code factMap}; newly reached non-literal objects go into
	 * {@code entities}.
	 *
	 * @param entity      node to expand
	 * @param entities    frontier list to extend, or null to skip tracking
	 * @param factMap     per-entity fact accumulator; also acts as the
	 *                    visited-set (an entity already present is skipped);
	 *                    null disables both memoization and accumulation
	 * @param onlyLiteral when true, fetch only literal-valued forward facts
	 *                    and skip the inverse query (used for the final
	 *                    literal-append pass in extract())
	 */
	private void expand(Entity entity, List<Entity> entities,
			Map<Entity, FactSet> factMap, boolean onlyLiteral) {
		if (factMap != null && factMap.containsKey(entity))
			return;

		FactSet resultSet = null;
		FactSet facts = new FactSet();

		// register before querying so re-entry on the same entity is a no-op
		if (factMap != null)
			factMap.put(entity, facts);

		/**/

		// boolean too_large = false;

		int limit = AppConfig.shared().getDegreeLimit();

		if (!entity.isLiteral()) {
			resultSet = SDataAccess.select(entity, onlyLiteral, threshold,
					limit);

			/* too_large = */addResultSet(entity, facts, entities, resultSet);
		}
		/**/

		// forward and inverse queries share a single degree budget
		if (resultSet != null)
			limit -= resultSet.size();

		if (/* !too_large && */!onlyLiteral) {
			resultSet = SDataAccess.selectInverse(entity, threshold, limit);

			addResultSet(entity, facts, entities, resultSet);
		}
	}

	/**
	 * first use BFS to expand for h hops. Then for each entity node which is h
	 * hops from the center node, add the incident literal nodes.
	 * 
	 * For example, expand from y:Enya for 1 hop. After expanding, append
	 * "female" to y:female, which is 1 hop from y:Enya.
	 * 
	 * @param entity the center node to extract around; must not be null
	 * @return map from each visited entity to the facts collected for it
	 */
	public Map<Entity, FactSet> extract(Entity entity) {
		assert entity != null;

		int h = hop;

		Map<Entity, FactSet> factMap = new HashMap<>();

		Stack<Entity> stack = new Stack<>();
		stack.push(entity);

		List<Entity> entities = new ArrayList<>();

		if (h > 1)
			System.out.printf(
					"[info] really extracting entity %s (hop = %d)\n", entity,
					h);

		// level-by-level BFS: stack holds the current level, entities
		// collects the next one
		while (h > 0) {
			entities.clear();

			while (!stack.isEmpty()) {
				entity = stack.pop();
				expand(entity, entities, factMap, false);
			}

			// System.out.printf("[info] h %d, extract entity size %d\n", h,
			// entities.size());
			stack.addAll(entities);
			h--;
		}

		// when the above loop ends, the entities contain all entities farthest
		// from the center node.
		// System.out.println("entities " + entities);
		// log.info(String.format("%d boundary entities.", entities.size()));

		// System.out.println("factMap A " + factMap.size());

		// literal-only pass on the boundary nodes (the second phase described
		// in the Javadoc above)
		for (Entity i : entities) {
			expand(i, null, factMap, true);
		}

		// System.out.printf("[info] %s extracted, entity size %d\n", entity,
		// factMap.size());
		return factMap;
	}

	/**
	 * Builds the output file name for an input, encoding the hop count and
	 * threshold so different runs do not collide:
	 * {@code yagohop_<input>_<hop>_<threshold>.(xml|json)}.
	 *
	 * @param path  parent directory
	 * @param input input whose filename form is embedded
	 * @param xml   true for the debug XML file, false for the JSON fact file
	 */
	private File getFile(File path, Input input, boolean xml) {
		return new File(path, String.format("yagohop_%s_%d_%.4f.%s",
				input.toFilename(), hop, threshold, xml ? "xml" : "json"));
	}

	/**
	 * Loads a previously saved fact set (JSON) for the given input value from
	 * the context-graph directory.
	 *
	 * Side effect: clears {@code tripleMap} before loading.
	 *
	 * @param fileName input value the file was saved under
	 * @return the loaded facts, or null when no file exists
	 */
	public FactSet load(String fileName) {
		tripleMap.clear();

		File file = getFile(dir.getContextGraphDir(), new Input(fileName),
				false);

		if (!file.exists()) {
			log.info("file does not exist " + fileName);
			return null;
		}

		String content = UIo.readFile(file);

		FactSet facts = new FactSet();
		facts.loadJson(content);

		return facts;
	}

	/**
	 * Renders the extracted subgraph of {@code entity} as a sorted table with
	 * one (Subject, Predicate, Object) column group per hop level
	 * (hop + 1 groups in total).
	 *
	 * @param entity center entity whose fact chains are printed
	 * @param map    per-entity fact sets produced by extract()
	 * @return the populated, sorted table
	 */
	private TableData print(Entity entity, Map<Entity, FactSet> map) {
		TableTuple prefix = new TableTuple();
		TableTuple head = new TableTuple();

		for (int i = 0; i < hop + 1; i++) {
			head.add("Subject", "Predicate", "Object");
		}

		TableData valueList = new TableData();
		valueList.add(head);

		print(valueList, map, entity, prefix, 0);

		valueList.sort();

		return valueList;
		// System.out.println("print " + toHtmlString());
	}

	/**
	 * Recursive helper for {@link #print(Entity, Map)}: depth-first walks the
	 * fact chains, accumulating S/P/O cells in {@code prefix}, and emits a
	 * completed row once the recursion passes the hop depth.
	 *
	 * When an entity has no facts, three blank cells are appended so every row
	 * stays the same width. In both branches the three cells pushed at this
	 * level are popped again before returning/iterating, restoring the shared
	 * prefix for the caller (a copy is passed down via new TableTuple(prefix)).
	 *
	 * @param list   output table rows are added to
	 * @param map    per-entity fact sets
	 * @param entity entity whose facts are expanded at this level (may be null
	 *               on the blank-padding path)
	 * @param prefix cells accumulated so far along the current chain
	 * @param level  current hop depth, 0-based
	 */
	private void print(TableData list, Map<Entity, FactSet> map, Entity entity,
			TableTuple prefix, int level) {
		if (level > hop) {
			list.add(prefix);
			return;
		}

		FactSet facts = map.get(entity);

		if (facts == null || facts.isEmpty()) {
			prefix.addBlank();
			prefix.addBlank();
			prefix.addBlank();

			print(list, map, null, new TableTuple(prefix), level + 1);

			prefix.remove(prefix.size() - 1);
			prefix.remove(prefix.size() - 1);
			prefix.remove(prefix.size() - 1);
		} else {
			for (Fact f : facts) {
				assert (f.getSubject().equals(entity));

				{
					prefix.add(f.getSubject().getValue());
					prefix.add(f.getPredicate().toString());
					prefix.add(f.getObject().getValue());

					print(list, map, f.getObject(), new TableTuple(prefix),
							level + 1);
				}

				prefix.remove(prefix.size() - 1);
				prefix.remove(prefix.size() - 1);
				prefix.remove(prefix.size() - 1);
			}
		}
	}

	/**
	 * Debug dump: prints the fact tree rooted at entity "y:" + val to stdout
	 * as an indented text listing. Currently only invoked from commented-out
	 * code in compute().
	 */
	void print2(String val, Map<Entity, FactSet> map) {
		Entity entity = new Entity("y:" + val);

		StringBuilder sbuilder = new StringBuilder();

		print2(sbuilder, map, entity, 0);

		System.out.println("print2\n" + sbuilder.toString());
	}

	/**
	 * Recursive helper for {@link #print2(String, Map)}: appends each fact of
	 * {@code entity} on its own line, indented by one tab per hop level, then
	 * recurses into the fact's object until the hop limit or an unknown
	 * entity is reached.
	 */
	private void print2(StringBuilder sbuilder, Map<Entity, FactSet> map,
			Entity entity, int l) {
		if (!map.containsKey(entity) || l > hop) {
			sbuilder.append("");
			return;
		}

		// each fact starts on a new line, indented by its depth
		String indent = "\r\n";

		for (int i = 0; i < l; i++)
			indent += "\t";

		FactSet facts = map.get(entity);

		for (Fact f : facts) {
			assert f.getSubject().equals(entity);
			sbuilder.append(indent);
			sbuilder.append(f.toString());

			print2(sbuilder, map, f.getObject(), l + 1);
		}

	}

	/**
	 * Retired: checks (and memoizes in LARGE_DEGREE_ENTITY_SET) whether the
	 * forward or inverse degree of {@code entity} exceeds the dynamic degree
	 * limit. Superseded by the per-query limit in {@link #expand}.
	 *
	 * @return true when the entity's degree is too large to expand
	 */
	@Deprecated
	private boolean probeExpansion(Entity entity) {
		if (LARGE_DEGREE_ENTITY_SET.contains(entity))
			return true;

		// log.info("probing size for " + entity);

		boolean too_large = false;
		int count;

		if (!entity.isLiteral()) {
			count = SDataAccess.probeSize(entity, false, threshold);

			if (count > SampleWorker.getDynamicDegreeLimit()) {
				// log.info(String.format(
				// "[warn] too large degree (%d, forward) for entity %s",
				// count, entity));

				too_large = true;
			}
		}
		/**/

		if (!too_large) {
			count = SDataAccess.probeSize(entity, true, threshold);

			if (count > SampleWorker.getDynamicDegreeLimit()) {
				// log.info(String.format(
				// "[warn] too large degree (%d, inverse) for entity %s",
				// count, entity));

				too_large = true;
			}
		}

		if (too_large)
			LARGE_DEGREE_ENTITY_SET.add(entity);

		return too_large;
	}

	/**
	 * instead of skipping nodes with large degree, we just load a partial
	 * subgraph with a limited degree
	 * 
	 * Retired: probes a hop-limited BFS from {@code entity}, bailing out as
	 * soon as any node on the way has too large a degree.
	 * 
	 * @param entity center node to probe from
	 * @return true when some node within hop range is too large to expand
	 */
	@Deprecated
	public boolean probeExtraction(Entity entity) {
		if (LARGE_DEGREE_ENTITY_SET.contains(entity))
			return true;

		assert entity != null;

		int h = hop;

		Stack<Entity> stack = new Stack<>();
		stack.push(entity);

		List<Entity> entities = new ArrayList<>();

		// System.out.printf("[info] probe extracting entity %s\n", entity);

		while (h > 0) {
			entities.clear();

			while (!stack.isEmpty()) {
				entity = stack.pop();

				if (probeExpansion(entity))
					return true;
				else
					expand(entity, entities, null, false);
			}

			stack.addAll(entities);
			h--;
		}

		return false;
	}

	/** Full pipeline: extract all inputs, then persist the results. */
	public void process() {
		compute();
		save();
	}

	/**
	 * Persists the extraction results.
	 *
	 * When AppXml.__DEBUG is set, first writes one XML table document per
	 * input (with the default prefixes) into a "debug" subdirectory of the
	 * context-graph directory, creating it on demand. Then, for every input,
	 * writes its fact set as a JSON file via {@link #getFile}.
	 */
	private void save() {
		if (AppXml.__DEBUG) {
			// UIo.saveFile(
			// new File(dir.getDebugDir(), String.format(
			// "_debug_yago_hop_%d_%.3f.html", hop, threshold)),
			// toHtmlString());
			//
			// UIo.saveFile(
			// new File(dir.getDebugDir(), String.format(
			// "_debug_yago_hop_%d_%.3f.xml", hop, threshold)),
			// toXmlString());

			File _debug = new File(dir.getContextGraphDir(), "debug");

			if (!_debug.exists())
				_debug.mkdir();

			for (Entry<Input, TableData> i : tableDataMap.entrySet()) {
				XmlTableDocument doc = new XmlTableDocument();
				Input input = i.getKey();

				for (Entry<String, String> j : Prefix.DEFAULT_PREFIX.getMap()
						.entrySet()) {
					doc.addPrefix(j.getKey(), j.getValue());
				}

				XmlTableNode node = new XmlTableNode(input.getValue(),
						i.getValue());
				node.convert();

				doc.addNode(node);

				UIo.saveFile(getFile(_debug, input, true), doc.toString());

			}

		}

		// `facts` is reused across inputs as a dedup/serialization buffer;
		// each input's facts are copied in one by one before saving.
		FactSet facts = new FactSet();

		for (Entry<Input, FactSet> e : tripleMap.entrySet()) {
			Input input = e.getKey();

			facts.clear();

			for (Fact f : e.getValue()) {
				// if (f.getObject().getValue().contains("Heroes"))
				// System.out.println("f.toSeparatedString() " + f.toString());

				facts.add(f);
			}

			UIo.saveFile(getFile(dir.getContextGraphDir(), input, false),
					facts.toJsonString());
		}
	}

	/**
	 * Renders all debug tables as one HTML page: an ordered list of anchors
	 * (one per input) followed by each input's table, each preceded by its
	 * anchor target so the list links jump to the right table.
	 */
	String toHtmlString() {
		Html html = new Html(dir.getServiceName() + "/"
				+ dir.getFunction().getName());
		HtmlTable table;
		HtmlAnchor anchor;
		HtmlOl ol = new HtmlOl();
		Map<Input, HtmlAnchor> anchorMap = new HashMap<>();

		for (Input e : tableDataMap.keySet()) {
			anchor = new HtmlAnchor(e.getValue());

			ol.addElement(anchor);
			anchorMap.put(e, anchor);
		}

		html.addElement(ol);

		for (Entry<Input, TableData> e : tableDataMap.entrySet()) {
			table = new HtmlTable(e.getKey().getValue(), e.getValue());
			table.setShowCardinality(true);

			html.addElement(new HtmlWrapper(anchorMap.get(e.getKey())
					.toAnchorString()));
			html.addElement(table);
			html.newline();
		}

		return html.toString();
	}

	/** Same HTML counter summary that doInBackground() returns. */
	@Override
	public String toString() {
		return counter.toHtmlTable().toString();
	}

	/**
	 * Renders all debug tables as one XML document, with the default prefixes
	 * declared up front and one converted table node per input.
	 */
	String toXmlString() {
		XmlTableDocument doc = new XmlTableDocument();

		for (Entry<String, String> e : Prefix.DEFAULT_PREFIX.getMap()
				.entrySet())
			doc.addPrefix(e.getKey(), e.getValue());

		for (Entry<Input, TableData> e : tableDataMap.entrySet()) {
			XmlTableNode node = new XmlTableNode(e.getKey().getValue(),
					e.getValue());
			node.convert();

			doc.addNode(node);
		}

		return doc.toString();
	}
}
