/* 
 * [New BSD License (BSD 3-Clause License)]
 * Copyright (c) 2012, Max Bechtold
 * All rights reserved.
 * 
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 
 *     - Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     - Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     - Neither the name of the Rel2Xml Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package maxbe.rel2xml.generator;

import static maxbe.rel2xml.util.Config.getBoolean;
import static maxbe.rel2xml.util.IO.copyFile;
import static maxbe.rel2xml.util.IO.emptyDir;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.SimpleTimeZone;

import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.sax.TransformerHandler;

import maxbe.rel2xml.generator.dml.DmlInstance;
import maxbe.rel2xml.generator.docs.HandlerController;
import maxbe.rel2xml.generator.docs.constraints.DerivedValues;
import maxbe.rel2xml.generator.docs.constraints.References;
import maxbe.rel2xml.generator.docs.constraints.ValueConstructor;
import maxbe.rel2xml.grammar.CreateTableLexer;
import maxbe.rel2xml.grammar.CreateTableParser;
import maxbe.rel2xml.grammar.DmlLexer;
import maxbe.rel2xml.grammar.DmlParser;
import maxbe.rel2xml.grammar.FillTableLexer;
import maxbe.rel2xml.grammar.FillTableParser;
import maxbe.rel2xml.grammar.MapTableLexer;
import maxbe.rel2xml.grammar.MapTableParser;
import maxbe.rel2xml.grammar.QueryBuilder;
import maxbe.rel2xml.grammar.SchemaBuilder;
import maxbe.rel2xml.util.Config;
import maxbe.rel2xml.util.IO;

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.TokenStream;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;

/**
 * Main class for generating XML documents according to SQL-DDL-like
 * instructions and for translating SQL queries on the given schema to XQuery.
 * 
 * @author Max Bechtold
 * 
 */
public class XGenerator {

	/**
	 * Formats an elapsed-millisecond count (wrapped in a {@link Date}) as
	 * "HH:mm:ss.S". The zone is pinned to offset 0 so the epoch-based Date
	 * reads as a plain duration. NOTE: SimpleDateFormat is not thread-safe;
	 * this shared instance assumes single-threaded generation — confirm if
	 * generation is ever parallelized.
	 */
	private static final SimpleDateFormat SDF = new SimpleDateFormat("HH:mm:ss.S");

	/** Rough per-tuple generation cost in milliseconds, used for the ETA estimate. */
	private static final double TIME_PER_TUPLE = 0.12;

	private final Logger log = Logger.getLogger(XGenerator.class);

	private final File ddlFile;
	private final File fillFile;
	private final File mapFile; // optional; may be null
	private final File queryFile;
	private final File dir;

	private final String dbname;
	private final SchemaBuilder sb;

	static {
		// Offset 0 makes SDF.format(new Date(elapsedMillis)) render a duration;
		// the zone id string is irrelevant for a fixed-offset zone.
		SDF.setTimeZone(new SimpleTimeZone(0, "id"));
	}

	/**
	 * Creates a generator and prepares the output directory: an existing
	 * directory is emptied, a missing one is created.
	 *
	 * @param ddlFile   CREATE TABLE (schema) definitions
	 * @param fillFile  fill instructions controlling table cardinalities
	 * @param mapFile   optional table-to-XML mapping definitions; may be null
	 * @param queryFile SQL DML queries to translate to XQuery
	 * @param dir       output directory for generated documents and copies
	 * @param dbname    database name, used as root element and file prefix
	 */
	public XGenerator(File ddlFile, File fillFile, File mapFile,
			File queryFile, File dir, String dbname) {
		this.ddlFile = ddlFile;
		this.fillFile = fillFile;
		this.mapFile = mapFile;
		this.queryFile = queryFile;
		this.dir = dir;
		this.dbname = dbname;
		sb = new SchemaBuilder();

		if (dir.exists()) {
			emptyDir(dir);
		} else if (!dir.mkdirs()) {
			// Surface the failure instead of silently ignoring mkdirs();
			// later file writes into 'dir' would fail obscurely otherwise.
			log.warn(String.format("Could not create directory '%s'.",
					dir.getName()));
		}
	}

	/**
	 * Runs the full generation pipeline: parses the DDL, fill and (optional)
	 * mapping inputs, then — unless this is a dry run — generates the XML
	 * documents and finally copies the input files into the output directory.
	 *
	 * @param dryRun if {@code true}, only parse/validate; generate nothing
	 * @return {@code true} on success, {@code false} if any phase aborted
	 */
	public boolean generate(boolean dryRun) {
		long start = System.currentTimeMillis();
		// The mapFile fragment is spliced into the format template itself
		// because it is optional; the remaining placeholders are positional.
		log.info(String.format(
				"Document generation %sfor DB '%s' according to '%s', '%s'"
						+ (mapFile != null ? ", '" + mapFile.getName() + "' "
								: " ") + "started.",
				(dryRun ? "(dry run) " : ""), dbname, ddlFile.getName(),
				fillFile.getName()));

		try {
			try {
				parseTables(sb);
				fillTables(sb);
				if (mapFile != null)
					mapTables(sb);
			} catch (Exception e) {
				log.error("An error occured during preparation.");
				if (getBoolean(Config.VERBOSE)) {
					e.printStackTrace(System.err);
				}
				// Rethrow so the outer handler logs the abort and returns false.
				throw e;
			}

			if (!dryRun) {
				try {
					prepare(sb.getTables());
					int total = estimateTotal(sb.getTables());
					log.info(String.format("Estimating %d tuples to be generated in %s.",
									total, SDF.format(new Date(
											Math.round(total * TIME_PER_TUPLE)))));

					populate(sb.getTables());
					log.info("Generation completed successfully.");

				} catch (Exception e) {
					log.error("An error occured during generation.");
					if (getBoolean(Config.VERBOSE)) {
						e.printStackTrace(System.err);
					}
					throw e;
				}
			}

			long end = System.currentTimeMillis();
			log.info(String.format("Generation finished (duration: %s)",
					SDF.format(new Date(end - start))));

			// Copy the inputs next to the generated documents for reference.
			// A copy failure is logged but does not fail the run.
			try {
				copyFile(ddlFile, new File(dir, ddlFile.getName()));
				copyFile(fillFile, new File(dir, fillFile.getName()));
				if (mapFile != null)
					copyFile(mapFile, new File(dir, mapFile.getName()));

			} catch (IOException e) {
				log.error(String
						.format("An error occured while copying files to directory '%s'.",
								dir.getName()));
				if (getBoolean(Config.VERBOSE))
					e.printStackTrace(System.err);
			}
		} catch (Exception e) {
			long end = System.currentTimeMillis();
			log.info(String.format("Generation aborted (duration: %s)",
					SDF.format(new Date(end - start))));
			return false;
		}

		return true;
	}

	/**
	 * Translates the SQL DML queries from {@link #queryFile} into XQuery and
	 * writes the result to "&lt;dbname&gt;-dml.xq" in the output directory,
	 * alongside a copy of the original query file. Errors are logged; this
	 * method never throws.
	 */
	public void translate() {
		log.info(String.format("Translating queries from '%s'...",
				queryFile.getName()));
		List<DmlInstance> dmls = null;
		String queries = null;

		try {
			queries = IO.readFileAsString(queryFile);
		} catch (IOException e) {
			log.error(String.format(
					"An error occured while reading query file '%s'.",
					queryFile.getName()));
			if (getBoolean(Config.VERBOSE))
				e.printStackTrace(System.err);
			return;
		}

		try {
			CharStream charstream = new ANTLRStringStream(queries);
			DmlLexer ctLexer = new DmlLexer(charstream);
			TokenStream tokenStream = new CommonTokenStream(ctLexer);
			DmlParser ctParser = new DmlParser(tokenStream);

			dmls = ctParser.dml_statement_list(new QueryBuilder(sb),
					String.format("fn:doc('%s.xml')/%s", dbname, dbname));

			// Named 'xquery' (not 'sb') to avoid shadowing the SchemaBuilder field.
			StringBuilder xquery = new StringBuilder();
			for (DmlInstance dml : dmls) {
				if (xquery.length() > 0)
					xquery.append("\n\n\n");
				xquery.append(dml.toXQuery());
			}

			log.info(String.format(
					"Translation of %d queries completed successfully.",
					dmls.size()));

			copyFile(queryFile, new File(dir, queryFile.getName()));
			File destFile = new File(dir, dbname + "-dml.xq");
			FileWriter writer = new FileWriter(destFile);
			try {
				writer.write(xquery.toString());
				writer.flush();
			} finally {
				// Guarantee the file handle is released even if write() throws.
				writer.close();
			}
		} catch (IOException e) {
			log.error(String
					.format("An error occured while writing/copying query files to directory '%s'.",
							dir.getName()));
			if (getBoolean(Config.VERBOSE))
				e.printStackTrace(System.err);
		} catch (Exception e) {
			log.error("An error occured during query translation.");
			if (getBoolean(Config.VERBOSE))
				e.printStackTrace(System.err);
		}

	}

	/**
	 * Generates the documents for all root tables (those without a parent).
	 * Tables mapped as SUBTREE share one main document rooted at an element
	 * named after the database; its handler is opened lazily on the first
	 * SUBTREE root table and closed after all tables are populated.
	 *
	 * @param tables all schema tables; only parentless ones are populated here
	 */
	private void populate(List<Table> tables) throws IOException,
			TransformerConfigurationException, SAXException {
		HandlerController hc = new HandlerController(dir, dbname);
		TransformerHandler handler = null;

		for (Table table : tables) {
			if (table.getParent() == null) {
				// if not yet done, get handler for main document
				if (handler == null
						&& table.getMapping() == Table.Mapping.SUBTREE) {
					handler = hc.getHandler();
					handler.startDocument();
					handler.startElement(null, null, dbname, null);
				}
				table.populate(hc);
			}
		}

		if (handler != null) {
			handler.endElement(null, null, dbname);
			handler.endDocument();
			hc.terminateHandler(handler);
		}
	}

	/** Sums the estimated cardinality over all tables for the ETA log line. */
	private int estimateTotal(List<Table> tables) {
		int total = 0;
		for (Table table : tables)
			total += table.estimateCardinality();
		return total;
	}

	/**
	 * Reads and parses the CREATE TABLE definitions into the schema builder.
	 *
	 * @throws IOException          if the DDL file cannot be read
	 * @throws RecognitionException if the DDL does not parse
	 */
	private void parseTables(SchemaBuilder sb) throws IOException, RecognitionException {
		String ddl = null;
		try {
			ddl = IO.readFileAsString(ddlFile);
		} catch (IOException e) {
			log.error("Unable to read file " + ddlFile);
			throw e;
		}

		CharStream charstream = new ANTLRStringStream(ddl);
		CreateTableLexer ctLexer = new CreateTableLexer(charstream);
		TokenStream tokenStream = new CommonTokenStream(ctLexer);
		CreateTableParser ctParser = new CreateTableParser(tokenStream);

		try {
			ctParser.table_list(sb);
		} catch (RecognitionException e) {
			log.error(e);
			throw e;
		}

	}

	/**
	 * Reads and parses the fill instructions into the schema builder.
	 *
	 * @throws IOException          if the fill file cannot be read
	 * @throws RecognitionException if the fill instructions do not parse
	 */
	private void fillTables(SchemaBuilder sb) throws IOException, RecognitionException {
		String fil = null;
		try {
			fil = IO.readFileAsString(fillFile);
		} catch (IOException e) {
			log.error("Unable to read file " + fillFile);
			throw e;
		}

		ANTLRStringStream charstream = new ANTLRStringStream(fil);
		FillTableLexer ftLexer = new FillTableLexer(charstream);
		CommonTokenStream tokenStream = new CommonTokenStream(ftLexer);
		FillTableParser ftParser = new FillTableParser(tokenStream);

		try {
			ftParser.table_filling_list(sb);
		} catch (RecognitionException e) {
			log.error(e);
			throw e;
		}
	}

	/**
	 * Reads and parses the optional table-to-XML mapping definitions into the
	 * schema builder. Only called when {@link #mapFile} is non-null.
	 *
	 * @throws IOException          if the mapping file cannot be read
	 * @throws RecognitionException if the mapping definitions do not parse
	 */
	private void mapTables(SchemaBuilder sb) throws IOException, RecognitionException {
		String map = null;
		try {
			map = IO.readFileAsString(mapFile);
		} catch (IOException e) {
			log.error("Unable to read file " + mapFile);
			throw e;
		}

		ANTLRStringStream charstream = new ANTLRStringStream(map);
		MapTableLexer mtLexer = new MapTableLexer(charstream);
		CommonTokenStream tokenStream = new CommonTokenStream(mtLexer);
		MapTableParser mtParser = new MapTableParser(tokenStream);

		try {
			mtParser.table_layout_list(sb);
		} catch (RecognitionException e) {
			log.error(e);
			throw e;
		}
	}

	/** Wires up reference constraints and pre-generates referenced values. */
	private void prepare(List<Table> tables) {
		setupReferences(tables);
		pregenerateReferences(tables);
	}

	/**
	 * Configures every {@link References} value constructor with the context
	 * sizes it needs: for each referencing column whose reference has a
	 * context table, the referenced table's fill factors are aggregated over
	 * the number of referenced-table tuples per context tuple.
	 *
	 * @param tables all schema tables
	 */
	public static void setupReferences(List<Table> tables) {
		for (Table table : tables) {
			for (Column column : table.getColumns()) {
				ValueConstructor vc = column.getDataConstraint()
						.getValueConstructor();

				if (!(vc instanceof References))
					continue;

				References reference = (References) vc;
				Table refTable = column.getRefColumn().getTable();

				Table refContext = reference.getRefContext();
				if (refContext != null) {
					// Determine how many refTable factors are in the context of
					// a single refContext tuple.
					// NOTE(review): assumes howManyIterationsThrough() is
					// nonzero here — confirm against the schema invariants.
					int aggregate = refTable.getBase()
							/ refTable.howManyIterationsThrough(refContext);

					if (refTable.getFillFactor().varies()) {
						int[] factors = refTable.getFactors();
						if (aggregate > 1) {
							// Collapse each run of 'aggregate' consecutive
							// factors into one context-sized sum.
							int[] aggFactors = new int[factors.length
									/ aggregate];
							for (int i = 0; i < aggFactors.length; i++)
								for (int j = 0; j < aggregate; j++)
									aggFactors[i] += factors[i * aggregate + j];
							factors = aggFactors;
						}
						reference.setContextSizes(factors);
					} else {
						// Constant fill factor: a single aggregated size suffices.
						reference.setContextSize(refTable.getFactor()
								* aggregate);
					}
				}
			}
		}
	}

	/**
	 * Pre-generates the values of all columns that other columns reference (or
	 * that drive a fill factor), and hands those values to the referencing
	 * side: derived columns receive them as source values, plain references as
	 * the candidate value pool.
	 *
	 * @param tables all schema tables
	 */
	public static void pregenerateReferences(List<Table> tables) {
		for (Table table : tables) {
			Column factorColumn = table.getFillFactor().getSourceColumn();
			if (factorColumn != null)
				factorColumn.prepareValues();

			for (Column column : table.getColumns()) {
				Column refColumn = column.getRefColumn();

				if (refColumn != null) {
					List<String> prepVals = refColumn.prepareValues();

					ValueConstructor vc = column.getDataConstraint()
							.getValueConstructor();
					if (column.isDerived()) {
						((DerivedValues) vc).setSourceVals(prepVals);
					} else {
						column.setRefValues(prepVals);
					}
				}
			}
		}
	}
}
