/**
 * 
 */
package ro.dta.idbi.patterns;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Hashtable;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import ro.dta.idbi.interfaces.IExtentGenerator;
import ro.dta.idbi.interfaces.IntegrationOperation;
import ro.dta.idbi.model.AbstractPattern;
import ro.dta.idbi.model.Utils;
import uk.ac.ic.doc.automed.IntegrityException;
import uk.ac.ic.doc.automed.NotFoundException;
import uk.ac.ic.doc.automed.modelmanagement.modeldef.SQLModelDef;
import uk.ac.ic.doc.automed.reps.Schema;
import uk.ac.ic.doc.automed.reps.SchemaObject;

/**
 * <b>Normalisation pattern</b><br/>
 * 
 * Example of use:<br />
 * 
 * <code>normalise_table (&lt;&lt;origTable&gt;&gt;, &lt;&lt;newTable&gt;&gt;, &#123;a1..an&#125;, &#123;b1..bm&#125;)</code>
 * <br />
 * 
 * This macro states that the set A = {a1...an} of columns functionally
 * determines each of the columns bk, k=1..m.<br/>
 * 
 * @author Tudor Dobrila
 */
public class Normalise extends AbstractPattern {

	/**
	 * Name of the pattern
	 */
	public static final String NAME = "Normalise Table";

	/**
	 * Description of the pattern
	 */
	public static final String DESCRIPTION = "Perform a lossless decomposition of the table";

	/**
	 * Name of the command associated with the pattern
	 */
	public static final String COMMAND = "normalise_table";

	/**
	 * Integration phase during which the command is applied
	 */
	public static final EnumSet<IntegrationOperation> OPERATION = EnumSet
			.of(IntegrationOperation.CONFORMING);

	/**
	 * Name of the existing table that will be normalised
	 */
	private transient String origTable;

	/**
	 * Name of the new table created after normalisation
	 */
	private transient String newTable;

	/**
	 * Determinant set expressed as a string (comma-separated column names)
	 */
	private transient String detSetStr;

	/**
	 * Determinant columns list
	 */
	private transient List<String> detSet;

	/**
	 * Dependent columns set expressed as a string (comma-separated column names)
	 */
	private String depSetStr;

	/**
	 * Dependent columns
	 */
	private transient List<String> depCols;

	/**
	 * Scheme definitions of the dependent columns, keyed by column name
	 */
	private Hashtable<String, Object[]> colDefs;

	/**
	 * Normalisation pattern class constructor
	 * 
	 * @param sourceSchema
	 *            Source schema on which the normalisation will be applied
	 * @throws NotFoundException
	 *             if the schema cannot be resolved by the superclass
	 * @throws IntegrityException
	 *             if the schema fails an integrity check in the superclass
	 */
	public Normalise(final Schema sourceSchema) throws NotFoundException, IntegrityException {
		super(sourceSchema);

		// Command shape: normalise_table (<<orig>>, <<new>>, {a1..an}, {b1..bm})
		pattern = Pattern.compile("^" + COMMAND + "[ ]*\\((" + TABLE_REGEX + ")" + COMMA_REGEX
				+ "(" + TABLE_REGEX + ")" + COMMA_REGEX + VAL_SEQ_REGEX + COMMA_REGEX
				+ VAL_SEQ_REGEX + "\\)$");
	}

	/**
	 * Normalisation pattern class constructor
	 * 
	 * @param sourceSchema
	 *            Name of source schema on which the normalisation will be
	 *            applied
	 * @throws NotFoundException
	 *             if no schema with the given name exists
	 * @throws IntegrityException
	 *             if the schema fails an integrity check in the superclass
	 */
	public Normalise(final String sourceSchema) throws NotFoundException, IntegrityException {
		this(Schema.getSchema(sourceSchema));
	}

	/**
	 * Normalisation pattern class constructor
	 * 
	 * @param sourceSchema
	 *            Source schema on which the normalisation will be applied
	 * @param origTable
	 *            Name of the existing table that will be normalised
	 * @param newTable
	 *            Name of the new table created after normalisation
	 * @param detSetStr
	 *            Determinant set as string
	 * @param depSetStr
	 *            Dependent set as a string
	 * @param colDefs
	 *            Dependent column definitions
	 * @throws NotFoundException
	 *             if the schema cannot be resolved by the superclass
	 * @throws IntegrityException
	 *             if the schema fails an integrity check in the superclass
	 */
	public Normalise(final Schema sourceSchema, final String origTable, final String newTable,
			final String detSetStr, final String depSetStr,
			final Hashtable<String, Object[]> colDefs) throws NotFoundException, IntegrityException {
		this(sourceSchema);

		this.origTable = origTable;
		this.newTable = newTable;
		this.detSetStr = detSetStr;
		this.depSetStr = depSetStr;
		this.detSet = splitValues(detSetStr);
		this.depCols = splitValues(depSetStr);
		this.colDefs = colDefs;
	}

	/**
	 * Expand the pattern into the sequence of primitive add/delete
	 * transformations that perform the lossless decomposition.
	 * 
	 * @return Ordered list of primitive transformation commands
	 */
	@Override
	protected List<String> getPrimitiveCommands() throws NotFoundException, IntegrityException {
		List<String> result = new ArrayList<String>();
		final ExtentGenerator gen = new ExtentGenerator(origTable, newTable, detSetStr, detSet,
				depCols);
		// Reused buffer; reset with setLength(0) before each command.
		final StringBuilder cmd = new StringBuilder();

		// Step 1 - add new table
		String iql = gen.getExtentsForStep(1).get(0);
		cmd.append("add (");
		cmd.append(Utils.genTableRepresentation(newTable));
		cmd.append(", ").append(iql).append(")");
		result.add(cmd.toString());

		// Step 2 - add determinant columns to the newly created table.
		// While doing so, collect the primary-key scheme array for step 3:
		// [pkName, tableName, <<table,col1>>, ..., <<table,coln>>].
		Object[] pkArr = new Object[detSet.size() + 2];
		pkArr[0] = newTable + "_pk";
		pkArr[1] = newTable;
		int crtPos = 2;
		List<String> extents = gen.getExtentsForStep(2);
		for (String col : detSet) {
			// Copy type/modifier info (definition[2], definition[3]) from the
			// corresponding column of the original table.
			final SchemaObject sourceCol = sourceSchema.getSchemaObject("<<" + origTable + ","
					+ col + ">>");
			Object[] definition = sourceCol.getSchemeDefinition();
			cmd.setLength(0);
			cmd.append("add (");
			cmd.append(Utils.genColRepresentation(newTable, col, definition[2], definition[3]));
			cmd.append(",").append(extents.get(crtPos - 2)).append(")");
			result.add(cmd.toString());
			pkArr[crtPos++] = "<<" + newTable + "," + col + ">>";
		}

		// Step 3 - add PK constraint (only if the SQL model supports it)
		if (sqlModel.isFeatureInUse(SQLModelDef.PRIMARY_KEY)) {
			cmd.setLength(0);
			cmd.append("add (");
			cmd.append(Utils.genPKRepresentation(pkArr));
			cmd.append(")");
			result.add(cmd.toString());
		}

		// Step 4 - add FK constraint (only if the SQL model supports it)
		if (sqlModel.isFeatureInUse(SQLModelDef.FOREIGN_KEY)) {
			cmd.setLength(0);
			cmd.append("add (");
			cmd.append(Utils.genFKRepresentation(origTable, newTable, detSet));
			cmd.append(")");
			result.add(cmd.toString());
		}

		// Step 5 - add dependent columns into newly created table
		extents = gen.getExtentsForStep(5);
		int crtExt = 0;
		for (String col : depCols) {
			Object[] definition = colDefs.get(col);
			cmd.setLength(0);
			cmd.append("add (");
			cmd.append(Utils.genColRepresentation(newTable, col, definition[2], definition[3]));
			cmd.append(",").append(extents.get(crtExt++)).append(")");
			result.add(cmd.toString());
		}

		// Step 6 - remove dependent columns from source table
		extents = gen.getExtentsForStep(6);
		crtExt = 0;
		for (String col : depCols) {
			Object[] remDef = colDefs.get(col);
			cmd.setLength(0);
			cmd.append("delete (");
			cmd.append(Utils.genColRepresentation(origTable, col, remDef[2], remDef[3]));
			cmd.append(",").append(extents.get(crtExt++)).append(")");
			result.add(cmd.toString());
		}

		return result;
	}

	@Override
	protected boolean verify() {
		// TODO Auto-generated method stub
		return true;
	}

	/**
	 * Parse the transformation command
	 * 
	 * @param trans
	 *            Transformation command
	 * @throws IllegalArgumentException
	 *             if the command does not match the pattern, or if a dependent
	 *             column does not exist in the source schema
	 */
	@Override
	protected void parse(final String trans) {
		final Matcher matcher = pattern.matcher(trans);
		// Validate the match up front; otherwise matcher.group() below would
		// fail with an uninformative IllegalStateException.
		if (!matcher.find()) {
			throw new IllegalArgumentException("Command does not match the " + COMMAND
					+ " pattern: " + trans);
		}
		parse(matcher);

		// Resolve and cache the scheme definition of every dependent column.
		for (String col : depCols) {
			SchemaObject colObj;
			String colName = "<<" + origTable + "," + col + ">>";
			try {
				colObj = sourceSchema.getSchemaObject(colName);
				colDefs.put(col, colObj.getSchemeDefinition());
			} catch (NotFoundException e) {
				// Chain the cause so the original failure is not lost.
				throw new IllegalArgumentException("Schema object " + colName
						+ " does not exist.", e);
			}
		}
	}

	/**
	 * Extract the elements of the pattern from a matcher
	 * 
	 * @param matcher
	 *            Matcher containing the elements of the pattern
	 */
	protected void parse(final Matcher matcher) {
		// Group layout follows the regex built in the constructor:
		// 1 = original table, 2 = new table, 3 = determinant set,
		// 7 = dependent set (groups 4-6 are internal to VAL_SEQ_REGEX).
		origTable = matcher.group(1);
		origTable = parseTableName(origTable);
		newTable = matcher.group(2);
		newTable = parseTableName(newTable);
		detSetStr = matcher.group(3);
		detSet = splitValues(detSetStr);
		depSetStr = matcher.group(7);
		depCols = splitValues(depSetStr);
		colDefs = new Hashtable<String, Object[]>();
	}

	/**
	 * @return Name of the table that is normalised
	 */
	public String getOrigTable() {
		return origTable;
	}

	/**
	 * @return Name of the new table created after normalisation
	 */
	public String getNewTable() {
		return newTable;
	}

	/**
	 * @return The determinant set expressed as a string
	 */
	public String getDeterminantSetString() {
		return detSetStr;
	}

	/**
	 * @return The dependent columns set
	 */
	public List<String> getDependentSet() {
		return depCols;
	}

	/**
	 * @return Dependent set expressed as a string
	 */
	public String getDependentSetString() {
		return depSetStr;
	}

	/**
	 * Extent generator for the <i>Normalisation pattern</i>: builds the IQL
	 * queries supplying the extent of each object added or deleted at a given
	 * step of the decomposition.
	 * 
	 * @author Tudor Dobrila
	 * 
	 */
	static class ExtentGenerator implements IExtentGenerator {

		private final String origTable;
		private final String newTable;
		private final String detSetStr;
		private final List<String> detSet;
		private final List<String> depCols;
		// Generator part binding each determinant column: "{x,c} <- <<t, c>>..."
		private final String detQuery;

		public ExtentGenerator(final String origTable, final String newTable,
				final String detSetStr, final List<String> detSet, final List<String> depCols) {
			this.origTable = origTable;
			this.newTable = newTable;
			this.detSetStr = detSetStr;
			this.detSet = detSet;
			this.depCols = depCols;

			// Pre-build the common determinant generator reused by steps 1, 5, 6.
			final StringBuilder detBuilder = new StringBuilder();
			for (String col : detSet) {
				detBuilder.append("{x,").append(col).append("} <- <<").append(origTable);
				detBuilder.append(", ").append(col).append(">>");
			}
			detQuery = detBuilder.toString();
		}

		/**
		 * Generate the IQL extent queries needed at the given decomposition
		 * step. Steps 3 and 4 (constraints) have no extents, so any step other
		 * than 1, 2, 5 or 6 yields an empty list.
		 * 
		 * @param step
		 *            Decomposition step number
		 * @return List of IQL queries, one per object handled at that step
		 */
		@Override
		public List<String> getExtentsForStep(int step) {
			List<String> result = new ArrayList<String>();
			StringBuilder iql = new StringBuilder();

			switch (step) {
			case 1:
				// Step 1 - add new table: distinct tuples of the determinant.
				// A single-column determinant needs explicit tuple braces.
				iql.append("distinct [ ");
				if (detSet.size() == 1) {
					iql.append("{");
				}
				iql.append(detSetStr);
				if (detSet.size() == 1) {
					iql.append("}");
				}
				iql.append(" | ");
				iql.append(detQuery);
				iql.append(" ]");
				result.add(iql.toString());
				break;
			case 2:
				// Step 2 - add determinant columns to the newly created table
				for (String col : detSet) {
					// Generate extent as IQL query
					iql.setLength(0);
					iql.append("distinct [ {");
					iql.append(detSetStr).append(", ").append(col).append("} | ");
					if (detSet.size() == 1) {
						iql.append("{");
					}
					iql.append(detSetStr);
					if (detSet.size() == 1) {
						iql.append("}");
					}
					iql.append(" <- <<").append(origTable).append(">> ]");
					result.add(iql.toString());
				}
				break;
			case 5:
				// Step 5 - add dependent columns into newly created table
				for (String col : depCols) {
					// Generate extent as IQL query
					iql.setLength(0);
					iql.append("distinct [ {").append(detSetStr).append(",").append(col);
					iql.append("} | ").append(detQuery).append("; ").append("{x,");
					iql.append(col).append("} <- <<").append(origTable);
					iql.append(", ").append(col).append(">> ]");
					result.add(iql.toString());
				}
				break;
			case 6:
				// Step 6 - remove dependent columns from source table; their
				// values are now read from the new table.
				for (String col : depCols) {
					// Generate extent as IQL query
					iql.setLength(0);
					iql.append("[ {x,").append(col).append("} | ").append(detQuery).append("; ");
					iql.append("{").append(detSetStr).append(",").append(col).append("} <- <<");
					iql.append(newTable).append(", ").append(col).append(">> ]");
					result.add(iql.toString());
				}
				break;
			default:
			}
			return result;
		}
	}
}
