package entrance;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import utilDataStruct.FileStruct;
import utilities.AttrConstraintHandler;
import utilities.FinalHandler;
import utilities.ResultLimitHandler;
import utilities.ColumnAdjustmentHandler;
import utilities.EigenvalueHandler;
import utilities.JoinHandler;
import utilities.Pretreatment;
import utilities.SingleTableHandler;
import utilities.UniqueFilterHandler;
import utilities.XmlConfigReader;
import dataFromXml.AttrConstraint;
import dataFromXml.DataFilter;
import dataFromXml.Eigenvalue;
import dataFromXml.FileFilter;
import dataFromXml.Join;
import dataFromXml.Match;
import dataFromXml.Modify;
import dataFromXml.ResultLimit;
import dataFromXml.Rule;
import dataFromXml.SubMatch;
import dataFromXml.Transaction;
import dataFromXml.UniqueFilter;

/**
 * Driver that reads an XML rule profile and generates Hadoop MapReduce code
 * for each rule into a per-rule "procedureN" output folder. The actual file
 * emission is delegated to the handler classes in {@code utilities}.
 */
public class HadoopCodeGenerate {

	/** Folder holding the MR code templates the handlers instantiate. */
	final String frameFolder = "src/template/";
	/** Root of the parsed XML configuration tree. */
	Transaction root;
	/** Output folder; receives one "procedureN" subfolder per rule. */
	String targetFolder;
	/** Sequence number of the generated MR jobs; reset to 0 for each rule. */
	int jobNum;

	/**
	 * Parses the XML profile named by {@code args[0]} and prepares the output
	 * folder named by {@code args[1]}.
	 *
	 * @param args {@code args[0]} = path of the XML profile,
	 *             {@code args[1]} = output folder for the generated code
	 * @throws IllegalArgumentException if fewer than two arguments are supplied
	 */
	public HadoopCodeGenerate(String[] args) {
		// Guard direct construction; main() checks too, but a raw
		// ArrayIndexOutOfBoundsException here would be misleading.
		if (args == null || args.length < 2) {
			throw new IllegalArgumentException("Expected arguments: <profile> <out>");
		}

		XmlConfigReader reader = new XmlConfigReader(args[0]);
		root = reader.getXMLTree();

		targetFolder = args[1];
		Pretreatment.setFilePath(targetFolder, 0);
	}

	/**
	 * Walks every rule in the XML tree and generates its MapReduce code:
	 * first the match phase (per-table filters plus an optional join), then
	 * the modify phase (eigenvalue jobs and the result limit), and finally
	 * the driver code that chains all generated jobs together.
	 */
	public void handleXMLData() {

		List<Rule> ruleList = root.getRuleList();
		for (int ri = 0; ri < ruleList.size(); ri++) {

			// Each rule gets its own folder and restarts job numbering.
			String tFolder = targetFolder + File.separator + "procedure" + ri;
			Pretreatment.setFilePath(tFolder, 1);
			jobNum = 0;

			// ----- match part -----
			List<FileStruct> fileList = new ArrayList<>();
			// fnList.get(i) counts how many processing steps (temp files) have
			// been generated so far for sub-match i.
			List<Integer> fnList = new ArrayList<>();
			Map<String, String> nameToType = new HashMap<>();

			Match matchNode = ruleList.get(ri).getMatchNode();
			List<SubMatch> smList = matchNode.getSubMatchList();
			for (int smi = 0; smi < smList.size(); smi++) {

				fnList.add(0);
				FileFilter ffNode = smList.get(smi).getFileFilterNode();
				fileList.add(new FileStruct(ffNode));

				// Record attribute name -> type. Assumes the two space-separated
				// lists have the same length — TODO confirm against the XML schema;
				// a shorter type list would throw ArrayIndexOutOfBoundsException.
				String[] attrNames = ffNode.getAttrNames().split(" ");
				String[] attrTypes = ffNode.getAttrTypes().split(" ");
				for (int nti = 0; nti < attrNames.length; nti++) {
					nameToType.put(attrNames[nti], attrTypes[nti]);
				}

				DataFilter dfNode = smList.get(smi).getDataFilterNode();
				if (dfNode != null) {
					// One MR job per unique filter; each step reads the temp file
					// produced by the previous step for this sub-match.
					for (UniqueFilter uf : dfNode.getUniqueFilterList()) {
						int step = fnList.get(smi);
						UniqueFilterHandler.UniqueFilterMRFile(tFolder, frameFolder);
						UniqueFilterHandler.UniqueFilterProfile(tFolder, frameFolder, ffNode,
								uf.getUniqueFilter(), jobNum++, step + 1);
						fnList.set(smi, step + 1);
					}

					// One MR job per attribute constraint, chained the same way.
					for (AttrConstraint ac : dfNode.getAttrConstraintList()) {
						int step = fnList.get(smi);
						AttrConstraintHandler.AttrConstraintMRFile(tFolder, frameFolder);
						AttrConstraintHandler.AttrConstraintProfile(tFolder, frameFolder, ffNode,
								ac, jobNum++, step + 1);
						fnList.set(smi, step + 1);
					}
				}
			}

			Join joinNode = matchNode.getJoinNode();
			// Space-separated attribute names of the match result; consumed by
			// the result-limit handler below.
			String attributesList;
			if (joinNode != null) {

				ColumnAdjustmentHandler.ColumnAdjustmentMRFile(tFolder, frameFolder);
				String[] tableList = joinNode.getTableNames().split(" ");
				String[] attrList = joinNode.getJoinAttr().split(" ");

				// Emit one column-adjustment job per sub-match, pairing each table
				// with its join attribute by name.
				for (int smi = 0; smi < smList.size(); smi++) {
					int step = fnList.get(smi);
					FileFilter ffNode = smList.get(smi).getFileFilterNode();
					for (int ti = 0; ti < tableList.length; ti++) {
						if (ffNode.getTableName().equals(tableList[ti])) {
							ColumnAdjustmentHandler.ColumnAdjustmentProfile(tFolder, frameFolder,
									ffNode, attrList[ti], jobNum++, step + 1);
							break;
						}
					}
					// NOTE(review): the step counter advances even when no table
					// name matched and no adjustment job was generated — confirm
					// every sub-match table always appears in the join's table list.
					fnList.set(smi, step + 1);
				}

				JoinHandler.JoinMRFile(tFolder, frameFolder, fileList, joinNode.getJoinType());
				attributesList = JoinHandler.JoinProfile(tFolder, frameFolder, fileList, fnList,
						joinNode, jobNum++);
			} else {
				// Single-table match: no join job needed; just move the temp file
				// into the MatchResult folder and reuse the table's attribute names.
				attributesList = smList.get(0).getFileFilterNode().getAttrNames();
				SingleTableHandler.PassSingleFileIntoMatchResult(tFolder, fileList, fnList);
			}

			// ----- modify part -----
			Modify modifyNode = ruleList.get(ri).getModifyNode();
			List<Eigenvalue> eigenList = modifyNode.getEigenvalueList();
			// Index ei is passed through to the profile, so keep the indexed loop.
			for (int ei = 0; ei < eigenList.size(); ei++) {
				EigenvalueHandler.EigenvalueMRFile(tFolder, frameFolder);
				EigenvalueHandler.EigenvalueProfile(tFolder, frameFolder, eigenList.get(ei),
						jobNum++, ei);
			}

			ResultLimit rlNode = modifyNode.getResultLimitNode();
			ResultLimitHandler.AttributeMRFile(tFolder, frameFolder, nameToType, attributesList,
					eigenList, rlNode);
			ResultLimitHandler.AttributeProfile(tFolder, frameFolder, jobNum++);

			// Generate the driver that wires all jobs of this rule together.
			FinalHandler.FinishCode(tFolder, frameFolder, nameToType, fileList, ri);
		}
	}

	/**
	 * Entry point. Expects exactly two arguments: the XML profile path
	 * (e.g. {@code Profile/PrimitiveProfiles.xml}) and the output folder.
	 */
	public static void main(String[] args) {

		if (args.length != 2) {
			System.err.println("Usage: parameter <profile> <out>");
			System.exit(-1);
		}
		HadoopCodeGenerate demo = new HadoopCodeGenerate(args);
		demo.handleXMLData();
	}
}
