package com.etilize.importer;

import com.etilize.importer.TimeStatGenerator;
import java.sql.*;
import java.io.*;
import java.util.zip.*;
import java.util.*;

import org.apache.commons.net.PrintCommandListener;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPConnectionClosedException;
import org.apache.commons.net.ftp.FTPReply;


public class DeltaImporter {

    // Bit masks selecting which optional add-on feeds are enabled.
    public static final int ACCESSORIES_MASK = 8;
    public static final int DETAIL_MASK = 2;
    public static final int SIMILAR_MASK = 4;
    public static final int UPSELL_MASK = 1;

    // Runtime configuration, populated from importer.properties by
    // validateAndSetProperties().
    public static String dBURL;
    public static String dB;          // DB product name, read from JDBC metadata
    public static String dBUser;
    public static String dBPassword;
    public static String dBName;      // current catalog name, read from the connection
    public static String ftpUser;
    public static String ftpPass;
    public static String locale;
    public static String accessories;
    public static String detail;
    public static String upsell;
    public static String similar;
    public static String ftpLanding;
    public static String ftpMode;     // "active" or "passive" (anything else -> passive)
    public static boolean deltaRollback;

    public static final String mysqlEngine = "MyISAM";
    public static Connection con = null; // shared JDBC connection; opened/closed per phase

    // localSequence: last imported sequence (from import_summary);
    // latestSequence: newest delta sequence on the FTP server;
    // fullLoadSequence: newest full-load sequence on the FTP server;
    // interSequence: purpose not visible in this chunk — TODO confirm.
    static int localSequence;
    static int latestSequence;
    static int fullLoadSequence;
    static int interSequence;

    // import_summary variables (move to a separate Logger class)
    static int importSummaryRecordRef = 0; // record_id of the row tracking this run

    public static final String ftpServer = "ftp.etilize.com";
    public static final boolean binaryMode = true;
    public static boolean passiveMode = true; // overridden from the "ftpMode" property

    public static final int threshold = 4;

    // One constant per SQL statement template produced by statementBuilder().
    public static enum Operation {
        CREATE_IMPORT_SUMMARY, INSERT_SUMMARY, UPDATE_SUMMARY, READ_SUMMARY, SELECT_MAX_RECORD_ID, DROP, CREATE, LOAD, INDEX, RENAME, LOAD_NEW, LOAD_UPDATED, LOAD_DELETED, INSERT_NEW, DELETE_UPDATED, INSERT_UPDATED, DELETE_REMOVED, REPLACE_REMOVE, REPLACE_RENAME
    };

    // Timers for coarse phase profiling (FTP, file, and DB operations).
    public static TimeStatGenerator ftpTime;
    public static TimeStatGenerator fileOperationTime;
    public static TimeStatGenerator DBOperationTime;
    public static TimeStatGenerator DBSensitiveTime;

    // Remote feed-file paths iterated by ftpDownload().
    public static ArrayList<String> fileList;

    // Lookup tables; populated elsewhere in the file — contents not visible here.
    static Map<String, String> fileMap = null;
    static Map<String, String> fieldMap = null;
    static Map<String, String> locationMap = null;
    static Map<String, String> indexMap = null;
    static ArrayList<String> contentTablesList = null;
    static ArrayList<String> taxonomyTablesList = null;
    
    // Per-table ALTER statements adding indexes / primary keys after a bulk
    // load; "$" is a placeholder replaced with the actual table name.
    // NOTE: the "catery*" spellings below are the live index names in existing
    // schemas — do not "fix" them here without a migration.
    // NOTE(review): "units" appears twice (secondary index in one entry, PK in
    // another) — presumably applied as two separate ALTER passes; confirm.
    public static final String[][] mysqlIndexList = {
        {"attributenames",
                 "ALTER TABLE $ ADD INDEX attributenames_attributeID (attributeid),"
                 + "ADD INDEX attributenames_localeID (localeid)"},
        {"category",
                 "ALTER TABLE $ ADD INDEX catery_categoryID (categoryid)"},
        {"categorydisplayattributes",
                 "ALTER TABLE $ ADD INDEX caterydisplayattributes_hID (headerid),"
                 + "ADD INDEX caterydisplayattributes_cID (categoryid),"
                 + "ADD INDEX caterydisplayattributes_aID (attributeid)"},
        {"categoryheader",
                 "ALTER TABLE $ ADD INDEX cateryheader_headerID (headerid),"
                 + "ADD INDEX cateryheader_categoryID (categoryid)"},
        {"categorynames",
                 "ALTER TABLE $ ADD INDEX caterynames_categoryID (categoryid),"
                 + "ADD INDEX caterynames_localeID (localeid)"},
        {"categorysearchattributes",
                 "ALTER TABLE $ ADD INDEX caterysearchattributes_aID (attributeid),"
                 + "ADD INDEX caterysearchattributes_cID (categoryid)"},
        {"headernames",
                 "ALTER TABLE $ ADD INDEX headernames_headerID(headerid),"
                 + "ADD INDEX headernames_localeID (localeid)"},
        {"locales",
                 "ALTER TABLE $ ADD INDEX locales_languageCode (languagecode),"
                 + "ADD CONSTRAINT locales_PK PRIMARY KEY (localeid),"
                 + "ADD INDEX locales_countryCode (countrycode)"},
        {"manufacturer",
                 "ALTER TABLE $ ADD CONSTRAINT tmanufacturer_PK PRIMARY KEY(manufacturerid)"},
        {"product",
                 "ALTER TABLE $ ADD CONSTRAINT product_PK PRIMARY KEY(productid),"
                 + "ADD INDEX product_isAccessory (isaccessory),"
                 + "ADD INDEX product_manufacturerID (manufacturerID),"
                 + "ADD INDEX product_categoryID (categoryID)"},
        {"productaccessories",
                 "ALTER TABLE $ ADD INDEX productaccessories_productID (productid),"
                 + "ADD INDEX productaccessories_isPreferred (ispreferred),"
                 + "ADD INDEX productacesories_acesoryPID (accessoryproductid)"},
        {"productattribute",
                 "ALTER TABLE $ ADD INDEX productattribute_productID (productid),"
                 + "ADD INDEX productattribute_categoryID (categoryid),"
                 + "ADD INDEX productattribute_attributeID (attributeid),"
                 + "ADD INDEX productattribute_localeID (localeid)"},
        {"productdescriptions",
                 "ALTER TABLE $ ADD INDEX productdescriptions_productID (productid),"
                 + "ADD INDEX productdescriptions_localeID (localeid)"},
        {"productimages",
                 "ALTER TABLE $ ADD INDEX productimages_productID (productid)"},
        {"productkeywords",
                 "ALTER TABLE $ ADD INDEX productkeywords_productID (productid),"
                 + "ADD INDEX productkeywords_keywords (keywords(255)),"
                 + "ADD INDEX productkeywords_localeID (localeid)"},
        {"productlocales",
                 "ALTER TABLE $ ADD INDEX productlocales_productID (productid),"
                 + "ADD INDEX productlocales_localeID (localeid),"
                 + "ADD INDEX productlocales_status (status)"},
        {"productsimilar",
                 "ALTER TABLE $ ADD INDEX productsimilar_productID (productid),"
                 + "ADD INDEX productsimilar_spID (similarproductid),"
                 + "ADD INDEX productsimilar_localeID (localeid)"},
        {"productskus",
                 "ALTER TABLE $ ADD INDEX productskus_productID (productid),"
                 + "ADD INDEX productskus_localeID (localeid)"},
        {"productupsell",
                 "ALTER TABLE $ ADD INDEX productupsell_productID (productid),"
                 + "ADD INDEX productupsell_upsellProductID (upsellproductid),"
                 + "ADD INDEX productupsell_localeID (localeid)"},
        {"unitnames",
                 "ALTER TABLE $ ADD INDEX unitnames_unitID (unitid),"
                 + "ADD INDEX unitnames_localeID (localeid)"},
        {"units",
                 "ALTER TABLE $ ADD INDEX units_baseUnitID (baseunitid)"},
        {"search_attribute_values",
                 "ALTER TABLE $ ADD CONSTRAINT search_attribute_values_PK PRIMARY KEY(valueid),"
                 + "ADD INDEX search_attrval_value (value)"},
        {"search_attribute",
                 "ALTER TABLE $ ADD INDEX search_attribute_productID (productid),"
                 +"ADD INDEX search_attribute_attributeID (attributeid),"
                 + "ADD INDEX search_attribute_valueID (valueid),"
                 + "ADD INDEX search_attribute_absoluteValue (absolutevalue),"
                 + "ADD INDEX search_attribute_isAbsolute (isabsolute),"
                 + "ADD INDEX search_attribute_localeID (localeid)"},
        {"units",
                 "ALTER TABLE $ ADD CONSTRAINT tunits_PK PRIMARY KEY(unitid)"},
    };

    // Column DDL for each content/taxonomy table, keyed by table name; used to
    // build CREATE TABLE statements (engine: mysqlEngine) for the load tables.
    public static final String[][] mysqlFieldList = {
        {"product",
                "productid INTEGER default 0 NOT NULL,"
                + "manufacturerid INTEGER default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "mfgpartno VARCHAR (70) default '' NOT NULL,"
                + "categoryid INTEGER default 0 NOT NULL,"
                + "isaccessory BOOL default 0 NOT NULL,"
                + "equivalency DOUBLE default 0 NOT NULL,"
                + "creationdate TIMESTAMP,"
                + "modifieddate TIMESTAMP,"
                + "lastupdated TIMESTAMP"},

        {"productattribute",
                "productid INTEGER default 0 NOT NULL,"
                + "attributeid BIGINT default 0 NOT NULL,"
                + "categoryid INTEGER default 0 NOT NULL,"
                + "displayvalue MEDIUMTEXT,"
                + "absolutevalue DOUBLE default 0 NOT NULL,"
                + "unitid INTEGER default 0 NOT NULL,"
                + "isabsolute BOOL default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"productdescriptions",
                "productid INTEGER default 0 NOT NULL,"
                + "description MEDIUMTEXT default '' NOT NULL,"
                + "isdefault BOOL default 0 NOT NULL,"
                + "type INTEGER default 0 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"productimages",
                "productid INTEGER default 0 NOT NULL,"
                + "type VARCHAR (60) default '' NOT NULL,"
                + "status VARCHAR (60) default '' NOT NULL"},

        {"productkeywords",
                "productid INTEGER default 0 NOT NULL,"
                + "keywords MEDIUMTEXT default '' NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"productlocales",
                "productid INTEGER default 0 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "status VARCHAR (60) default '' NOT NULL"},

        {"productskus",
                "productid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (60),"
                + "sku VARCHAR (60),"
                + "localeid INTEGER default 0 NOT NULL,"
                + "addeddate TIMESTAMP,"
                + "discontinueddate TIMESTAMP"},

        {"search_attribute",
                "productid INTEGER default 0 NOT NULL,"
                + "attributeid BIGINT default 0 NOT NULL,"
                + "valueid INTEGER default 0 NOT NULL,"
                + "absolutevalue DOUBLE default 0 NOT NULL,"
                + "isabsolute BOOL default 0 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"search_attribute_values",
                "valueid INTEGER default 0 NOT NULL,"
                + "value VARCHAR (255) default '' NOT NULL,"
                + "absolutevalue DOUBLE default 0 NOT NULL,"
                + "unitid INTEGER default 0 NOT NULL,"
                + "isabsolute BOOL default 0 NOT NULL"},

        {"attributenames",
                "attributeid BIGINT default 0 NOT NULL,"
                + "name VARCHAR (110) default '' NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"category",
                "categoryid INTEGER default 0 NOT NULL,"
                + "parentcategoryid INTEGER,"
                + "isactive BOOL default 1 NOT NULL,"
                + "ordernumber INTEGER default 0 NOT NULL,"
                + "catlevel TINYINT default 0 NOT NULL,"
                + "displayorder INTEGER default 0 NOT NULL,"
                + "lastupdated TIMESTAMP"},

        {"categorydisplayattributes",
                "headerid INTEGER default 0 NOT NULL,"
                + "categoryid INTEGER default 0 NOT NULL,"
                + "attributeid BIGINT default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "latetype INTEGER default 0 NOT NULL,"
                + "defaultdisplayorder INTEGER default 1 NOT NULL,"
                + "displayorder INTEGER default 0 NOT NULL,"
                + "lastupdated TIMESTAMP"},

        {"categoryheader",
                "headerid INTEGER default 0 NOT NULL,"
                + "categoryid INTEGER default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "latetype INTEGER default 0 NOT NULL,"
                + "defaultdisplayorder INTEGER default 1 NOT NULL,"
                + "displayorder INTEGER default 0 NOT NULL,"
                + "lastupdated TIMESTAMP"},

        {"categorynames",
                "categoryid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (80) default '' NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"categorysearchattributes",
                "categoryid INTEGER default 0 NOT NULL,"
                + "attributeid BIGINT default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "ispreferred BOOL default 0 NOT NULL,"
                + "lastupdated TIMESTAMP"},

        {"headernames",
                "headerid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (80) default '' NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"locales",
                "localeid INTEGER default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "languagecode VARCHAR (5) default '' NOT NULL,"
                + "countrycode VARCHAR (5) default '' NOT NULL,"
                + "name VARCHAR (80) default '' NOT NULL"},

        {"manufacturer",
                "manufacturerid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (60) default '' NOT NULL,"
                + "address1 VARCHAR (60),"
                + "address2 VARCHAR (60),"
                + "city VARCHAR (30),"
                + "zip VARCHAR (10),"
                + "url VARCHAR (100),"
                + "phone VARCHAR (20),"
                + "fax VARCHAR (20),"
                + "country VARCHAR (60),"
                + "state VARCHAR (60),"
                + "lastupdated TIMESTAMP"},

        {"units",
                "unitid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (60) default '' NOT NULL,"
                + "baseunitid INTEGER default 0 NOT NULL,"
                + "multiple DOUBLE default 0 NOT NULL"},

        {"unitnames",
                "unitid INTEGER default 0 NOT NULL,"
                + "name VARCHAR (80) default '' NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"taxonomyhistory",
                "lastupdated TIMESTAMP,"
                + "revisiondate TIMESTAMP"},

        {"productaccessories",
                "productid INTEGER default 0 NOT NULL,"
                + "accessoryproductid INTEGER default 0 NOT NULL,"
                + "isactive BOOL default 1 NOT NULL,"
                + "ispreferred BOOL default 0 NOT NULL,"
                + "isoption BOOL default 0 NOT NULL,"
                + "note MEDIUMTEXT default '' NOT NULL"},

        {"productsimilar",
                "productid INTEGER default 0 NOT NULL,"
                + "similarproductid INTEGER default 0 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"},

        {"productupsell",
                "productid INTEGER default 0 NOT NULL,"
                + "upsellproductid INTEGER default 0 NOT NULL,"
                + "localeid INTEGER default 0 NOT NULL"}
    };

    // Scratch tables of product ids built while applying a delta; each is a
    // single indexed productid column. NOTE(review): presumably newIDs /
    // updatedIDs / removedIDs feed the LOAD_NEW / LOAD_UPDATED / LOAD_DELETED
    // operations — confirm against statementBuilder().
    public static final String[][] mysqlAuxiliaryFieldList = {
        {"finalIDs",
                 "productid INTEGER default 0 NOT NULL,"
                 + "KEY (productid)"},

        {"newIDs",
                 "productid INTEGER default 0 NOT NULL,"
                 + "KEY (productid)"},

        {"updatedIDs",
                 "productid INTEGER default 0 NOT NULL,"
                 + "KEY (productid)"},

        {"removedIDs",
                 "productid INTEGER default 0 NOT NULL,"
                 + "KEY (productid)"}
    };

    // Column DDL for the import_summary bookkeeping table: one auto-increment
    // row per import attempt (sequence, catalog, locale, status, timing, notes).
    // Read/created by validateAndSetProperties().
    public static final String import_summaryFieldList =
            "record_id int(11) NOT NULL auto_increment,"
            + "sequence INTEGER default 0,"
            + "catalog VARCHAR (50),"
            + "locale VARCHAR (50),"
            + "attempt_status INTEGER default 0,"
            + "attempt_complete_time timestamp,"
            + "attempt_summary text,"
            + "primary key (record_id)";

    // Base feeds: {table name, CSV file-name suffix}. Suffixes beginning with
    // "G_" appear to be global (locale-independent) feeds — TODO confirm.
    // NOTE(review): "productlocales" and "locales" both map to "_locales.csv";
    // verify this is intentional and not a copy-paste slip.
    public static final String[][] basicFileList = {
        {"product", "_B_product.csv"},
        {"productdescriptions", "_B_productdescriptions.csv"},
        {"productattribute", "_B_productattributes.csv"},
        {"productskus", "_productskus.csv"},
        {"productimages", "_B_productimages.csv"},
        {"productkeywords", "_B_productkeywords.csv"},
        {"productlocales", "_locales.csv"},
        {"search_attribute", "_B_searchattributes.csv"},
        {"search_attribute_values", "G_B_searchattributevalues.csv"},
        {"attributenames", "_attributenames.csv"},
        {"category", "G_category.csv"},
        {"categorydisplayattributes", "G_categorydisplayattributes.csv"},
        {"categoryheader", "G_categoryheader.csv"},
        {"categorynames", "_categorynames.csv"},
        {"categorysearchattributes", "G_categorysearchattributes.csv"},
        {"headernames", "_headernames.csv"},
        {"locales", "_locales.csv"},
        {"manufacturer", "G_manufacturer.csv"},
        {"units", "G_units.csv"},
        {"unitnames", "_unitnames.csv"}
    };

        // Optional add-on feeds: {properties key, table name, CSV file-name suffix}.
        // The first element matches a property read in validateAndSetProperties()
        // (accessories / detail / upsell / similar).
        public static final String[][] addonFileList = {
        {"accessories","productaccessories", "_A_productaccessories.csv"},
        {"detail", "productattribute", "_D_productattributes.csv"},
        {"upsell", "productupsell", "_U_productupsell.csv"},
        {"similar", "productsimilar", "_SIM_productsimilar.csv"}
    };
    
    // Change these to be platform agnostic and point to location where program is
    // Replace all \\ to File.pathSeparator for platform independence

    // Absolute paths resolved at startup from the working directory
    // (see validateAndSetProperties()).
    public static String propertiesFileAbsolutePath;
    public static String localSequenceAbsolutePath;
    //public static final String SQLStatements;
    public static String dataLocation; // root directory for downloaded/unzipped feed data
    // Convert all methods to return Error codes and exit on wrong status

    /**
     * Bootstraps an importer run:
     * <ol>
     *   <li>loads importer.properties from the working directory and copies
     *       its values into the static configuration fields,</li>
     *   <li>verifies DB connectivity and records the catalog / DB product name,</li>
     *   <li>reads the latest delta sequence and the full-load sequence from
     *       the FTP server,</li>
     *   <li>ensures the import_summary table exists, reading localSequence
     *       from it (or creating the table and defaulting to 0),</li>
     *   <li>inserts a preliminary attempt row and remembers its record_id in
     *       importSummaryRecordRef.</li>
     * </ol>
     * Exits the JVM on unrecoverable setup failures (bad DB parameters,
     * unreachable FTP server, missing properties file).
     */
    public static void validateAndSetProperties() {

        FTPClient ftp = new FTPClient();
        BufferedReader reader = null;

        try {

            System.out.println("Getting files from: " + System.getProperty("user.dir"));

            propertiesFileAbsolutePath = System.getProperty("user.dir") + File.separator + "importer.properties";
            localSequenceAbsolutePath = System.getProperty("user.dir") + File.separator + "local.sequence";

            File propertiesFile = new File(propertiesFileAbsolutePath);

            Properties parameters = new Properties();
            parameters.load(new FileReader(propertiesFile));

            dBURL = parameters.getProperty("dBURL");
            dBUser = parameters.getProperty("dBUser");
            dBPassword = parameters.getProperty("dBPassword");
            ftpUser = parameters.getProperty("ftpUser");
            ftpPass = parameters.getProperty("ftpPass");
            locale = parameters.getProperty("locale");
            accessories = parameters.getProperty("accessories");
            detail = parameters.getProperty("detail");
            upsell = parameters.getProperty("upsell");
            similar = parameters.getProperty("similar");
            dataLocation = parameters.getProperty("dataLocation");
            ftpLanding = parameters.getProperty("ftpLanding");
            ftpMode = parameters.getProperty("ftpMode");

            // Null-safe: a missing "deltaRollback" property now means disabled
            // instead of throwing a NullPointerException.
            deltaRollback = "enabled".equalsIgnoreCase(parameters.getProperty("deltaRollback"));

            System.out.println("Verifying DB parameters");
            if (connectToDB())
                System.out.println("Connected to DB successfully!");
            else {
                System.err.println("DB parameters incorrect");
                System.exit(1);
            }
            try {
                dBName = con.getCatalog();
                dB = con.getMetaData().getDatabaseProductName();
                con.close();
            } catch (SQLException e) {
                // BUGFIX: the message used to be computed and silently discarded.
                System.err.println(e.getMessage());
                System.exit(1);
            }

            // A single-space dataLocation means "use <cwd>/Data/" as the default.
            if (dataLocation.contentEquals(" "))
                dataLocation = System.getProperty("user.dir") + File.separator + "Data" + File.separator;

            System.out.println("Using dataLocation: " + dataLocation);

            // FTP transfer mode: anything other than "active" falls back to passive.
            passiveMode = !ftpMode.equalsIgnoreCase("active");

            // Connect to the FTP server; time the sequence lookup below.
            ftpTime.startTimer();

            ftp.connect(ftpServer);
            System.out.println("Connected to " + ftpServer + ".");

            int reply = ftp.getReplyCode();

            if (!FTPReply.isPositiveCompletion(reply)) {
                ftp.disconnect();
                System.err.println("FTP Server refused connection");
                System.exit(1);
            }
        }

        catch (IOException e) {
            if (ftp.isConnected()) {
                try {
                    ftp.disconnect();
                } catch (IOException ignored) {
                    // best-effort cleanup; the connect failure is reported below
                }
            }
            // BUGFIX: the error message and exit used to be skipped when the
            // client never reached the connected state, letting the method
            // carry on with an unusable FTP client.
            System.err.println("Could not connect to Server");
            System.exit(1);
        }

        // Read the latest delta sequence and the full-load sequence from FTP.
        try {
            ftp.login(ftpUser, ftpPass);

            if (binaryMode)
                ftp.setFileType(FTP.BINARY_FILE_TYPE);

            if (passiveMode)
                ftp.enterLocalPassiveMode();

            // Load latest sequence.
            // NOTE(review): retrieveFileStream() returns null when the remote
            // file is missing; that would surface here as a NullPointerException
            // caught by the generic handler below.
            reader = new BufferedReader(new InputStreamReader(ftp.retrieveFileStream("/NorthAmerica/job_1101/delta/.sequence")));
            latestSequence = Integer.parseInt(reader.readLine());
            reader.close(); reader = null;

            if (!ftp.completePendingCommand()) {
                ftp.logout();
                ftp.disconnect();
                System.out.println("Abnormal exit on ftp pending");
            }

            // Load full-load sequence.
            reader = new BufferedReader(new InputStreamReader(ftp.retrieveFileStream("/NorthAmerica/job_1101/.sequence")));
            fullLoadSequence = Integer.parseInt(reader.readLine());
            reader.close(); reader = null;

            if (!ftp.completePendingCommand()) {
                ftp.logout();
                ftp.disconnect();
                System.out.println("Abnormal exit on ftp pending");
            }

            // Stop ftp timer and display the elapsed time.
            System.out.println("Time taken to read sequence from ftp: " + ftpTime.stopTimer() + " secs");
        }

        catch (FTPConnectionClosedException e) {
            System.err.println("Server closed connection");
        }

        catch (IOException e) {
            System.err.println("File IO Error");
        }

        catch (NumberFormatException e) {
            // BUGFIX: typo in the original message ("nubmber").
            System.err.println("Sequence is not a number");
        }

        catch (Exception e) {
            System.err.println(e.getMessage());
        }

        finally {
            // BUGFIX: close the reader on failure paths too (it used to leak
            // whenever an exception fired between open and close).
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                }
            }
            if (ftp.isConnected()) {
                try {
                    ftp.disconnect();
                } catch (IOException ignored) {
                }
            }
        }

        // Check whether import_summary exists: read localSequence from it, or
        // create it and default localSequence to zero.
        try {

            if (!connectToDB())
                System.exit(1);

            Statement stmt = con.createStatement();
            ResultSet importSummaryResultSet;
            ResultSet tablePresenceCheck = con.getMetaData().getTables(dBName, null, "import_summary", null);

            if (tablePresenceCheck.next() && tablePresenceCheck.getString(3).contentEquals("import_summary")) {
                System.out.println("import_summary Table found");
                importSummaryResultSet = stmt.executeQuery(statementBuilder(Operation.READ_SUMMARY));
                if (importSummaryResultSet.next())
                    localSequence = importSummaryResultSet.getInt("sequence");

                else {
                    System.err.println("No records in import_summary table, setting localSequence to default value of Zero");
                    localSequence = 0;
                }
            }

            else {
                System.out.println("Creating import_summary table and setting localSequence to Zero");
                stmt.execute(statementBuilder(Operation.CREATE_IMPORT_SUMMARY));
                localSequence = 0;
            }

            // Insert the preliminary attempt row, then remember its record_id
            // so later phases can update the same row.
            stmt.execute(statementBuilder(Operation.INSERT_SUMMARY));
            importSummaryResultSet = stmt.executeQuery(statementBuilder(Operation.SELECT_MAX_RECORD_ID));
            importSummaryResultSet.next();
            importSummaryRecordRef = importSummaryResultSet.getInt("max(record_id)");

            con.close(); // also closes stmt and its result sets

        } catch (SQLException e) {
            System.err.println(e.getMessage());
        }

        System.out.println(localSequence);
    }

    
    // Overload method to facilitate no sequence passing
    // Delegates to ftpDownload(int, String, String); sequence 0 means
    // "no sequence directory" downstream.
    public static void ftpDownload(String ftpPrefix, String dBPrefix){
        ftpDownload(0, ftpPrefix, dBPrefix);
    }

    /**
     * Downloads every zipped CSV feed named in {@code fileList} from the FTP
     * server into {@code dataLocation}/&lt;sequence&gt;/ (the directory is
     * created if missing). Exits the JVM if the server cannot be reached.
     *
     * @param sequence  delta sequence number; 0 means "no sequence" and the
     *                  files are fetched from / stored in the unsequenced path
     * @param ftpPrefix remote directory prefix for the feed files
     * @param dBPrefix  suffix inserted before ".zip" in the remote file name
     */
    public static void ftpDownload(int sequence, String ftpPrefix, String dBPrefix){
        String ftpLocation;
        String refSequence = (sequence == 0) ? "" : Integer.toString(sequence);
        FTPClient ftp = new FTPClient();

        // Start recording time taken to download.
        ftpTime.startTimer();

        try {
            ftp.connect(ftpServer);
            System.out.println("Connected to " + ftpServer + ".");

            int reply = ftp.getReplyCode();

            if (!FTPReply.isPositiveCompletion(reply)) {
                ftp.disconnect();
                System.err.println("FTP Server refused connection");
                System.exit(1);
            }
        }

        catch (IOException e) {
            if (ftp.isConnected()) {
                try {
                    ftp.disconnect();
                } catch (IOException ignored) {
                    // best-effort cleanup; the connect failure is reported below
                }
            }
            // BUGFIX: the error message and exit used to be skipped when the
            // client never reached the connected state.
            System.err.println("Could not connect to Server");
            System.exit(1);
        }

        try {
            ftp.login(ftpUser, ftpPass);

            if (binaryMode)
                ftp.setFileType(FTP.BINARY_FILE_TYPE);

            if (passiveMode)
                ftp.enterLocalPassiveMode();

            for (String file : fileList) {
                // Local name: last path segment + DB product name + ".zip".
                String fileName = file.substring(file.lastIndexOf("/") + 1) + dB + ".zip";

                if (refSequence.contentEquals(""))
                    ftpLocation = ftpPrefix + file + dBPrefix + ".zip";
                else
                    ftpLocation = ftpPrefix + "/" + refSequence + file + dBPrefix + ".zip";

                File outputDir = new File(dataLocation + refSequence + File.separator);
                if (!outputDir.exists())
                    outputDir.mkdirs();

                // BUGFIX: try-with-resources replaces a bare finally that
                // called writer.close() unconditionally and threw a
                // NullPointerException whenever the FileOutputStream
                // constructor itself failed.
                try (BufferedOutputStream writer = new BufferedOutputStream(
                        new FileOutputStream(new File(dataLocation + refSequence + File.separator + fileName)))) {
                    System.out.println("Fetching File:" + fileName);
                    // BUGFIX: the boolean result used to be ignored; a failed
                    // transfer left a silent empty/partial file behind.
                    if (!ftp.retrieveFile(ftpLocation, writer))
                        System.out.println("FTP reported failure retrieving: " + ftpLocation);
                }
                catch (IOException e) {
                    System.out.println("Error in file getting file from FTP");
                    // BUGFIX: the message used to be computed and discarded.
                    System.out.println(e.getMessage());
                }
            }
        }

        catch (FTPConnectionClosedException e) {
            System.err.println("Server closed connection");
        }

        catch (IOException e) {
            System.err.println("File IO Error");
        }

        finally {
            if (ftp.isConnected()) {
                try {
                    ftp.logout();
                    ftp.disconnect();
                }
                catch (IOException ignored) {
                }
            }
            // Report time taken for download.
            System.out.println("Total time taken for download: " + ftpTime.stopTimer() + " secs");
        }
    }
    
    // Overload method to facilitate no sequence passing
    // Delegates to dataUnzip(int); sequence 0 selects the unsequenced data directory.
    public static void dataUnzip(){
        dataUnzip(0);
    }

    /**
     * Extracts every *.zip archive found in {@code dataLocation}/&lt;sequence&gt;/
     * into that same directory, overwriting any existing files.
     *
     * @param sequence delta sequence number; 0 means the unsequenced directory
     */
    public static void dataUnzip(int sequence){
         final int BUFFER = 2048;
         String refSequence = (sequence == 0) ? "" : Integer.toString(sequence);

         // Start recording File Operation time.
         fileOperationTime.startTimer();

         try {
             File dataDir = new File(dataLocation + refSequence);

             // BUGFIX: listFiles() returns null both when the path is missing
             // and when it is not a directory; the old exists()-only check
             // allowed a NullPointerException on the latter.
             File[] dataFileList = dataDir.listFiles();
             if (dataFileList == null) {
                 System.err.println("Data directory does not exist");
                 throw new IOException();
             }

             File outputRoot = new File(dataLocation + refSequence + File.separator);

             for (File zipFile : dataFileList) {
                 try {
                     // BUGFIX: endsWith avoids the StringIndexOutOfBoundsException
                     // the old lastIndexOf(".")-based check threw on names
                     // containing no dot at all.
                     if (!zipFile.isFile() || !zipFile.getName().endsWith(".zip"))
                         continue;

                     System.out.println("Processing Archive: " + zipFile.getName());

                     try (ZipInputStream contentZip =
                             new ZipInputStream(new BufferedInputStream(new FileInputStream(zipFile)))) {

                         ZipEntry csvEntry;
                         while ((csvEntry = contentZip.getNextEntry()) != null) {
                             File outFile = new File(outputRoot, csvEntry.getName());

                             // SECURITY (Zip Slip): archives come from an external
                             // FTP source; refuse entries whose name escapes the
                             // target directory (e.g. "../../etc/passwd").
                             if (!outFile.getCanonicalPath().startsWith(outputRoot.getCanonicalPath())) {
                                 System.err.println("Skipping suspicious zip entry: " + csvEntry.getName());
                                 continue;
                             }

                             int count;
                             byte[] buffer = new byte[BUFFER];

                             // BUGFIX: try-with-resources replaces a finally that
                             // called outCSV.flush()/close() unconditionally and
                             // NPE'd when the FileOutputStream constructor failed.
                             try (BufferedOutputStream outCSV =
                                     new BufferedOutputStream(new FileOutputStream(outFile), BUFFER)) {
                                 while ((count = contentZip.read(buffer, 0, BUFFER)) != -1) {
                                     outCSV.write(buffer, 0, count);
                                 }
                             } catch (IOException e) {
                                 System.err.println("Error in processing archive: " + zipFile.getName());
                                 e.printStackTrace();
                             }
                         }
                     }
                 }

                 catch (IOException e) {
                     e.printStackTrace();
                 }
             }

         }

         catch (IOException e) {
             e.printStackTrace();
         }

         fileOperationTime.stopTimer();
    }
    
    // Convenience overload: combine SKU files in the base data directory
    // (equivalent to passing sequence 0).
    public static void skuCombine(){
        skuCombine(0);
    }

    /**
     * Concatenates every file whose name contains "SKU" in the data directory
     * (optionally a per-sequence sub-directory) into a single
     * {@code <locale>_productskus.csv} file, recreating the output on each run.
     *
     * @param refSequence 0 for the base data directory, otherwise the delta
     *        sequence sub-directory to combine
     */
    public static void skuCombine(int refSequence){

        System.out.println("Combining SKU files");

        String skuDataLocation = (refSequence != 0)
                ? dataLocation + refSequence + File.separator
                : dataLocation;

        // Start recording file operation time
        fileOperationTime.startTimer();

        // Recreate the combined output file from scratch on every run.
        File outSKUCSV = new File(skuDataLocation + locale + "_productskus.csv");

        if (outSKUCSV.exists()){
            System.out.println("SKU File already exists, deleting");
            outSKUCSV.delete();
        }

        final int BUFFER_SIZE = 524288;
        byte[] buffer = new byte[BUFFER_SIZE];

        // try-with-resources guarantees both streams are closed; the original
        // leaked the output stream on FileNotFoundException.
        try (FileOutputStream out = new FileOutputStream(outSKUCSV, true)) {

            File csvDir = new File(skuDataLocation);

            if (csvDir.isDirectory()) {
                File[] csvFiles = csvDir.listFiles();
                if (csvFiles == null)
                    throw new IOException("Unable to list directory: " + skuDataLocation);

                for (File csvFile : csvFiles) {
                    if (!csvFile.getName().contains("SKU"))
                        continue;

                    // Append this SKU file's bytes to the combined output. The
                    // buffer is method-local, so the original's synchronized
                    // block added nothing and has been removed.
                    try (FileInputStream in = new FileInputStream(csvFile)) {
                        int amountRead;
                        while ((amountRead = in.read(buffer)) != -1)
                            out.write(buffer, 0, amountRead);
                        out.flush();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            } else {
                System.err.println("Was expecting a directory at: " + skuDataLocation);
                System.exit(1);
            }

        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Always record elapsed time, even on failure.
            fileOperationTime.stopTimer();
        }
    }

    /**
     * Full catalog import: rebuilds the complete "temp" tableset from the
     * current full data files, swaps it in for the production tables, then
     * records the full-load sequence as the local sequence.
     */
    public static void fullImport(){

        System.out.println("Creating temp Schema and loading tables");

        // Remove any temp tables left over from an earlier failed run
        tempSchemaCleanup();

        // Download / unzip files

        /** Generalize this // provide prefix for full download **/
        //ftpDownload(ftpLanding, dB);
        //dataUnzip();
        //skuCombine();

        // Create temp schema
        createTables("temp");

        // Load data
        loadDataIntoTables("temp");

        // Create indexes
        createIndexes("temp");

        // Only the drop/rename swap touches production tables; time it separately.
        DBSensitiveTime.startTimer();

        renameTables();

        DBSensitiveTime.stopTimer();

        // Perform clean-up
        tempSchemaCleanup();

        // If successful, update and write localSequence
        localSequence = fullLoadSequence;
    }

    /**
     * Incremental import: loads successive delta tablesets and merges them
     * into the production tables, advancing from the locally recorded
     * sequence (localSequence) towards the latest published sequence
     * (latestSequence).
     */
    public static void deltaImport(){

        interSequence = localSequence;

        /** Generalize this **/
        // NOTE(review): only referenced by the commented-out ftpDownload call below.
        String ftpPrefix = ftpLanding + "/delta";

        // Add proper references for delta import

        // Table creation references
        // Deltas need the auxiliary tables (finalIDs, newIDs, ...) on top of
        // the base schema, so extend fieldMap with their definitions.
        if(dB.equalsIgnoreCase("mysql"))
            for (int i = 0; i < mysqlAuxiliaryFieldList.length; i++)
                fieldMap.put(mysqlAuxiliaryFieldList[i][0], mysqlAuxiliaryFieldList[i][1]);

        // productID table reference
        fileMap.put("finalIDs", "_B_productid.csv");

        /*
         * Call code to pull data in tempDelta tables, use interSequence+1 to pull
         *
         */

        // Create tempDelta tables, tables will be identical. Loading will be different

        System.out.println("Creating tempDelta tables and loading data");

        // Perform clean-up
        tempSchemaCleanup();
        
        //ftpDownload(interSequence+1, ftpPrefix, dB);
        //dataUnzip(interSequence+1);
        //skuCombine(interSequence+1);


        // Create temp schema
        createTables("tempDelta");

        /*
         * Load data
         * Load will be from /sequence folder, interSequence+1 in this case
         * Atomic operation, decided in statement Builder
         *
         */
        loadDataIntoTables("tempDelta");

        // Create indexes
        createIndexes("tempDelta");

        // Everything loaded in tempDelta, advance interSequence
        interSequence++;

        // If no more deltas, goto insert or aggregate till you're there
        // Fold any remaining delta sequences into tempDelta before touching
        // production; in rollback mode a false return aborts the aggregation.

         while (interSequence != latestSequence){
             System.out.println("Processing tempAggregate from " + (interSequence + 1));

             if (deltaRollback) {
                 if (!aggregateDeltaWithRollback())
                     break;
             }
             else
                 aggregateDelta();

             interSequence++;
         }
        
        // Determine what to delete / insert
        formulateAuxTables("", "tempDelta");

        DBSensitiveTime.startTimer();
        // Code to insert data into production tables
        insertContent("", "tempDelta");
        replaceTaxonomy("", "tempDelta");
        
        DBSensitiveTime.stopTimer();

        tempSchemaCleanup();
        
        // Set localSequence to last successful value
        localSequence = interSequence;
    }

    /**
     * Rollback-mode variant of {@link #aggregateDelta()}: loads the next
     * delta sequence into tempAggregate and folds it into tempDelta.
     *
     * NOTE(review): this always returns false, which makes the aggregation
     * loop in deltaImport() stop after the first rollback-mode iteration —
     * presumably the rollback path is unfinished; confirm before relying on it.
     *
     * @return false, unconditionally (see note above)
     */
    public static boolean aggregateDeltaWithRollback () {
        
        // NOTE(review): only referenced by the commented-out ftpDownload call below.
        String ftpPrefix = ftpLanding + "/delta";
        //ftpDownload(interSequence + 1, ftpPrefix, dB);
        //dataUnzip(interSequence + 1);
        //skuCombine(interSequence + 1);
        createTables("tempAggregate");
        loadDataIntoTables("tempAggregate");
        createIndexes("tempAggregate");
        formulateAuxTables("tempDelta", "tempAggregate");
        insertContent("tempDelta", "tempAggregate");
        replaceTaxonomy("tempDelta", "tempAggregate");
        deleteTables("tempAggregate");


        return false;
        
    }
    
    /**
     * Aggregates the next delta sequence (interSequence + 1) into the
     * tempDelta tableset: loads the delta into tempAggregate, folds its
     * new / updated / removed product IDs into tempDelta, swaps the taxonomy
     * tables, then drops tempAggregate.
     */
    public static void aggregateDelta() {

        // Download / unzip steps are currently disabled; the unused ftpPrefix
        // local the original declared for them has been removed.
        //ftpDownload(interSequence + 1, ftpLanding + "/delta", dB);
        //dataUnzip(interSequence + 1);
        //skuCombine(interSequence + 1);

        createTables("tempAggregate");
        loadDataIntoTables("tempAggregate");
        createIndexes("tempAggregate");

        // Compute new/updated/removed IDs relative to tempDelta, then merge.
        formulateAuxTables("tempDelta", "tempAggregate");
        insertContent("tempDelta", "tempAggregate");
        replaceTaxonomy("tempDelta", "tempAggregate");

        // tempAggregate is no longer needed once merged.
        deleteTables("tempAggregate");
    }


    // TO maintain a certain level of abstraction, this method understands
    // All prefixes will return same schema as without prefix
    // This will build load statement as per table prefix

    /**
     * Builds the SQL text for the requested operation against the given table
     * name. Prefixed tables ("temp", "tempDelta", "tempAggregate") share the
     * schema of their unprefixed logical table; the prefix also selects which
     * tableset a delta statement reads from and writes to.
     *
     * NOTE(review): isBulk is currently unused — presumably reserved for a
     * bulk-load variant; confirm before removing.
     *
     * @param operation statement type to build
     * @param isBulk unused (see note above)
     * @param table physical table name, possibly carrying a tableset prefix
     * @return the SQL string; "" when the operation is not implemented for the
     *         configured dB; null for an unknown operation
     */
    public static String statementBuilder(Operation operation, boolean isBulk, String table){

        String createStatement = "";
        String fileName = "";
        String logicalTableName = "";
        String toTablePrefix = "";
        String fromTablePrefix = "";

        // Strip the tableset prefix to recover the logical (schema) table name.
        // Longer prefixes are tested first since "temp" is a prefix of both.
        if (table.startsWith("tempDelta"))
            logicalTableName = table.substring(9);
        else if(table.startsWith("tempAggregate"))
            logicalTableName = table.substring(13);
        else if(table.startsWith("temp"))
            logicalTableName = table.substring(4);
        else
            logicalTableName = table;

        // Delta data flows tempAggregate -> tempDelta -> production ("").
        if (table.startsWith("tempDelta")) {
            toTablePrefix = "";
            fromTablePrefix = "tempDelta";
        } else if (table.startsWith("tempAggregate")) {
            toTablePrefix = "tempDelta";
            fromTablePrefix = "tempAggregate";
        }

        switch (operation) {
            case DROP:
                return "drop table if exists " + table;

            case CREATE:
                 // DB taken care of in indexMap
                createStatement = "CREATE TABLE " + table + " (" + fieldMap.get(logicalTableName) + ") " + "Engine=" + mysqlEngine;
                return createStatement;

            case LOAD:
                // Create File Name
                fileName = fileMap.get(logicalTableName);

                // Add locale if applicable (fileMap entries starting with '_'
                // are locale-relative)
                if (fileName.charAt(0) == '_')
                    fileName = locale + fileName;

                // Add sequence folder if one of deltas
                if (table.startsWith("tempDelta") || table.startsWith("tempAggregate"))
                    fileName = (interSequence + 1 ) + File.separator + fileName;

                // Add location to file name
                fileName = dataLocation + fileName;

                if (dB.equalsIgnoreCase("mysql")){

                    // If Windows, add extra \\ as mysql might take it as an escape char
                    // If same behaviour is in other dBs, move this operation out

                    if (File.separator.contentEquals("\\"))
                        fileName = fileName.replace("\\", "\\\\");

                    createStatement = "load data local infile '" + fileName + "' into table " + table
                                + " FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\r\\n'";
                }

                return createStatement;

            case INDEX:
                 // DB taken care of in indexMap; '$' in the template stands for
                 // the physical table name
                createStatement = indexMap.get(logicalTableName).replace("$", table);
                 return createStatement;

            case RENAME:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "ALTER TABLE " + table + " RENAME TO " + logicalTableName;
                return createStatement;

            // Aux-table population: compare the incoming finalIDs against the
            // target tableset's product table.
            case LOAD_NEW:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into " + table + " select productid from " + fromTablePrefix + "finalIDs where productid not in (select productid from " + toTablePrefix + "product)";
                return createStatement;

            case LOAD_UPDATED:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into " + table + " select productid from " + fromTablePrefix + "finalIDs where productid in (select productid from " + toTablePrefix + "product)";
                return createStatement;

            case LOAD_DELETED:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into " + table + " select productid from " + toTablePrefix + "product where productid not in (select productid from " + fromTablePrefix + "finalIDs)";
                return createStatement;

            // Merge operations, driven by the aux ID tables filled above.
            case INSERT_NEW:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into " + toTablePrefix + logicalTableName + " select * from " + table + " dt where dt.productid in (select productid from " + fromTablePrefix + "newIDs)";
                return createStatement;

            case DELETE_UPDATED:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "delete from " + toTablePrefix + logicalTableName + " where productid in (select productid from " + fromTablePrefix + "updatedIDs)";
                return createStatement;

            case INSERT_UPDATED:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into " + toTablePrefix + logicalTableName + " select * from " + table + " dt where dt.productid in (select productid from  " + fromTablePrefix + "updatedIDs)";
                return createStatement;

            case DELETE_REMOVED:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "delete from " + toTablePrefix + logicalTableName + " where productid in (select * from " + fromTablePrefix + "removedIDs)";
                return createStatement;

            // Taxonomy replacement: drop the target table, then rename the
            // source table into its place.
            case REPLACE_REMOVE:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "drop table if exists " + toTablePrefix + logicalTableName;
                return createStatement;

            case REPLACE_RENAME:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "ALTER TABLE " + table + " RENAME TO " + toTablePrefix + logicalTableName;
                return createStatement;

            // import_summary bookkeeping statements.
            case SELECT_MAX_RECORD_ID:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "select max(record_id) from import_summary";
                return createStatement;

            case CREATE_IMPORT_SUMMARY:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "create table import_summary (" + import_summaryFieldList + ") ENGINE= " + mysqlEngine;
                return createStatement;

            case READ_SUMMARY:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "select sequence "
                            + "from import_summary order by record_id desc limit 1";
                return createStatement;

            case INSERT_SUMMARY:
                // NOTE(review): inserts empty strings for every column —
                // presumably relies on column defaults / auto-increment; verify
                // against the import_summary schema.
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "insert into import_summary values ('', '', '', '', '', '', '')";
                return createStatement;
            
            case UPDATE_SUMMARY:
                if (dB.equalsIgnoreCase("mysql"))
                    createStatement = "update import_summary "
                            + "set sequence = " + localSequence
                            // Change this to be dynamic and record errors, right now, it's success all the way
                            + ", attempt_status = " + 0
                            + ", attempt_complete_time = now() "
                            // Add attempt_summary
                            + "where record_id = " + importSummaryRecordRef;
                return createStatement;

            default:
                return null;
        }
    }

    // Convenience overload for operations that need neither bulk mode nor a
    // target table (e.g. the import_summary statements).
    public static String statementBuilder(Operation operation){
        final boolean bulk = false;
        final String noTable = "";
        return statementBuilder(operation, bulk, noTable);
    }

    /*
     *
     * Methods that help in operations with tables identified by fieldMap
     *
     */

    /**
     * Drops every table listed in fieldMap under the given prefix
     * (e.g. "temp", "tempDelta"). Uses DROP TABLE IF EXISTS, so tables that
     * do not exist are harmless. Exits the process if no DB connection can
     * be established.
     */
    public static void deleteTables(String tablePrefix){

        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement (the original never did);
        // the shared connection is closed in finally on every path.
        try (Statement cleanupStatement = con.createStatement()) {

            for (String table : fieldMap.keySet()) {
                cleanupStatement.executeUpdate(statementBuilder(Operation.DROP, false, tablePrefix + table));
            }

        } catch (SQLException e) {
            System.err.println("Unable to delete " + tablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { /* best-effort close */ }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Creates every table listed in fieldMap under the given prefix. A single
     * failed CREATE aborts the whole import: the temp schema is cleaned up
     * and the process exits with status 1.
     */
    public static void createTables(String tablePrefix){

        System.out.println("Creating tableset: " + tablePrefix);
        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally (the original leaked it when createStatement threw).
        try (Statement stmt = con.createStatement()) {

            for (String table : fieldMap.keySet()) {
                try {
                    stmt.executeUpdate(statementBuilder(Operation.CREATE, false, tablePrefix + table));
                } catch (SQLException e) {
                    // One failed CREATE invalidates the tableset — clean up and abort.
                    System.err.println("Error creating Schema for table: " + tablePrefix + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    try { con.close(); } catch (SQLException ignored) { }
                    System.exit(1);
                }
            }

        } catch (SQLException e) {
            System.err.println("Unable to create " + tablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Bulk-loads each table's CSV file (resolved through fileMap by
     * statementBuilder) into the prefixed tableset. A failed load cleans up
     * the temp schema and exits with status 1.
     */
    public static void loadDataIntoTables(String tablePrefix){

        System.out.println("Loading data in tableset: " + tablePrefix);
        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path (the original leaked both on failure).
        try (Statement stmt = con.createStatement()) {

            for (String table : fileMap.keySet()) {
                try {
                    stmt.executeUpdate(statementBuilder(Operation.LOAD, false, tablePrefix + table));
                } catch (SQLException e) {
                    // A partial load invalidates the tableset — clean up and abort.
                    System.err.println("Error loading data into table: " + tablePrefix + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    try { con.close(); } catch (SQLException ignored) { }
                    System.exit(1);
                }
            }

        } catch (SQLException e) {
            System.err.println("Unable to load " + tablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Creates the indexes listed in indexMap on the prefixed tableset. A
     * failed index build cleans up the temp schema and exits with status 1.
     */
    public static void createIndexes(String tablePrefix){

        System.out.println("Creating indexes on tableset: " + tablePrefix);
        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path (the original leaked both on failure).
        try (Statement stmt = con.createStatement()) {

            for (String table : indexMap.keySet()){
                try {
                    stmt.executeUpdate(statementBuilder(Operation.INDEX, false, tablePrefix + table));
                } catch (SQLException e) {
                    System.err.println("Error indexing table: " + tablePrefix + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    try { con.close(); } catch (SQLException ignored) { }
                    System.exit(1);
                }
            }

        } catch (SQLException e) {
            // Message fixed: the original said "Unable to load" (copy-paste).
            System.err.println("Unable to index " + tablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Swaps the freshly loaded "temp" tableset into production: for each
     * table in fieldMap, drops the live table and renames its temp
     * counterpart into place. No suffix parameter — only the temp →
     * production swap is ever performed.
     *
     * NOTE(review): the drop + rename pair is not transactional (an earlier
     * attempt at SET AUTOCOMMIT, left in the original as commented code, did
     * not work), so a crash between the two statements can lose a production
     * table.
     */
    public static void renameTables() {

        System.out.println("Renaming tableset temp to production");
        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path.
        try (Statement stmt = con.createStatement()) {

            for (String table : fieldMap.keySet()) {
                try {
                    // Drop the live table, then move the temp table into its place.
                    stmt.executeUpdate(statementBuilder(Operation.DROP, false, table));
                    stmt.executeUpdate(statementBuilder(Operation.RENAME, false, "temp" + table));
                } catch (SQLException e) {
                    System.err.println("Error renaming table: " + "temp" + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    try { con.close(); } catch (SQLException ignored) { }
                    System.exit(1);
                }
            }

        } catch (SQLException e) {
            System.err.println("Unable to rename tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Applies one statementBuilder operation to every table in
     * contentTablesList. On a failed statement the error message is printed
     * with the target table, the temp schema is cleaned up and the process
     * exits with status 1.
     */
    private static void applyToContentTables(Statement stmt, Operation operation,
            String fromTablePrefix, String toTablePrefix, String errorMessage) {
        for (String table : contentTablesList) {
            try {
                stmt.executeUpdate(statementBuilder(operation, false, fromTablePrefix + table));
            } catch (SQLException e) {
                System.err.println(errorMessage + toTablePrefix + table);
                e.printStackTrace();
                tempSchemaCleanup();
                System.exit(1);
            }
        }
    }

    /**
     * Merges the delta content in the source tableset into the target
     * tableset ("" = production): inserts new products, replaces updated
     * ones (delete + insert), then purges removed ones. The four identical
     * loops of the original are factored into applyToContentTables.
     */
    public static void insertContent(String toTablePrefix, String fromTablePrefix){

        if(toTablePrefix.contentEquals(""))
            System.out.println("Inserting content into Production tables");
        else
            System.out.println("Inserting content into tableset: " + toTablePrefix);

        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path.
        try (Statement stmt = con.createStatement()) {

            // New products first, then replace updated ones, then purge removals.
            applyToContentTables(stmt, Operation.INSERT_NEW, fromTablePrefix, toTablePrefix,
                    "Error Inserting data into table: ");
            applyToContentTables(stmt, Operation.DELETE_UPDATED, fromTablePrefix, toTablePrefix,
                    "Error deleting records from table: ");
            applyToContentTables(stmt, Operation.INSERT_UPDATED, fromTablePrefix, toTablePrefix,
                    "Error Inserting records into table: ");
            applyToContentTables(stmt, Operation.DELETE_REMOVED, fromTablePrefix, toTablePrefix,
                    "Error deleting records from table: ");

        } catch (SQLException e) {
            System.err.println("Unable to update " + toTablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Replaces tables in the target tableset ("" = production) with the
     * freshly loaded ones from the source tableset: drops each target table,
     * then renames the source table into its place.
     *
     * NOTE(review): despite the name, this iterates contentTablesList (as the
     * original did), not taxonomyTablesList — confirm that is intentional.
     */
    public static void replaceTaxonomy(String toTablePrefix, String fromTablePrefix){

        if(toTablePrefix.contentEquals(""))
            System.out.println("Replacing Taxonomy of Production tables");
        else
            System.out.println("Replacing Taxonomy of tableset: " + toTablePrefix);

        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path.
        try (Statement stmt = con.createStatement()) {

            // Drop the target tables that are about to be replaced.
            for (String table : contentTablesList) {
                try {
                    stmt.executeUpdate(statementBuilder(Operation.REPLACE_REMOVE, false, fromTablePrefix + table));
                } catch (SQLException e) {
                    System.err.println("Error dropping table: " + toTablePrefix + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    System.exit(1);
                }
            }

            // Rename the source tables into the target tableset.
            for (String table : contentTablesList) {
                try {
                    stmt.executeUpdate(statementBuilder(Operation.REPLACE_RENAME, false, fromTablePrefix + table));
                } catch (SQLException e) {
                    // "reanaming" typo in the original message fixed.
                    System.err.println("Error renaming table: " + fromTablePrefix + table + " to " + toTablePrefix + table);
                    e.printStackTrace();
                    tempSchemaCleanup();
                    System.exit(1);
                }
            }

        } catch (SQLException e) {
            System.err.println("Unable to process " + toTablePrefix + " tables");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }

    /**
     * Populates the auxiliary ID tables (newIDs / updatedIDs / removedIDs)
     * for the given source tableset by comparing its finalIDs against the
     * target product table (target chosen by statementBuilder from the prefix).
     *
     * @param toTablePrefix unused; kept for signature symmetry with
     *        insertContent / replaceTaxonomy
     * @param fromTablePrefix tableset whose aux tables are filled
     */
    public static void formulateAuxTables(String toTablePrefix /* Unused */, String fromTablePrefix) {

        System.out.println("Creating Aux helper tables: " + fromTablePrefix);

        DBOperationTime.startTimer();

        if (!connectToDB())
            System.exit(1);

        // try-with-resources closes the Statement; the connection is closed
        // in finally on every path.
        try (Statement stmnt = con.createStatement()) {

            // IDs present in the delta but not in the target product table
            stmnt.executeUpdate(statementBuilder(Operation.LOAD_NEW, false, fromTablePrefix + "newIDs"));

            // IDs present in both the delta and the target product table
            stmnt.executeUpdate(statementBuilder(Operation.LOAD_UPDATED, false, fromTablePrefix + "updatedIDs"));

            // IDs present in the target product table but missing from the delta
            stmnt.executeUpdate(statementBuilder(Operation.LOAD_DELETED, false, fromTablePrefix + "removedIDs"));

        } catch (SQLException e) {
            System.err.println("Unable to insert into " + fromTablePrefix + " aux table");
            e.printStackTrace();
        } finally {
            try { con.close(); } catch (SQLException ignored) { }
            DBOperationTime.stopTimer();
        }
    }


    // Builds the various lookup structures used by this class (file lists, table/file
    // maps, field and index maps, and the content/taxonomy table lists).
    // NOTE(review): still tied to static config fields — should be made OD independent.
    public static void buildTableMaps() {

        System.out.println("Building Table / File list maps");

        // Relative FTP paths of the feed files, parameterized by locale.
        fileList = new ArrayList<String>();
        fileList.add("/tax/global/tax_global_current_");
        fileList.add("/tax/" + locale + "/tax_" + locale + "_current_");
        fileList.add("/content/global/basic/basic_global_current_");
        fileList.add("/content/" + locale + "/basic/basic_" + locale + "_current_");
        fileList.add("/content/" + locale + "/detail/detail_" + locale + "_current_");
        fileList.add("/content/" + locale + "/accessories/accessories_" + locale + "_current_");
        fileList.add("/content/" + locale + "/similar/similar_" + locale + "_current_");
        fileList.add("/content/" + locale + "/upsell/upsell_" + locale + "_current_");
        fileList.add("/content/" + locale + "/sku/sku_" + locale + "_current_");

        // Map table names to file names: basic entries unconditionally,
        // add-on entries only when the matching feature flag is set to "Yes".
        fileMap = new HashMap<String, String>();

        for (String[] entry : basicFileList) {
            fileMap.put(entry[0], entry[1]);
        }

        for (String[] addon : addonFileList) {
            boolean enabled =
                    (accessories.equalsIgnoreCase("Yes") && addon[0].equalsIgnoreCase("accessories"))
                    || (detail.equalsIgnoreCase("Yes") && addon[0].equalsIgnoreCase("detail"))
                    || (upsell.equalsIgnoreCase("Yes") && addon[0].equalsIgnoreCase("upsell"))
                    || (similar.equalsIgnoreCase("Yes") && addon[0].equalsIgnoreCase("similar"));
            if (enabled) {
                fileMap.put(addon[1], addon[2]);
            }
        }

        // Column/field definitions, keyed by table name (MySQL only for now).
        fieldMap = new HashMap<String, String>();
        if (dB.equalsIgnoreCase("mysql")) {
            for (String[] field : mysqlFieldList) {
                fieldMap.put(field[0], field[1]);
            }
        }

        // Index definitions, keyed by table name (MySQL only for now).
        indexMap = new HashMap<String, String>();
        if (dB.equalsIgnoreCase("mysql")) {
            for (String[] index : mysqlIndexList) {
                indexMap.put(index[0], index[1]);
            }
        }

        // Partition the known tables into content vs. taxonomy lists for delta inserts.
        contentTablesList = new ArrayList<String>();
        taxonomyTablesList = new ArrayList<String>();
        for (String table : fileMap.keySet()) {
            if (table.startsWith("product") || table.contentEquals("search_attribute")) {
                contentTablesList.add(table);
            } else {
                taxonomyTablesList.add(table);
            }
        }
    }

    // Drops all temporary working schemas left over from an import run.
    public static void tempSchemaCleanup() {

        System.out.println("Performing clean-up");

        // Same drop routine for every temporary schema.
        for (String schema : new String[] { "temp", "tempDelta", "tempAggregate" }) {
            deleteTables(schema);
        }
    }

    // Connects to the database, doing nothing if a live connection already exists.
    // On a fresh MySQL connection, clears sql_mode so strict-mode checks do not
    // reject the legacy data loaded by this importer.
    //
    // Returns true on success (or if already connected), false on any failure.
    public static boolean connectToDB() {

        try {
            if (con == null || con.isClosed()) {
                Class.forName("com.mysql.jdbc.Driver");
                con = DriverManager.getConnection(dBURL, dBUser, dBPassword);
                // Reset sql_mode; close the Statement promptly instead of leaking it.
                if (con.getMetaData().getDatabaseProductName().equalsIgnoreCase("MySQL")) {
                    try (Statement stmt = con.createStatement()) {
                        stmt.executeUpdate("set sql_mode = ''");
                    }
                }
            }
        } catch (SQLException e) {
            System.err.println("Unable to connect to " + dBName + "\r\n" + e.getMessage());
            e.printStackTrace();
            return false;
        } catch (ClassNotFoundException e) {
            System.err.println("DB driver not loaded");
            e.printStackTrace();
            return false;
        } catch (Exception e) {
            System.err.println(e.getMessage());
            e.printStackTrace();
            return false;
        }

        return true;
    }

    // Entry point: decides between a full import and a delta import based on how
    // far the local sequence lags the latest remote sequence, records progress in
    // the import_summary table, and prints timing statistics.
    public static void main(String[] args) {

        // Initialize timers used for the end-of-run statistics summary.
        ftpTime = new TimeStatGenerator();
        fileOperationTime = new TimeStatGenerator();
        DBOperationTime = new TimeStatGenerator();
        DBSensitiveTime = new TimeStatGenerator();

        // Validate configuration, then build the table/file lookup maps.
        validateAndSetProperties();
        buildTableMaps();

        System.out.println("Values set");

        System.out.println("Sequences as they are:");
        System.out.println("latestSequence: " + latestSequence);
        System.out.println("localSequence: " + localSequence);

        if (latestSequence == localSequence) {
            // Already up to date: nothing to import.
        } else if (latestSequence < localSequence) {
            // Local sequence is ahead of remote — inconsistent state; do not import.
        } else if (latestSequence - localSequence > threshold) {
            // Too far behind for incremental deltas: run a full load first.
            fullImport();

            // Record the sequence reached by the full load.
            // TODO(review): move summary bookkeeping to a dedicated Import Logger class.
            updateImportSummary();

            // If the full load did not reach the latest sequence (presumably
            // fullImport() advances localSequence — confirm), catch up with deltas.
            if (localSequence != latestSequence) {
                deltaImport();
            }
        } else {
            deltaImport();
        }

        // Write final summary data to import_summary.
        updateImportSummary();

        // Output timing summary.
        System.out.println("\r\nSummary:");
        System.out.println("Total ftp interaction time: " + ftpTime.getTotalElapsedTime() + " secs");
        System.out.println("Total file operation time: " + fileOperationTime.getTotalElapsedTime() + " secs");
        System.out.println("Total DB operation time: " + DBOperationTime.getTotalElapsedTime() + " secs");
        System.out.println("Total time spent on DB sensitive operations: " + DBSensitiveTime.getTotalElapsedTime() + " secs (Conservative estimate)");
        System.out.println("Total elapsed time (measured): " + TimeStatGenerator.getCummulativeTime() + " secs");
    }

    // Writes the current import state to the import_summary table.
    // Exits the JVM if no database connection can be established.
    // (Extracted from main(), where this block was duplicated verbatim.)
    private static void updateImportSummary() {
        if (!connectToDB()) {
            System.exit(1);
        }
        try {
            Statement stmt = con.createStatement();
            System.out.println("Updating the summary table");
            stmt.execute(statementBuilder(Operation.UPDATE_SUMMARY));
            con.close();

        } catch (SQLException e) {
            System.err.println("Error writing to summary table: " + e.getMessage());
            // Best-effort close so a failed update does not leak the connection,
            // matching the cleanup convention used elsewhere in this class.
            try { con.close(); } catch (Exception ignored) { }
        }
    }
}
