/****************************************************************************
 *
 * Copyright (C) 2003-2008 Los Alamos National Security, LLC
 *                         Packet Analytics Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License Version 2 as
 * published by the Free Software Foundation.  You may not use, modify or
 * distribute this program under any other version of the GNU General
 * Public License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 ****************************************************************************/
package nfse;

import java.io.*;
import java.net.Socket;
import java.util.*;
import java.sql.*;

import javax.net.SocketFactory;

import nfse.index.*;
import nfse.stats.*;

class DataSocket extends Thread {

    /***************************************************************************
     * A temporary JDBC connection to the metadata database used in various
     * methods.
     */
    Connection tempConnection = null;

    /***************************************************************************
     * JDBC statement used when purging buffered records to the database.
     * NOTE(review): presumably driven by purgeRecords() (not visible in this
     * chunk); it is package-private, possibly so the DataSocketMonitor thread
     * can reach it -- confirm intended visibility.
     */
    Statement purgeStatement = null;
    
    /***************************************************************************
     * A String representation of the name of the latest dynamic table being
     * used for processing incoming data. The name is of the form
     * TYPE_SENSOR_RUN.
     */
    private String maxRunName = "";

    /***************************************************************************
     * This value is sent by a data client or remote node to instruct the server
     * to capture and index data
     */
    public static final int OP_INSERT = 1;

    /***************************************************************************
     * This value is sent by the master node to instruct the server to dump
     * temporary data and index files for merging by the sender
     */
    public static final int OP_DUMP = 2;

    /***************************************************************************
     * This value is sent by the master node to instruct the server to remove
     * temporary file associated with an insert operation
     */
    public static final int OP_CLEANUP = 3;

    /***************************************************************************
     * This is old functionality that was used to index data files that were
     * already in the system. Obsolete.
     */
    public static final int OP_INSERT_IN_PLACE = 4;

    /***************************************************************************
     * This value is sent to the master node to begin a migration.
     */
    public static final int OP_MIGRATE = 5;

    /***************************************************************************
     * The dump operation is sent to remote nodes by the master during a
     * migration operation.
     */
    public static final int OP_DATA_DUMP = 6;

    /***************************************************************************
     * This opcode is sent by the master and instructs remote nodes to delete
     * old files and entries in the dynamic tables to reflect that a migration
     * has taken place.
     */
    public static final int OP_DATA_CLEANUP = 7;

    /***************************************************************************
     * The socket the client and server are connected over.
     */
    private Socket sock;

    /***************************************************************************
     * The root directory in which to store/create data files and index
     * structures
     */
    private String GlobalDir = null;

    /***************************************************************************
     * The location of the Berkeley DB environment to use for temporary index
     * structures
     */
    private String Temp = null;

    /***************************************************************************
     * The directory location to store local intermediate data files and index
     * structures
     */
    private String LocalDir = null;

    /***************************************************************************
     * The directory location to find intermediate data files and index
     * structures to be merged
     */
    private String SharedDir = null;

    /***************************************************************************
     * Array of output streams from master node to subordinates
     */
    private DataOutputStream[] doss = null;

    /***************************************************************************
     * Array of input streams from master node to subordinates
     */
    private DataInputStream[] diss = null;

    /***************************************************************************
     * Array of Socket connections from master node to subordinates
     */
    private Socket[] socks = null;

    /***************************************************************************
     * Number of bytes read for this data stream
     */
    private long totalBytes = 0;

    /***************************************************************************
     * Number of records read for this data stream
     */
    private long totalRecords = 0;

    /***************************************************************************
     * The database name associated with this tool's name.
     */
    private String database = "";

    /***************************************************************************
     * For performance, records are buffered in this array short-term until
     * purgeRecords() is called either in insertDynamic() or by the
     * DataSocketMonitor thread which periodically purges records for low-volume
     * data types.
     */
    private Vector<String> records = new Vector<String>();

    /***************************************************************************
     * A pointer back to the DataServer that created this thread. Used to access
     * various functions on the DataServer from within the DataSocket.
     */
    private DataServer parent = null;

    /***************************************************************************
     * Creates a handler thread for a single accepted client connection.
     * 
     * @param s
     *            the socket connecting the data client to this data server
     * @param server
     *            the DataServer instance that accepted and started this thread
     */
    public DataSocket(Socket s, DataServer server) {
        this.sock = s;
        this.parent = server;
    }

    /***************************************************************************
     * Buffers a partial SQL insert statement (everything after the
     * "insert into TABLE" prefix) in the records Vector; the buffered SQL is
     * executed later by purgeRecords().
     * 
     * @param sql
     *            a partial SQL insert statement for one parsed record
     * @return the current number of buffered records, so callers can decide
     *         when to trigger purgeRecords()
     */
    private int addRecord(String sql) {
        records.add(sql);
        return records.size();
    }

    /***************************************************************************
     * Processes a cleanup request on a remote node after an index build
     * (migration): deletes the local index directory and intermediate data
     * file for the run described in the stream header, then drops the oldest
     * dynamic table for this sensor/node (plus its sequence and offsets
     * table) and removes its metadata entry. Finally acknowledges the master
     * over the socket.
     * 
     * Fixes over the previous version: no longer throws
     * ArrayIndexOutOfBoundsException when no dynamic table matches (the old
     * code called removeElementAt(-1)), and the JDBC connection/statement are
     * always closed in a finally block instead of leaking on error.
     * 
     * @param toolInfo
     *            Information about the tool that the index was built for
     * @param sensorInfo
     *            Information about the sensor that the index was built for
     * @param dis
     *            input stream from the master (supplies month/day/year/run)
     * @param dos
     *            output stream back to the master (acknowledgement)
     */
    private void cleanup(ToolInfo toolInfo, SensorInfo sensorInfo, DataInputStream dis, DataOutputStream dos) {
        Connection connection = null;
        Statement statement = null;
        try {
            Properties props = new Properties();
            props.setProperty("user", NetFSE.DYNAMIC_DB_USER);
            props.setProperty("password", NetFSE.DYNAMIC_DB_PASSWORD);
            props.setProperty("client_encoding", "UTF8");

            Class.forName("org.postgresql.Driver");
            String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + toolInfo.getName().toLowerCase();
            connection = DriverManager.getConnection(url, props);
            statement = connection.createStatement();

            // Header format: month (1-12), day (1-31), year (YYYY), run number.
            int m = dis.readInt();
            int d = dis.readInt();
            int y = dis.readInt();
            int run = dis.readInt();

            String dataID = m + "_" + d + "_" + y + "_" + run + "_" + sensorInfo.getSensorID();

            // Remove the local index directory for this run.
            String exec = "rm -Rf " + LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/index/" + y
                    + "/" + m + "/" + d + "_" + run;
            System.out.println("DataID " + dataID + ": Deleting local index: " + exec);
            NetFSEUtil.execAndWait(exec);

            // Remove the local intermediate data file.
            String tempFilePath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/" + y + "/" + m
                    + "/" + d + "_" + run + "." + toolInfo.getName();
            System.out.println("DataID " + dataID + ": Deleting local datafile: " + tempFilePath);
            File tempFile = new File(tempFilePath);
            if (tempFile.exists())
                tempFile.delete();

            // Dynamic table names for this sensor/node have three
            // underscore-separated tokens; pick the one with the lowest run
            // number (the oldest) as the table to retire.
            String sensorStr = "s" + sensorInfo.getSensorID() + "_";
            String nodeStr = "_n" + NetFSE.NODE_ID + "_";

            String tableName = "";
            int minVal = -1;
            String sql = "select table_name from information_schema.tables where table_catalog='"
                    + toolInfo.getName().toLowerCase() + "'";
            ResultSet rs1 = statement.executeQuery(sql);
            while (rs1.next()) {
                String name = rs1.getString(1);
                if (name.startsWith(sensorStr) && name.indexOf(nodeStr) > 0) {
                    String[] tokens = name.split("_");
                    if (tokens.length == 3) {
                        try {
                            int tempVal = Integer.parseInt(tokens[2].substring(1));
                            if (minVal == -1 || tempVal < minVal) {
                                minVal = tempVal;
                                tableName = name;
                            }
                        } catch (NumberFormatException e) {
                            // Table name did not match the expected pattern;
                            // skip it.
                        }
                    }
                }
            }
            rs1.close();

            if (tableName.length() > 0) {
                // Remove the metadata entry first, then drop the table and its
                // helper objects. Each drop is best-effort because a previous
                // cleanup attempt may have partially completed.
                MetaDatabase mdb = MetaDatabase.create();
                mdb.removeTempTable(tableName);
                mdb.closeConnection();

                String[] drops = { "drop table " + tableName, "drop sequence " + tableName + "_seq",
                        "drop table " + tableName + "_offsets" };
                for (int i = 0; i < drops.length; i++) {
                    try {
                        statement.execute(drops[i]);
                    } catch (Exception e) {
                        // Best-effort: the object may not exist.
                    }
                }

                System.out.println("DataID " + dataID + ": finished removing records; " + drops[drops.length - 1] + "");
            }

            try {
                // Acknowledge completion and wait for the master's reply.
                dos.writeInt(1);
                dis.readInt();
            } catch (Exception e) {
                // The master may have already disconnected; nothing to do.
            }
        } catch (Exception e) {
            System.out.println("Clean up failed. This is bad.");
            e.printStackTrace();
        } finally {
            // Always release JDBC resources, even when cleanup fails part-way.
            try {
                if (statement != null)
                    statement.close();
            } catch (Exception ignored) {
            }
            try {
                if (connection != null)
                    connection.close();
            } catch (Exception ignored) {
            }
        }

    }

    /***************************************************************************
     * Local variant of cleanup(): deletes the index directory and intermediate
     * data file for the given run, then drops the named dynamic table (plus
     * its sequence and offsets table) using a caller-supplied JDBC statement,
     * and removes the table's metadata entry.
     * 
     * @param toolInfo
     *            Information about the tool the data belongs to
     * @param sensorInfo
     *            Information about the sensor the data came from
     * @param m
     *            month of the run (1-12)
     * @param d
     *            day of month of the run (1-31)
     * @param y
     *            year of the run (YYYY)
     * @param run
     *            run number of the data stream
     * @param tableName
     *            the dynamic table to drop; ignored when empty
     * @param statement
     *            an open JDBC statement on the tool's database (not closed
     *            here; the caller owns it)
     */
    private void cleanupLocal(ToolInfo toolInfo, SensorInfo sensorInfo, int m, int d, int y, int run, String tableName,
            Statement statement) {
        try {
            String dataID = m + "_" + d + "_" + y + "_" + run + "_" + sensorInfo.getSensorID();

            // Remove the local index directory for this run.
            String exec = "rm -Rf " + LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/index/" + y
                    + "/" + m + "/" + d + "_" + run;
            System.out.println("DataID " + dataID + ": Deleting local index: " + exec);
            NetFSEUtil.execAndWait(exec);

            // Remove the local intermediate data file.
            String tempFilePath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/" + y + "/" + m
                    + "/" + d + "_" + run + "." + toolInfo.getName();
            System.out.println("DataID " + dataID + ": Deleting local datafile: " + tempFilePath);
            File tempFile = new File(tempFilePath);
            if (tempFile.exists())
                tempFile.delete();

            if (tableName.length() > 0) {
                // Remove the metadata entry first, then drop the table and its
                // helper objects. Each drop is best-effort because the objects
                // may not all exist.
                MetaDatabase mdb = MetaDatabase.create();
                mdb.removeTempTable(tableName);
                mdb.closeConnection();

                String sql = "";
                String[] drops = { "drop table " + tableName, "drop sequence " + tableName + "_seq",
                        "drop table " + tableName + "_offsets" };
                for (int i = 0; i < drops.length; i++) {
                    try {
                        sql = drops[i];
                        statement.execute(sql);
                    } catch (Exception e) {
                        // Best-effort: the object may not exist.
                    }
                }

                System.out.println("DataID " + dataID + ": finished removing records; " + sql + "");
            }

        } catch (Exception e) {
            System.out.println("Clean up failed. This is bad.");
            e.printStackTrace();
        }

    }

    /***************************************************************************
     * Convenience overload of connectToBuildNodes() that excludes the local
     * node from the connection attempt.
     * 
     * @param port
     *            The port the remote servers are listening on
     * @param values
     *            The flags used to connect to the remote servers
     * @param sensor
     *            The sensor whose assigned build nodes should be contacted
     * @return The nodes that were successfully connected to
     */
    private String[] connectToBuildNodes(int port, int[] values, int sensor) {
        final boolean includeSelf = false;
        return connectToBuildNodes(port, values, sensor, includeSelf);
    }

    /***************************************************************************
     * Helper method to connect to remote nodes for build operations using flags
     * in the values array. Only nodes with node tasks assigned to the given
     * sensor will be contacted.
     * 
     * NOTE(review): the node list passed to connectToNodes() is currently
     * hard-coded to "localhost" (see below); the tempNames list computed from
     * the sensor's node status is built but never used. Confirm whether
     * multi-node dispatch is intentionally disabled.
     * 
     * @param port
     *            The port the remote server is listening on
     * @param values
     *            The flags used to connect to the remote server
     * @param sensor
     *            The sensor whose assigned build nodes should be contacted
     * @param includeSelf
     *            Whether "localhost" is kept in the candidate node list
     * @return The nodes from nodeNames that were successfully connected to
     */
    private String[] connectToBuildNodes(int port, int[] values, int sensor, boolean includeSelf) {
        doss = null;

        // Determine what nodes are assigned to process data from this sensor
        NodeStatus[] status = NetFSE.getBuildNodeStatusBySensor(sensor);

        Vector<String> tempNames = new Vector<String>();
        for (int i = 0; i < status.length; i++) {
            if (status[i].getName().compareTo("localhost") != 0 || includeSelf)
                tempNames.addElement(status[i].getName());
        }

        // NOTE(review): tempNames is ignored; only the local node is contacted.
        String[] nodeNames = { "localhost" };
        // String[] nodeNames = new String[tempNames.size()];
        // for (int i = 0; i < nodeNames.length; i++)
        // nodeNames[i] = (String) tempNames.elementAt(i);

        // Call connectToNodes with the names of nodes processing this sensor's
        // data and return the result
        return connectToNodes(port, values, nodeNames);
    }

    /***************************************************************************
     * Helper method to connect to remote nodes using flags in the values array.
     * Any streams and sockets held from a previous call (doss, diss, socks)
     * are closed first; those arrays are then repopulated with one entry per
     * node that was successfully connected, and the values are written to each
     * new connection as a handshake.
     * 
     * Fix over the previous version: each close of an old stream/socket is
     * guarded individually, so one failing close() no longer skips the
     * remaining closes (leaking them) and aborts the whole method.
     * 
     * @param port
     *            The port the remote server is listening on
     * @param values
     *            The flags written to each remote server after connecting
     * @param nodeNames
     *            A list of nodes to attempt to connect to; may be null
     * @return The subset of nodeNames that were successfully connected to
     */
    private String[] connectToNodes(int port, int[] values, String[] nodeNames) {
        try {
            if (doss != null) {
                for (int i = 0; i < doss.length; i++) {
                    try {
                        doss[i].close();
                    } catch (Exception e) {
                        // Already closed or broken; keep closing the rest.
                    }
                }
            }
            if (diss != null) {
                for (int i = 0; i < diss.length; i++) {
                    try {
                        diss[i].close();
                    } catch (Exception e) {
                        // Already closed or broken; keep closing the rest.
                    }
                }
            }
            if (socks != null) {
                for (int i = 0; i < socks.length; i++) {
                    try {
                        if (!socks[i].isClosed()) {
                            socks[i].getOutputStream().close();
                            socks[i].getInputStream().close();
                        }
                        socks[i].close();
                    } catch (Exception e) {
                        // Already closed or broken; keep closing the rest.
                    }
                }
            }
            if (nodeNames == null) {
                nodeNames = new String[0];
            }

            Vector<DataOutputStream> outStreams = new Vector<DataOutputStream>();
            Vector<String> connectedNames = new Vector<String>();
            Vector<DataInputStream> inStreams = new Vector<DataInputStream>();
            Vector<Socket> sockets = new Vector<Socket>();
            for (int i = 0; i < nodeNames.length; i++) {
                try {
                    SocketFactory sf = SocketFactory.getDefault();
                    Socket s = sf.createSocket(nodeNames[i], port);
                    DataOutputStream dos = new DataOutputStream(s.getOutputStream());
                    DataInputStream dis = new DataInputStream(s.getInputStream());
                    // Send the operation flags as the connection handshake.
                    for (int x = 0; x < values.length; x++)
                        dos.writeInt(values[x]);
                    outStreams.addElement(dos);
                    connectedNames.addElement(nodeNames[i]);
                    inStreams.addElement(dis);
                    sockets.addElement(s);
                } catch (Exception e1) {
                    System.err.println("Failed to connect to node '" + nodeNames[i] + "'.");
                }
            }

            // Publish the successful connections to the instance arrays.
            diss = new DataInputStream[inStreams.size()];
            doss = new DataOutputStream[outStreams.size()];
            socks = new Socket[sockets.size()];
            nodeNames = new String[connectedNames.size()];
            for (int i = 0; i < outStreams.size(); i++) {
                doss[i] = outStreams.elementAt(i);
                nodeNames[i] = connectedNames.elementAt(i);
                diss[i] = inStreams.elementAt(i);
                socks[i] = sockets.elementAt(i);
            }
        } catch (Exception E) {
            E.printStackTrace();
        }
        return nodeNames;
    }

    /***************************************************************************
     * This method dumps the contents of a Berekely DB (or MySQL) datastore into
     * a flat file to be merged into a static b+ tree
     * 
     * @param toolInfo
     *            The ToolInfo object of the data type to dump
     * @param sensorInfo
     *            The SensorInfo object of the sensor to dump
     */
    private void dump(ToolInfo toolInfo, SensorInfo sensorInfo, DataInputStream dis, DataOutputStream dos) {
        long dumpTS = System.currentTimeMillis();
        try {

            // Process the header. Format is: // integer: month // integer: day
            // //
            // integer: year // integer: data set number ("run") // integer:
            // field
            // The following fields are read to identify the data stream
            int m = dis.readInt(); // Month of this data stream (1-12)
            int d = dis.readInt(); // Day of month of this data stream (1-31)
            int y = dis.readInt(); // Year of this data stream (YYYY)
            int run = dis.readInt(); // Run number of the data stream
            // Process the field information
            int recordCount = dis.readInt(); // Number of records to dump
            // int limit = 100000;

            String dataID = m + "_" + d + "_" + y + "_" + run + "_" + sensorInfo.getSensorID();
            System.out.println("DataID " + dataID + ": Starting dump ");

            Properties props = new Properties();
            props.setProperty("user", NetFSE.DYNAMIC_DB_USER);
            props.setProperty("password", NetFSE.DYNAMIC_DB_PASSWORD);
            props.setProperty("client_encoding", "UTF8");

            Class.forName("org.postgresql.Driver");
            String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + toolInfo.getName().toLowerCase();
            Connection tempConnection = DriverManager.getConnection(url, props);

            Statement s = tempConnection.createStatement();

            // s.execute("set session TRANSACTION ISOLATION LEVEL READ
            // UNCOMMITTED");

            // Prepare for dump, setup shared directories/files

            String sensorStr = "s" + sensorInfo.getSensorID() + "_";
            String nodeStr = "_n" + NetFSE.NODE_ID + "_";

            String tableName = "";
            int minVal = -1;
            int minIdx = -1;
            int idx = 0;
            Vector<String> temp = new Vector<String>();
            String sql = "select table_name from information_schema.tables where table_catalog='"
                    + toolInfo.getName().toLowerCase() + "' and table_type='BASE TABLE' and table_schema='public'";
            ResultSet rs1 = s.executeQuery(sql);
            while (rs1.next()) {
                String name = rs1.getString(1);
                if (name.startsWith(sensorStr) && name.indexOf(nodeStr) > 0) {
                    String[] tokens = name.split("_");

                    if (tokens.length == 3) {

                        // System.out.print(name + ": ");
                        // for (int i = 0; i < tokens.length; i++)
                        // System.out.print(tokens[i] + ", ");
                        // System.out.println();

                        try {

                            int val = Integer.parseInt(tokens[2].substring(1));
                            if (minVal == -1 || val < minVal) {
                                minIdx = idx;
                                minVal = val;
                                tableName = name;
                            }
                            temp.addElement(name);

                        } catch (Exception e) {
                            e.printStackTrace();
                        }

                        idx++;
                    }
                }

            }
            rs1.close();
            temp.removeElementAt(minIdx);

            if (temp.size() == 0) {
                System.out.println("DataID " + dataID + ": Nothing to migrate!");
                try {
                    dos.writeInt(0);
                    dos.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                try {

                    s.close();
                    tempConnection.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }

                return;
            } else {
                // System.out.println("Starting a migrate!");
                System.out.println("DataID " + dataID + ": Dumping " + tableName);
            }

            NetFSERecord rec = NetFSE.getTypeClass(toolInfo.getId());
            int[] fields = rec.getIndexedFields();
            GlobalField[] globalFields = new GlobalField[fields.length];
            for (int i = 0; i < fields.length; i++) {
                globalFields[i] = NetFSE.getGlobalField(fields[i]);
            }

            /*******************************************************************
             * 1. Sort by startts 2. Add a unique ID to each record by adding
             * auto-inc field to temp table 3. create offsets during itteration
             * on startts and update new offset column by unique id 4. Now
             * process remaining fields: sort by field name, offset
             * 
             */
            // Database[] databases = new Database[fields.length];
            int field = -1;
            int starttsIdx = -1;
            for (int i = 0; i < fields.length; i++) {
                if (fields[i] == NetFSE.FIELD_START_TS) {
                    field = fields[i];
                    starttsIdx = i;
                    // break;
                } else {
                    // String filePath = NetForSE.DB_ENV_HOME + "/dump_" +
                    // dataID + "_" + fields[i].ID + ".bdb";
                    // DatabaseConfig dbConfig = new DatabaseConfig();
                    // dbConfig.setAllowCreate(true);
                    // dbConfig.setSortedDuplicates(true);
                    // databases[i] = dbEnv.openDatabase(null, "dump_" + dataID
                    // + "_" + fields[i].ID + ".bdb", dbConfig);
                }
            }

            String indexPath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/index/" + y + "/"
                    + m + "/" + d + "_" + run + "/";
            String dataFilePath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/" + y + "/" + m
                    + "/" + d + "_" + run + "." + toolInfo.getName();
            String[] indexDumpPaths = new String[fields.length];
            PrintWriter[] dumps = new PrintWriter[fields.length];
            for (int i = 0; i < fields.length; i++) {
                setupDirectories(LocalDir, toolInfo.getName(), sensorInfo.getName(), m, d, y, run, fields[i]);
                indexDumpPaths[i] = indexPath + NetFSE.getFieldName(fields[i]) + "/index.dump";
                // System.out.println(indexDumpPaths[i]);
                dumps[i] = new PrintWriter(new FileWriter(indexDumpPaths[i]));
            }
            DataOutputStream out = new DataOutputStream(new FileOutputStream(dataFilePath));

            long ts = System.currentTimeMillis();

            try {
                sql = "alter table " + tableName + " drop column record_id";
                // System.out.println(sql);
                s.execute(sql);
            } catch (Exception e) {
                // e.printStackTrace();
            }

            try {
                sql = "drop sequence " + tableName + "_seq";
                // System.out.println(sql);
                s.execute(sql);
            } catch (Exception e) {
                // e.printStackTrace();
            }

            sql = "create sequence " + tableName + "_seq";
            // System.out.println(sql);
            s.execute(sql);

            try {
                sql = "drop table " + tableName + "_offsets";
                // System.out.println(sql);
                s.execute(sql);

            } catch (SQLException sqle) {

            }

            sql = "create table " + tableName + "_offsets (record_id bigint, offset_hex varchar(8))";
            // System.out.println(sql);
            s.execute(sql);

            sql = "create index " + tableName + "_offsets_idx on " + tableName + "_offsets (record_id)";
            // System.out.println(sql);
            s.execute(sql);

            sql = "alter table " + tableName + " add column record_id bigint default nextval('" + tableName + "_seq');";
            // System.out.println(sql);
            s.execute(sql);

            sql = "create index " + tableName + "_recid_idx on " + tableName + " (record_id)";
            // System.out.println(sql);
            s.execute(sql);

            long ts2 = System.currentTimeMillis();
            long ms = ts2 - ts;
            float sec = (float) ms / (float) 1000;
            // System.out.println("Took " + sec + " seconds.");
            ts = ts2;

            Statement s2 = tempConnection.createStatement();
            // s2.execute("set session TRANSACTION ISOLATION LEVEL READ
            // UNCOMMITTED");

            sql = "select count(*) as a, min(startts) as b, max(startts) as c from " + tableName + "";
            System.out.println(sql);
            ResultSet rs = s.executeQuery(sql);
            rs.next();
            recordCount = rs.getInt(1);
            int minTime = (int) (rs.getTimestamp(2).getTime() / 1000);
            int maxTime = (int) (rs.getTimestamp(3).getTime() / 1000);

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Count took " + sec + " seconds.");
            ts = ts2;

            int offset = 0;

            sql = "BEGIN; CREATE TEMP TABLE " + tableName
                    + "_startts AS SELECT record_id, startts, Record, Record_Hex FROM " + tableName
                    + " order by startts asc; COPY " + tableName + "_startts TO stdout; ROLLBACK;";
            // System.out.println(sql);

            File tempFile1 = File.createTempFile("dump", ".startts");
            File tempFile2 = File.createTempFile("sqldump", ".startts");

            PrintWriter sqlOut = new PrintWriter(new FileWriter(tempFile2));
            sqlOut.println(sql);
            sqlOut.flush();
            sqlOut.close();

            PrintWriter shOut = new PrintWriter(new FileWriter(tempFile1));
            shOut.println(NetFSE.PSQL + " -U " + NetFSE.DYNAMIC_DB_USER + " " + toolInfo.getName().toLowerCase()
                    + " < " + tempFile2.getAbsolutePath() + " > /tmp/startts.dump");
            shOut.flush();

            NetFSEUtil.execAndWait("sh " + tempFile1.getAbsolutePath());

            tempFile2.delete();
            tempFile1.delete();

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Temp table for startts took " + sec + " seconds.");
            ts = ts2;

            LineNumberReader in = new LineNumberReader(new FileReader("/tmp/startts.dump"));
            // PrintWriter offOut = new PrintWriter(new
            // FileWriter("/tmp/startts_offsets.sql"));
            // offOut.println("BEGIN");
            s.execute("BEGIN");
            // long off = 0;
            sql = "";
            int count = 0;
            String line = in.readLine();
            while (line != null) {
                String[] tokens = line.split("\t");
                if (tokens.length == 3) {
                    count++;
                    String recID = tokens[0];
                    byte[] recBytes = tokens[2].getBytes();
                    String offStr = Long.toHexString(offset);
                    while (offStr.length() < 8)
                        offStr = "0" + offStr;

                    if (sql.length() > 0) {
                        sql += ",";
                    }
                    sql = "insert into " + tableName + "_offsets values (" + recID + ",'" + offStr + "')";

                    s.execute(sql);
                    // System.out.println(sql);

                    String key = tokens[1];

                    // key = "" + Timestamp.valueOf(tokens[1]).getTime() / 1000;
                    String keyStr = Long.toHexString(Timestamp.valueOf(tokens[1]).getTime() / (long) 1000);

                    while (keyStr.length() < globalFields[starttsIdx].getLength())
                        keyStr = "0" + keyStr;

                    dumps[starttsIdx].println(offStr + keyStr);

                    out.write(recBytes);
                    out.write('\n');
                    offset += recBytes.length + 1;
                }
                line = in.readLine();
            }
            // if (sql.length() > 0) {
            // sql = "insert into " + tableName + "_offsets values " + sql;
            // s.execute(sql);
            // }
            s.execute("COMMIT");
            out.close();
            // offOut.println("COMMIT");
            // offOut.flush();
            // offOut.close();

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Offsets for startts took " + sec + " seconds.");
            ts = ts2;

            for (int x = 0; x < fields.length; x++) {
                if (fields[x] != NetFSE.FIELD_START_TS) {

                    sql = "BEGIN; CREATE TEMP TABLE " + tableName + "_" + globalFields[x].getName() + " AS SELECT "
                            + globalFields[x].getName() + ", offset_hex FROM " + tableName + " a, " + tableName
                            + "_offsets b where a.record_id=b.record_id order by " + globalFields[x].getName()
                            + " asc; COPY " + tableName + "_" + globalFields[x].getName() + " TO stdout; ROLLBACK; ";
                    // System.out.println(sql);

                    tempFile1 = File.createTempFile("dump", globalFields[x].getName());
                    tempFile2 = File.createTempFile("sqldump", globalFields[x].getName());

                    sqlOut = new PrintWriter(new FileWriter(tempFile2));
                    sqlOut.println(sql);
                    sqlOut.flush();
                    sqlOut.close();

                    shOut = new PrintWriter(new FileWriter(tempFile1));
                    shOut.println(NetFSE.PSQL + " -U " + NetFSE.DYNAMIC_DB_USER + " "
                            + toolInfo.getName().toLowerCase() + " < " + tempFile2.getAbsolutePath() + " > /tmp/"
                            + globalFields[x].getName() + ".dump");
                    shOut.flush();
                    shOut.close();

                    NetFSEUtil.execAndWait("sh " + tempFile1.getAbsolutePath());

                    tempFile2.delete();
                    tempFile1.delete();

                    ts2 = System.currentTimeMillis();
                    ms = ts2 - ts;
                    sec = (float) ms / (float) 1000;
                    System.out.println("Temp table for " + globalFields[x].getName() + " took " + sec + " seconds.");
                    ts = ts2;

                    in = new LineNumberReader(new FileReader("/tmp/" + globalFields[x].getName() + ".dump"));
                    line = in.readLine();
                    while (line != null) {
                        String[] tokens = line.split("\t");
                        if (tokens.length == 2) {

                            String offStr = tokens[1];

                            long key = -1;

                            if (globalFields[x].getId() == NetFSE.FIELD_DST_IP
                                    || globalFields[x].getId() == NetFSE.FIELD_SRC_IP) {
                                key = NetFSEUtil.convertIPtoLong(tokens[0]);
                            } else {
                                key = Long.parseLong(tokens[0]);
                            }
                            // System.out.println(globalFields[x].length);
                            if (key >= 0) {
                                String keyStr = "";
                                if (globalFields[x].getLength() == 8)
                                    keyStr = Long.toHexString(key);
                                else if (globalFields[x].getLength() == 4)
                                    keyStr = Integer.toHexString((int) key);

                                while (keyStr.length() < globalFields[x].getLength())
                                    keyStr = "0" + keyStr;
                                dumps[x].println(offStr + keyStr);
                                // System.out.println(offStr + keyStr);
                            }

                            // out.write(recBytes);
                            // out.write('\n');
                            // offset += recBytes.length + 1;
                        }
                        line = in.readLine();
                    }

                    ts2 = System.currentTimeMillis();
                    ms = ts2 - ts;
                    sec = (float) ms / (float) 1000;
                    System.out.println("Offsets for " + globalFields[x].getName() + " took " + sec + " seconds.");
                    ts = ts2;

                }
            }

            for (int i = 0; i < dumps.length; i++) {
                dumps[i].flush();
                dumps[i].close();
                if (fields[i] != NetFSE.FIELD_START_TS) {

                }
            }
            // out.close();

            dos.writeInt(1);

            dos.writeInt(minTime);
            dos.writeInt(maxTime);

            int val = dis.readInt();

            tempConnection.close();
            s.close();

            ts2 = System.currentTimeMillis();
            ms = ts2 - dumpTS;
            sec = (float) ms / (float) 1000;

            System.out.println("DataID " + dataID + ": dumped " + count + " records for " + toolInfo.getName() + "("
                    + sec + " seconds)");

        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    /***************************************************************************
     * Migrates the oldest full dynamic table for the given tool/sensor from
     * the PostgreSQL staging database to flat files on local disk: one data
     * file containing the raw records (sorted by startts) and one
     * "index.dump" file per indexed field, each line holding an 8-hex-char
     * record offset followed by a fixed-width hex key.
     *
     * The heavy sorting is delegated to psql via generated shell scripts
     * (CREATE TEMP TABLE ... ORDER BY ...; COPY ... TO stdout) whose output
     * files under /tmp are parsed back line by line.
     *
     * @param toolInfo   tool (record type) whose database is being dumped
     * @param sensorInfo sensor that produced the data
     * @param m          month (1-12) used to build the output paths
     * @param d          day of month used to build the output paths
     * @param y          year (YYYY) used to build the output paths
     * @param run        run number used to build the output paths
     * @param s          open JDBC Statement on the tool's dynamic database
     * @return the name of the table that was dumped, "" if there was nothing
     *         to migrate, or null if an unexpected error occurred
     */
    private String dumpLocal(ToolInfo toolInfo, SensorInfo sensorInfo, int m, int d, int y, int run, Statement s) {
        long dumpTS = System.currentTimeMillis();
        try {

            String dataID = m + "_" + d + "_" + y + "_" + run + "_" + sensorInfo.getSensorID();
            System.out.println("DataID " + dataID + ": Starting dump ");

            String sensorStr = "s" + sensorInfo.getSensorID() + "_";
            String nodeStr = "_n" + NetFSE.NODE_ID + "_";

            /*******************************************************************
             * Scan the catalog for this sensor/node's dynamic tables (named
             * sSENSOR_nNODE_rRUN) and pick the one with the lowest run
             * number: the oldest table is the one to migrate.
             */
            String tableName = "";
            int minVal = -1;
            int minIdx = -1;
            int idx = 0;
            Vector<String> temp = new Vector<String>();
            String sql = "select table_name from information_schema.tables where table_catalog='"
                    + toolInfo.getName().toLowerCase() + "' and table_type='BASE TABLE' and table_schema='public'";
            ResultSet rs1 = s.executeQuery(sql);
            while (rs1.next()) {
                String name = rs1.getString(1);
                if (name.startsWith(sensorStr) && name.indexOf(nodeStr) > 0) {
                    String[] tokens = name.split("_");
                    if (tokens.length == 3) {
                        try {
                            int val = Integer.parseInt(tokens[2].substring(1));
                            if (minVal == -1 || val < minVal) {
                                minIdx = idx;
                                minVal = val;
                                tableName = name;
                            }
                            temp.addElement(name);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        idx++;
                    }
                }
            }
            rs1.close();

            // FIX: previously removeElementAt(minIdx) ran unconditionally,
            // so zero matching tables meant removeElementAt(-1) and an
            // ArrayIndexOutOfBoundsException swallowed by the outer catch.
            if (temp.isEmpty()) {
                return "";
            }
            temp.removeElementAt(minIdx);

            if (temp.size() == 0) {
                // Only one dynamic table exists; it is still receiving data,
                // so there is nothing to migrate yet.
                return "";
            } else {
                System.out.println("DataID " + dataID + ": Dumping " + tableName);
            }

            // The per-type record class knows which fields get indexed.
            NetFSERecord rec = NetFSE.getTypeClass(toolInfo.getId());
            int[] fields = rec.getIndexedFields();
            GlobalField[] globalFields = new GlobalField[fields.length];
            for (int i = 0; i < fields.length; i++) {
                globalFields[i] = NetFSE.getGlobalField(fields[i]);
            }

            /*******************************************************************
             * 1. Sort by startts 2. Add a unique ID to each record by adding
             * auto-inc field to temp table 3. create offsets during iteration
             * on startts and update new offset column by unique id 4. Now
             * process remaining fields: sort by field name, offset
             */
            int starttsIdx = -1;
            for (int i = 0; i < fields.length; i++) {
                if (fields[i] == NetFSE.FIELD_START_TS) {
                    starttsIdx = i;
                }
            }

            String indexPath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/index/" + y + "/"
                    + m + "/" + d + "_" + run + "/";
            String dataFilePath = LocalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/" + y + "/" + m
                    + "/" + d + "_" + run + "." + toolInfo.getName();
            String[] indexDumpPaths = new String[fields.length];
            PrintWriter[] dumps = new PrintWriter[fields.length];
            for (int i = 0; i < fields.length; i++) {
                setupDirectories(LocalDir, toolInfo.getName(), sensorInfo.getName(), m, d, y, run, fields[i]);
                indexDumpPaths[i] = indexPath + NetFSE.getFieldName(fields[i]) + "/index.dump";
                dumps[i] = new PrintWriter(new FileWriter(indexDumpPaths[i]));
            }
            DataOutputStream out = new DataOutputStream(new FileOutputStream(dataFilePath));

            long ts = System.currentTimeMillis();

            // Rebuild the record_id column/sequence and the offsets side
            // table from scratch; failures below just mean the object did
            // not exist yet, so they are deliberately ignored.
            try {
                s.execute("alter table " + tableName + " drop column record_id");
            } catch (Exception e) {
                // Column did not exist yet.
            }

            try {
                s.execute("drop sequence " + tableName + "_seq");
            } catch (Exception e) {
                // Sequence did not exist yet.
            }

            s.execute("create sequence " + tableName + "_seq");

            try {
                s.execute("drop table " + tableName + "_offsets");
            } catch (SQLException sqle) {
                // Offsets table did not exist yet.
            }

            s.execute("create table " + tableName + "_offsets (record_id bigint, offset_hex varchar(8))");

            s.execute("create index " + tableName + "_offsets_idx on " + tableName + "_offsets (record_id)");

            s.execute("alter table " + tableName + " add column record_id bigint default nextval('" + tableName
                    + "_seq');");

            s.execute("create index " + tableName + "_recid_idx on " + tableName + " (record_id)");

            long ts2 = System.currentTimeMillis();
            long ms = ts2 - ts;
            float sec = (float) ms / (float) 1000;
            ts = ts2;

            sql = "select count(*) as a, min(startts) as b, max(startts) as c from " + tableName + "";
            System.out.println(sql);
            ResultSet rs = s.executeQuery(sql);
            rs.next();
            long recordCount = rs.getInt(1);
            if (recordCount == 0) {
                // Oldest table is empty: drop it, deregister it, and retry
                // with the next-oldest table. Close everything opened so far
                // before recursing so nothing leaks.
                rs.close();
                out.close();
                for (int i = 0; i < dumps.length; i++) {
                    dumps[i].close();
                }
                s.execute("drop table " + tableName);
                MetaDatabase mdb = MetaDatabase.create();
                mdb.removeTempTable(tableName);
                mdb.closeConnection();
                // FIX: the recursive result was previously discarded and
                // execution fell through onto the just-dropped table.
                return dumpLocal(toolInfo, sensorInfo, m, d, y, run, s);
            }
            rs.close();

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Count took " + sec + " seconds.");
            ts = ts2;

            // Byte offset of the current record within the data file.
            // NOTE(review): int limits a single dump to 2 GB; the 8-hex-char
            // fixed-width index format caps it at 4 GB regardless.
            int offset = 0;

            sql = "BEGIN; CREATE TEMP TABLE " + tableName
                    + "_startts AS SELECT record_id, startts, Record, Record_Hex FROM " + tableName
                    + " order by startts asc; COPY " + tableName + "_startts TO stdout; ROLLBACK;";

            File tempFile1 = File.createTempFile("dump", "startts");
            File tempFile2 = File.createTempFile("sqldump", "startts");

            PrintWriter sqlOut = new PrintWriter(new FileWriter(tempFile2));
            sqlOut.println(sql);
            sqlOut.flush();
            sqlOut.close();

            PrintWriter shOut = new PrintWriter(new FileWriter(tempFile1));
            shOut.println(NetFSE.PSQL + " -U " + NetFSE.DYNAMIC_DB_USER + " " + toolInfo.getName().toLowerCase()
                    + " < " + tempFile2.getAbsolutePath() + " > /tmp/startts.dump");
            shOut.flush();
            shOut.close();

            NetFSEUtil.execAndWait("sh " + tempFile1.getAbsolutePath());

            tempFile2.delete();
            tempFile1.delete();

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Temp table for startts took " + sec + " seconds.");
            ts = ts2;

            // Parse the startts-sorted COPY output: write each raw record to
            // the data file, record its offset in the _offsets table, and
            // emit an offset+timestamp line into the startts index dump.
            LineNumberReader in = new LineNumberReader(new FileReader("/tmp/startts.dump"));
            s.execute("BEGIN");
            int count = 0;
            String line = in.readLine();
            while (line != null) {
                String[] tokens = line.split("\t");
                if (tokens.length == 3) {
                    count++;
                    String recID = tokens[0];
                    byte[] recBytes = tokens[2].getBytes();
                    String offStr = Long.toHexString(offset);
                    while (offStr.length() < 8)
                        offStr = "0" + offStr;

                    // (dead leftover batching code that appended "," to sql
                    // and then immediately overwrote it has been removed)
                    sql = "insert into " + tableName + "_offsets values (" + recID + ",'" + offStr + "')";
                    s.execute(sql);

                    // startts key: Unix seconds, zero-padded to field width.
                    String keyStr = Long.toHexString(Timestamp.valueOf(tokens[1]).getTime() / (long) 1000);
                    while (keyStr.length() < globalFields[starttsIdx].getLength())
                        keyStr = "0" + keyStr;

                    dumps[starttsIdx].println(offStr + keyStr);

                    out.write(recBytes);
                    out.write('\n');
                    offset += recBytes.length + 1;
                }
                line = in.readLine();
            }
            in.close(); // FIX: reader was previously left open
            s.execute("COMMIT");
            out.close();

            ts2 = System.currentTimeMillis();
            ms = ts2 - ts;
            sec = (float) ms / (float) 1000;
            System.out.println("Offsets for startts took " + sec + " seconds.");
            ts = ts2;

            // For every other indexed field: sort by that field (joining in
            // the offsets computed above) and emit offset+key index lines.
            for (int x = 0; x < fields.length; x++) {
                if (fields[x] != NetFSE.FIELD_START_TS) {

                    sql = "BEGIN; CREATE TEMP TABLE " + tableName + "_" + globalFields[x].getName() + " AS SELECT "
                            + globalFields[x].getName() + ", offset_hex FROM " + tableName + " a, " + tableName
                            + "_offsets b where a.record_id=b.record_id order by " + globalFields[x].getName()
                            + " asc; COPY " + tableName + "_" + globalFields[x].getName() + " TO stdout; ROLLBACK; ";

                    tempFile1 = File.createTempFile("dump", globalFields[x].getName());
                    tempFile2 = File.createTempFile("sqldump", globalFields[x].getName());

                    sqlOut = new PrintWriter(new FileWriter(tempFile2));
                    sqlOut.println(sql);
                    sqlOut.flush();
                    sqlOut.close();

                    shOut = new PrintWriter(new FileWriter(tempFile1));
                    shOut.println(NetFSE.PSQL + " -U " + NetFSE.DYNAMIC_DB_USER + " "
                            + toolInfo.getName().toLowerCase() + " < " + tempFile2.getAbsolutePath() + " > /tmp/"
                            + globalFields[x].getName() + ".dump");
                    shOut.flush();
                    shOut.close();

                    NetFSEUtil.execAndWait("sh " + tempFile1.getAbsolutePath());

                    tempFile2.delete();
                    tempFile1.delete();

                    ts2 = System.currentTimeMillis();
                    ms = ts2 - ts;
                    sec = (float) ms / (float) 1000;
                    System.out.println("Temp table for " + globalFields[x].getName() + " took " + sec + " seconds.");
                    ts = ts2;

                    in = new LineNumberReader(new FileReader("/tmp/" + globalFields[x].getName() + ".dump"));
                    line = in.readLine();
                    while (line != null) {
                        String[] tokens = line.split("\t");
                        if (tokens.length == 2) {

                            String offStr = tokens[1];

                            long key = -1;

                            // IP fields arrive as dotted quads; everything
                            // else is expected to already be numeric.
                            if (globalFields[x].getId() == NetFSE.FIELD_DST_IP
                                    || globalFields[x].getId() == NetFSE.FIELD_SRC_IP) {
                                key = NetFSEUtil.convertIPtoLong(tokens[0]);
                            } else {
                                key = Long.parseLong(tokens[0]);
                            }
                            if (key >= 0) {
                                String keyStr = "";
                                if (globalFields[x].getLength() == 8)
                                    keyStr = Long.toHexString(key);
                                else if (globalFields[x].getLength() == 4)
                                    keyStr = Integer.toHexString((int) key);

                                while (keyStr.length() < globalFields[x].getLength())
                                    keyStr = "0" + keyStr;
                                dumps[x].println(offStr + keyStr);
                            }
                        }
                        line = in.readLine();
                    }
                    in.close(); // FIX: reader was previously left open

                    ts2 = System.currentTimeMillis();
                    ms = ts2 - ts;
                    sec = (float) ms / (float) 1000;
                    System.out.println("Offsets for " + globalFields[x].getName() + " took " + sec + " seconds.");
                    ts = ts2;

                }
            }

            for (int i = 0; i < dumps.length; i++) {
                dumps[i].flush();
                dumps[i].close();
            }

            ts2 = System.currentTimeMillis();
            ms = ts2 - dumpTS;
            sec = (float) ms / (float) 1000;

            System.out.println("DataID " + dataID + ": dumped " + count + " records for " + toolInfo.getName() + "("
                    + sec + " seconds)");
            return tableName;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /***************************************************************************
     * Parses one text line into a typed record by delegating to the
     * prototype record's generate() factory.
     *
     * @param line   raw input line to parse
     * @param record prototype record whose generate() builds the result
     * @param sensor sensor ID to stamp on the generated record
     * @return the record produced by {@code record.generate(line, sensor)}
     * @throws Exception if parsing fails
     */
    private NetFSERecord getRecord(String line, NetFSERecord record, int sensor) throws Exception {
        return record.generate(line, sensor);
    }

   
    /***************************************************************************
     * Returns the name (TYPE_SENSOR_RUN) of the latest dynamic table
     * receiving incoming data. Synchronized against setMaxRunName().
     */
    public synchronized String getMaxRunName() {
        return this.maxRunName;
    }

    /***************************************************************************
     * Sets the name of the latest dynamic table.
     *
     * @deprecated misspelled duplicate of {@link #setMaxRunName(String)};
     *             retained only for backward compatibility with existing
     *             callers. Now delegates instead of duplicating the
     *             assignment so the two entry points cannot diverge.
     */
    @Deprecated
    public synchronized void voidsetMaxRunName(String name) {
        setMaxRunName(name);
    }

    /***************************************************************************
     * (Re)establishes the temporary JDBC connection to the named PostgreSQL
     * database on the local host, closing any previously open connection
     * first.
     *
     * @param database name of the database to connect to
     * @throws Exception if the driver cannot be loaded or the connect fails
     */
    private void reconnect(String database) throws Exception {
        // FIX: previously the old connection was overwritten without being
        // closed, leaking a JDBC connection on every reconnect.
        if (tempConnection != null) {
            try {
                tempConnection.close();
            } catch (SQLException ignored) {
                // Best effort: a close failure must not block the reconnect.
            }
        }

        Properties props = new Properties();
        props.setProperty("user", NetFSE.DYNAMIC_DB_USER);
        props.setProperty("password", NetFSE.DYNAMIC_DB_PASSWORD);
        props.setProperty("client_encoding", "UTF8");

        Class.forName("org.postgresql.Driver");
        String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + database;
        tempConnection = DriverManager.getConnection(url, props);
    }

    /***************************************************************************
     * Records the name (TYPE_SENSOR_RUN) of the latest dynamic table
     * receiving incoming data. Synchronized against getMaxRunName().
     */
    public synchronized void setMaxRunName(String name) {
        maxRunName = name;
    }

    private int insertDynamic(ToolInfo toolInfo, SensorInfo sensorInfo, DataInputStream dis, DataOutputStream dos) {
        long count = 0; // Number of records successfully read
        long localWrites = 0; // Number of records written locally
        MetaDatabase mdb = null; // Connection to the main NetFSE
        Statement tempStatement = null;

        // meta-database
        try {
            long startMS = System.currentTimeMillis(); // Unix time in MS of
            String[] nodeNames = null; // Nodes connected to for distributed
            long minTime = -1; // Unix time of the earliest record in the data
            long maxTime = -1; // Unix time of the latest record in the data

            dis = new DataInputStream(sock.getInputStream());
            dos = new DataOutputStream(sock.getOutputStream());

            // Read flag to determine if to use distributed processing
            boolean distribute = false;
            int dist = dis.readInt();
            if (dist != 0)
                distribute = true;

            // Read flag to determine if to index with this node
            boolean self = false;
            int slf = dis.readInt();
            if (slf != 0)
                self = true;

            self = true;
            distribute = false;

            // Read flag to determine if this is the master node
            boolean master = false;
            int mst = dis.readInt();
            if (mst != 0)
                master = true;

            sock.setReceiveBufferSize(NetFSE.SOCKET_RECV_SIZE);
            if (distribute)
                sock.setSendBufferSize(NetFSE.SOCKET_SEND_SIZE);

            // The following fields are read to identify the data stream
            int m = dis.readInt(); // Month of this data stream (1-12)
            int d = dis.readInt(); // Day of month of this data stream (1-31)
            int y = dis.readInt(); // Year of this data stream (YYYY)
            int run = dis.readInt(); // Run number of the data stream

            GregorianCalendar now = new GregorianCalendar();
            now.setTimeInMillis(System.currentTimeMillis());
            m = now.get(Calendar.MONTH) + 1;
            d = now.get(Calendar.DAY_OF_MONTH);
            y = now.get(Calendar.YEAR);

            String messageHeader = "[Dynamic " + toolInfo.getName() + "." + sensorInfo.getName() + " " + y + "-" + m
                    + "-" + d + ", " + run + "] ";

            mdb = MetaDatabase.create();

            String[] connectedNodes = null;
            if (distribute) {

                int[] values = { sensorInfo.getSensorID(), DataSocket.OP_INSERT, 0, 1, 0, m, d, y, run };

                // Sends header, sets doss and nodes arrays
                nodeNames = connectToBuildNodes(9097, values, sensorInfo.getSensorID());
                connectedNodes = new String[nodeNames.length];
                for (int i = 0; i < nodeNames.length; i++) {
                    connectedNodes[i] = nodeNames[i];
                    // System.out.println(messageHeader + "Connected to " +
                    // connectedNodes[i]);
                }
            }

            // self = false; // turning this off to test distributed processing

            int numNodes = 0;
            if (nodeNames != null)
                numNodes = nodeNames.length;
            if (self) {
                numNodes++;
            }

            NetFSERecord rec = ((NetFSERecord) NetFSE.TYPE_CLASSES.get(new Integer(toolInfo.getId())));

            if (numNodes == 0) {
                self = true;
                distribute = false;
                numNodes = 1;
            }

            tempStatement = null;
            String tableSQL = "";
            final long maxRecordsInRun = NetFSE.TEMP_TABLE_SIZE;
            long numRecordsInRun = 0;
            int maxRun = 0;
            String maxRunName = "";

            if (self) {

                Properties props = new Properties();
                props.setProperty("user", NetFSE.DYNAMIC_DB_USER);
                props.setProperty("password", NetFSE.DYNAMIC_DB_PASSWORD);
                props.setProperty("client_encoding", "UTF8");

                try {
                    try {
                        Class.forName("org.postgresql.Driver");
                        String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + toolInfo.getName().toLowerCase();
                        tempConnection = DriverManager.getConnection(url, props);
                        tempStatement = tempConnection.createStatement();
                        purgeStatement = tempConnection.createStatement();
                    } catch (SQLException sqle) {
                        sqle.printStackTrace();

                        String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + NetFSE.METADATA_DATABASE;

                        tempConnection = DriverManager.getConnection(url, props);
                        tempStatement = tempConnection.createStatement();
                        tempStatement.execute("create database " + toolInfo.getName().toLowerCase());
                        tempStatement.close();
                        tempConnection.close();

                        url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + toolInfo.getName().toLowerCase();

                        tempConnection = DriverManager.getConnection(url, props);
                        tempStatement = tempConnection.createStatement();
                        purgeStatement = tempConnection.createStatement();
                        
                        try {
                            tempStatement.execute("create sequence('searchid')");
                        } catch (Exception e) {

                        }

                    }
                    tableSQL = "select table_name from information_schema.tables where table_catalog='"
                            + toolInfo.getName().toLowerCase()
                            + "' and table_type='BASE TABLE' and table_schema='public'";

                    // System.out.println(tableSQL);

                    String sensorStr = "s" + sensorInfo.getSensorID() + "_";
                    String nodeStr = "_n" + NetFSE.NODE_ID + "_";

                    ResultSet rs3 = tempStatement.executeQuery(tableSQL);
                    while (rs3.next()) {
                        String name = rs3.getString(1);
                        boolean matchesSensor = false;
                        if (name.startsWith(sensorStr) && name.indexOf(nodeStr) > 0) {
                            String[] tokens = name.split("_");
                            if (tokens.length == 3 && !name.endsWith("Old") && !name.endsWith("New")) {
                                int sensor = Integer.parseInt(tokens[0].substring(1));
                                int node = Integer.parseInt(tokens[1].substring(1));
                                if ((sensor == sensorInfo.getSensorID()) && (node == NetFSE.NODE_ID)) {
                                    int value = Integer.parseInt(tokens[2].substring(1));
                                    if (value > maxRun) {
                                        maxRun = value;
                                        maxRunName = name;
                                    }
                                }
                            }
                        }
                    }
                    rs3.close();
                    // maxRun++;

                    if (maxRun == 0) {
                        maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r1";
                        tableSQL = rec.getTempTableSQL(maxRunName);
                        try {
                            tempStatement.execute(tableSQL);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }

                        String[] indexSQL = rec.getTempIndexSQL(maxRunName);
                        for (int i = 0; i < indexSQL.length; i++)
                            try {
                                tempStatement.execute(indexSQL[i]);
                            } catch (Exception e) {
                                e.printStackTrace();
                            }

                        mdb.addTempTable(maxRunName, NetFSE.NODE_ID, sensorInfo.getSensorID());
                        maxRun = 1;
                    } else {

                        maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r" + maxRun;

                    }

                    setMaxRunName(maxRunName);

                    rs3 = tempStatement.executeQuery("select count(*) as a from " + maxRunName);
                    if (rs3.next())
                        numRecordsInRun = rs3.getInt(1);
                    rs3.close();

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            long nodeWrites = 0; // Number of records written to remote data
            // servers
            int node = 0; // Used to round robin records to remote nodes
            long ts = System.currentTimeMillis(); // Unix time in MS used to
            // monitor throughput
            int len = 0;
            try {
                /***************************************************************
                 * Read records from the socket until a value of -1 is sent by
                 * the client (or some failure causes an exception to be
                 * thrown). The client will send an int value specifying the
                 * length of the record in bytes followed by the bytes
                 * themselves. For each record, read the length and then grab
                 * the bytes.
                 */
                while (true) {
                    try {
                        len = dis.readInt();
                    } catch (Exception e) {
                        len = -1;
                    }
                    if (len == -1) {
                        break;
                    }
                    int bytesRead = 0;
                    byte[] dataBytes = new byte[len];

                    /***********************************************************
                     * Read bytes from the socket connection into an array
                     * (dataBytes) of length len until the specified length is
                     * met.
                     */
                    while (bytesRead < len)
                        bytesRead += dis.read(dataBytes, bytesRead, len - bytesRead);

                    if (bytesRead != len) {
                        System.err.println(messageHeader + "Bad data, not enough bytes.");
                    } else {
                        /*******************************************************
                         * Determine which node to send the record to. Currently
                         * this is just round-robin. A more sophisticated load
                         * balancer should be implemented here.
                         */
                        if (numNodes > 1)
                            node = (int) (count % numNodes);
                        else
                            node = 0;

                        NetFSERecord record = null;
                        try {

                            /***************************************************
                             * Try to parse the record. An exception should be
                             * thrown by the NetFSERecord subclass if the record
                             * cannot be parsed.
                             */
                            record = getRecord(new String(dataBytes), rec, sensorInfo.getSensorID());

                            /***************************************************
                             * See if we have exceeded the threshold. If
                             * incrementCount() returns false then it has been
                             * exceeded.
                             */
                            if (!parent.incrementCount()) {
                                /***********************************************
                                 * Check to see if it is a new day, if not then
                                 * null the record so that it will not get
                                 * processed.
                                 */
                                if (!parent.checkNewDay())
                                    record = null;
                            }
                        } catch (Exception e) {
                            record = null;
                            System.out.println("Bad record (" + sensorInfo.getName() + "): '" + new String(dataBytes)
                                    + "'");
                            e.printStackTrace();
                        }

                        /*******************************************************
                         * Records that could not be parsed will be null and
                         * ignored. Also if the record threshold is exceeded
                         * records will be null. If not null then process the
                         * record.
                         */
                        if (record != null) {
                            count++;
                            numRecordsInRun++;

                            if ((node == numNodes - 1) && (self)) {

                                /***********************************************
                                 * Check to see if the local dynamic table is
                                 * full. If so, create a new one and start
                                 * putting records in the new table. This is how
                                 * Net/FSE maintains near-constant insert
                                 * performance.
                                 */
                                if (numRecordsInRun > maxRecordsInRun) {
                                    numRecordsInRun = 0;

                                    if (self) {

                                        /***************************************
                                         * Store the last run name for later,
                                         * generate the new run name and create
                                         * the new dynamic table.
                                         */
                                        String prevRunName = maxRunName;
                                        maxRun++;
                                        maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r"
                                                + maxRun;
                                        setMaxRunName(maxRunName);
                                        tableSQL = rec.getTempTableSQL(maxRunName);
                                        try {
                                            tempStatement.execute(tableSQL);
                                        } catch (Exception e) {
                                            e.printStackTrace();
                                        }

                                        /***************************************
                                         * Create the indexes for the new
                                         * dynamic table.
                                         */
                                        String[] indexSQL = rec.getTempIndexSQL(maxRunName);
                                        for (int i = 0; i < indexSQL.length; i++)
                                            try {
                                                tempStatement.execute(indexSQL[i]);
                                            } catch (Exception e) {
                                                e.printStackTrace();
                                            }

                                        // Add the metadata entry for this table
                                        mdb.addTempTable(maxRunName, NetFSE.NODE_ID, sensorInfo.getSensorID());

                                        /***************************************
                                         * Now compute some basic stats about
                                         * the run. This is used for a variety
                                         * of stats and dashboard features. More
                                         * processing and functionality will be
                                         * added here in the future, or broken
                                         * off into a separate module/thread for
                                         * more complex mining of the database
                                         * table.
                                         */
                                        ResultSet rs = tempStatement
                                                .executeQuery("select min(startts) as a, max(startts) as b, count(*) as c from "
                                                        + prevRunName);
                                        rs.next();
                                        java.sql.Timestamp minTS = rs.getTimestamp("a");
                                        java.sql.Timestamp maxTS = rs.getTimestamp("b");
                                        long recordCount = rs.getLong("c");
                                        mdb.setTempTableTimestamps(prevRunName, minTS, maxTS);
                                        RunStats stats = new RunStats();
                                        stats.sensorID = sensorInfo.getSensorID();
                                        stats.typeID = sensorInfo.getToolID();
                                        stats.minTS = minTS;
                                        stats.maxTS = maxTS;
                                        stats.count = recordCount;
                                        StatsServer.addRunStats(stats);
                                        rs.close();
                                    }

                                }

                                /***********************************************
                                 * If the record threshold has not been
                                 * exceeded, call purgeRecords() to flush the
                                 * last 100 records.
                                 */
                                if (addRecord(record.getTempInsertSQL()) == 100)
                                    purgeRecords(database, maxRunName);

                                localWrites++;
                            } else if (numNodes == 1) {
                                /***********************************************
                                 * This is the case where there is only one node
                                 * processing data and the master (this thread)
                                 * is not.
                                 */
                                doss[0].writeInt(dataBytes.length);
                                doss[0].write(dataBytes);
                                nodeWrites++;
                            } else {
                                /***********************************************
                                 * This is the multi-node case. Write the record
                                 * to the next node. Currently this is just
                                 * round-robin.
                                 */
                                doss[node].writeInt(dataBytes.length);
                                doss[node].write(dataBytes);
                                nodeWrites++;
                            }

                            float postgresFullPercent = 0;
                            float globalDirFullPercent = 0;
                            if (count % 10000 == 0) {
                                postgresFullPercent = NetFSE.getPostgresDiskFullPercent();
                                globalDirFullPercent = NetFSE.getDiskFullPercent(GlobalDir);

                                while (globalDirFullPercent > 90.0f) {
                                    System.out.println(messageHeader + " " + GlobalDir
                                            + " almost out of disk. Waiting for DiskManager to free up space.");
                                    globalDirFullPercent = DiskManager.reclaimSpace(GlobalDir);
                                    Thread.sleep(1000);
                                }

                                while (postgresFullPercent > 90.0f) {
                                    System.out
                                            .println(messageHeader
                                                    + " PostgreSQL almost out of disk. Waiting for AutoMigrate to free up space.");
                                    postgresFullPercent = NetFSE.getPostgresDiskFullPercent();
                                    Thread.sleep(30000);
                                }

                            }

                            /***************************************************
                             * This code block outputs system usage stats to
                             * stdout every 10,000 records. This can be
                             * commented out to reduce server log output.
                             */
                            if (count % 10000 == 0) {
                                long ts2 = System.currentTimeMillis();
                                long ms = ts2 - ts;
                                long elapsedMS = ts2 - startMS;
                                float elapsedSec = (float) elapsedMS / (float) 1000;
                                float sec = (float) ms / (float) 1000;
                                float sps1 = (float) count / elapsedSec;
                                float sps2 = (float) 10000 / sec;
                                System.out.println(messageHeader + " Total Records="
                                        + NetFSEUtil.formattedInteger(count) + "; Current RPS= "
                                        + NetFSEUtil.formattedDouble(sps2) + "; Average RPS= "
                                        + NetFSEUtil.formattedDouble(sps1));
                                System.out.println(messageHeader + " PostgreSQL partition is "
                                        + NetFSEUtil.formattedDouble(postgresFullPercent) + "% full; " + GlobalDir
                                        + " partition is " + NetFSEUtil.formattedDouble(globalDirFullPercent)
                                        + "% full; ");
                                ts = System.currentTimeMillis();
                            }
                        }
                    }
                } // End of main record-reading loop

            } catch (Exception E) {
                E.printStackTrace();
            }

            // Now that the loop is done, purge any left over records
            purgeRecords(database, maxRunName);

            try {
                /***************************************************************
                 * Write a value of 1 to the data client to signal completion.
                 * The client should wait but this is in a try block just in
                 * case the client/agent was not implemented correctly.
                 */
                dos.writeInt(1);
                dos.flush();
            } catch (Exception e) {

            }

            /*******************************************************************
             * When finished processing, the master should update the final
             * record counts and close down connections to all remote nodes.
             */
            if (master) {
                parent.updateRecordCounts();

                if (doss != null) {
                    for (int i = 0; i < doss.length; i++) {
                        try {
                            doss[i].writeInt(-1);
                            doss[i].flush();
                            doss[i].close();
                            diss[i].close();
                        } catch (Exception e) {

                        }
                    }
                }
            }

            /*******************************************************************
             * Output the final stats for this data stream.
             */
            System.out.println(messageHeader + "Records: Local=" + localWrites + "; Remote=" + nodeWrites + "; Bad="
                    + (count - localWrites - nodeWrites) + "");

        } catch (Exception E) {
            E.printStackTrace();
        } finally {
            /*******************************************************************
             * Before returning, or following an unexpected error, clean up
             * system resources (database connections, sockets, etc).
             */
            try {
                tempStatement.close();
            } catch (Exception e) {

            }
            try {
                purgeStatement.close();
            } catch (Exception e) {

            }
            try {
                tempConnection.close();
            } catch (Exception e) {

            }
            try {
                mdb.closeConnection();
            } catch (Exception e) {

            }

            try {
                if (doss != null) {
                    // Close output streams
                    for (int i = 0; i < doss.length; i++) {
                        try {
                            doss[i].writeInt(-1);
                        } catch (Exception e) {
                            // no op
                        }
                        doss[i].close();
                    }
                }
                if (diss != null) {
                    // Close input streams
                    for (int i = 0; i < diss.length; i++)
                        diss[i].close();
                }
                if (socks != null) {
                    // Close sockets
                    for (int i = 0; i < socks.length; i++)
                        socks[i].close();
                }

            } catch (Exception e) {
                e.printStackTrace();
            }

        }
        return 0;

    }

    /***************************************************************************
     * The master node builds the static b+ tree index structures, first sending
     * requests for remote nodes to dump their temporary files which are then
     * merged by the master node. This process can utilize considerable system
     * resources. Database tuning can have a dramatic impact on this aspect of
     * the system, in this case the more memory the better.
     * 
     * Note: migration can run concurrently but not for the same sensor.
     * 
     * @param toolInfo
     *            Metadata describing the tool (data type) being migrated
     * @param sensorInfo
     *            Metadata describing the sensor whose data is being migrated
     * @param dis
     *            Input stream from the client (reassigned from this.sock)
     * @param dos
     *            Output stream to the client (reassigned from this.sock)
     * @return 0 on success or when there is nothing to migrate; -5 on error
     */
    private int migrate(ToolInfo toolInfo, SensorInfo sensorInfo, DataInputStream dis, DataOutputStream dos) {
        // Declared outside the try so the finally block can always close them.
        // NOTE: this local intentionally shadows the tempConnection field.
        Connection tempConnection = null;
        Statement tempStatement = null;
        PrintWriter ibInput = null;
        try {
            long startTS = System.currentTimeMillis();
            dis = new DataInputStream(sock.getInputStream());
            dos = new DataOutputStream(sock.getOutputStream());

            sock.setReceiveBufferSize(NetFSE.SOCKET_RECV_SIZE);

            GregorianCalendar now = new GregorianCalendar();
            now.setTimeInMillis(System.currentTimeMillis());
            int m = now.get(Calendar.MONTH) + 1;
            int d = now.get(Calendar.DAY_OF_MONTH);
            int y = now.get(Calendar.YEAR);

            Properties props = new Properties();
            props.setProperty("user", NetFSE.METADATA_USER);
            props.setProperty("password", NetFSE.METADATA_PASSWORD);
            props.setProperty("client_encoding", "UTF8");

            JDBCDatabase tempDB = new JDBCDatabase("jdbc:postgresql://" + NetFSE.METADATA_HOST + "/"
                    + NetFSE.METADATA_DATABASE, props);
            Statement s = tempDB.createStatement();

            /*******************************************************************
             * Determine the next run number for this sensor from a PostgreSQL
             * sequence, creating the sequence on first use. The create is
             * expected to fail once the sequence already exists, so that
             * exception is deliberately ignored.
             */
            String sql = "create sequence run_s" + sensorInfo.getSensorID() + "_seq";
            try {
                s.execute(sql);
            } catch (Exception ignored) {
                // Sequence already exists -- normal on every run but the first.
            }

            sql = "select nextval('run_s" + sensorInfo.getSensorID() + "_seq')";
            ResultSet rs = s.executeQuery(sql);
            rs.next();
            int run = rs.getInt(1);
            rs.close();

            s.close();
            tempDB.closeConnection();

            // Reuse the same Properties object with the dynamic DB credentials.
            props.setProperty("user", NetFSE.DYNAMIC_DB_USER);
            props.setProperty("password", NetFSE.DYNAMIC_DB_PASSWORD);
            props.setProperty("client_encoding", "UTF8");

            Class.forName("org.postgresql.Driver");
            String url = "jdbc:postgresql://" + NetFSE.LOCAL_HOST + "/" + toolInfo.getName().toLowerCase();
            tempConnection = DriverManager.getConnection(url, props);
            tempStatement = tempConnection.createStatement();

            NetFSERecord rec = NetFSE.getTypeClass(toolInfo.getId());
            int[] fields = rec.getIndexedFields();

            /*******************************************************************
             * Set up data identifiers and various file system paths.
             */
            String dataID = m + "_" + d + "_" + y + "_" + run + "_" + sensorInfo.getSensorID();
            String globalResultFilePath = GlobalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/" + y
                    + "/" + m + "/" + d + "_" + run + "." + toolInfo.getName();
            String globalIndexDirPath = GlobalDir + "/" + toolInfo.getName() + "/" + sensorInfo.getName() + "/index/"
                    + y + "/" + m + "/" + d + "_" + run;

            // Remove any stale per-field index directories from a previous attempt.
            for (int x = 0; x < fields.length; x++) {
                NetFSEUtil.execAndWait("rm -Rf " + globalIndexDirPath + "/" + NetFSE.getFieldName(fields[x]));
            }

            setupDirectories(GlobalDir, toolInfo.getName(), sensorInfo.getName(), m, d, y, run, fields);

            int minTime = -1;
            int maxTime = -1;

            /*******************************************************************
             * Dump the local dynamic tables. An empty/null table name means
             * there is no data to migrate for this sensor.
             */
            boolean gotResults = false;
            String tableName = dumpLocal(toolInfo, sensorInfo, m, d, y, run, tempStatement);
            if (tableName != null && tableName.length() > 0) {
                sql = "select min(startts), max(startts) from " + tableName;
                rs = tempStatement.executeQuery(sql);
                rs.next();
                minTime = (int) (rs.getTimestamp(1).getTime() / 1000);
                maxTime = (int) (rs.getTimestamp(2).getTime() / 1000);
                rs.close(); // FIX: this result set was previously never closed
                gotResults = true;
            }

            if (!gotResults) {
                try {
                    // Signal AutoMigrate that there is nothing to do.
                    dos.writeInt(0); // Signal complete
                    dos.flush();
                    dis.readInt();
                } catch (Exception e) {
                    e.printStackTrace();
                }

                if (doss != null) { // Close output streams
                    for (int i = 0; i < doss.length; i++)
                        try {
                            doss[i].close();
                        } catch (Exception e) {

                        }
                }
                if (diss != null) { // Close input streams
                    for (int i = 0; i < diss.length; i++)
                        try {
                            diss[i].close();
                        } catch (Exception e) {

                        }
                }
                if (socks != null) { // Close sockets
                    for (int i = 0; i < socks.length; i++)
                        try {
                            socks[i].close();
                        } catch (Exception e) {

                        }
                }

                System.gc();
                // tempStatement/tempConnection are closed in the finally block.
                return 0;
            }

            // Create the input (parameter) file for the index builder.
            File inputFile = new File(globalIndexDirPath + ".input");
            ibInput = new PrintWriter(new FileWriter(inputFile));
            ibInput.println("dataFile " + globalResultFilePath);
            ibInput.println("dataType " + toolInfo.getId());
            ibInput.println("toolName " + toolInfo.getName());
            ibInput.println("sensorName " + sensorInfo.getName());
            ibInput.println("sensorID " + sensorInfo.getSensorID());
            ibInput.println("extension " + toolInfo.getStorage());
            ibInput.println("pageSize " + 1000);
            ibInput.println("month " + m);
            ibInput.println("day " + d);
            ibInput.println("year " + y);
            ibInput.println("run " + run);
            ibInput.println("globalDir " + GlobalDir);
            ibInput.println("sharedDir " + SharedDir);
            ibInput.println("localDir " + LocalDir);
            ibInput.println("bdbCreated");
            ibInput.println("cacheSize " + 200000000);
            ibInput.println("metaDB " + NetFSE.METADATA_HOST + " metadata " + NetFSE.METADATA_USER + " "
                    + NetFSE.METADATA_PASSWORD);
            ibInput.println("tempDB " + Temp);
            ibInput.println("minTime " + minTime);
            ibInput.println("maxTime " + maxTime);
            ibInput.println("bytes " + totalBytes);
            ibInput.println("records " + totalRecords);

            // One "db" line per indexed field: index directory, field name, field id.
            for (int w = 0; w < fields.length; w++)
                ibInput.println("db " + globalIndexDirPath + "/" + NetFSE.getFieldName(fields[w]) + " "
                        + NetFSE.getFieldName(fields[w]) + " " + fields[w]);

            String[] nodeNames = { "localhost" };
            for (int x = 0; x < nodeNames.length; x++) {
                ibInput.println("node " + nodeNames[x]);
            }
            ibInput.close();

            // Now build the static b+ trees by calling the IndexBuilder.
            System.out.println("DataID " + dataID + ": Starting index build.");
            long ts2 = System.currentTimeMillis(); // Start time of the build
            int val = IndexBuilder.build(IndexBuildInput.processParms(inputFile.getAbsolutePath()));
            System.out.println("DataID " + dataID + ": Index build completed with return value " + val
                    + "; time taken was " + (System.currentTimeMillis() - ts2) / 1000 + " seconds");

            if ((val == 0) || (val == 1)) {
                System.out.println("DataID " + dataID + ": Cleaning up ");
                cleanupLocal(toolInfo, sensorInfo, m, d, y, run, tableName, tempStatement);
            } else {
                System.out.println("DataID " + dataID + ": build failed");
            }

            System.out.println("DataID " + dataID + ": migrate completed; time taken was "
                    + (System.currentTimeMillis() - startTS) / 1000 + " seconds");

            try {
                dos.writeInt(1); // Signal complete
                dos.flush();
                dis.readInt();
            } catch (Exception e) {
                e.printStackTrace();
            }

            return 0;
        } catch (Exception E) {
            System.out.println("DATA SOCKET EXITED UNSAFELY!!!");
            E.printStackTrace();
        } finally {
            // FIX: previously the statement, connection and writer leaked on
            // any exception path; close them unconditionally here.
            try {
                if (tempStatement != null)
                    tempStatement.close();
            } catch (Exception e) {

            }
            try {
                if (tempConnection != null)
                    tempConnection.close();
            } catch (Exception e) {

            }
            if (ibInput != null)
                ibInput.close();
        }
        return -5;
    }

    /***************************************************************************
     * Reads the data server's configuration file (NetFSE.CONFIG_FILE) and
     * populates the directory settings used by this socket handler. A missing
     * config file is silently ignored (defaults are kept); read/parse errors
     * are logged to stderr.
     */
    private void processConfig() {
        LineNumberReader in = null;
        try {
            File conf = new File(NetFSE.CONFIG_FILE);
            if (!conf.exists()) {
                // No config file present: keep the current defaults.
                return;
            }

            in = new LineNumberReader(new FileReader(conf));
            String line = in.readLine();
            while (line != null) {
                // Each setting is "DataSocket::key value"; keep everything
                // after the first space, trimmed.
                if (line.startsWith("DataSocket::globaldir ")) {
                    GlobalDir = line.substring(line.indexOf(' ')).trim();
                } else if (line.startsWith("DataSocket::localdir ")) {
                    LocalDir = line.substring(line.indexOf(' ')).trim();
                } else if (line.startsWith("DataSocket::shareddir ")) {
                    SharedDir = line.substring(line.indexOf(' ')).trim();
                } else if (line.startsWith("DataSocket::temp ")) {
                    Temp = line.substring(line.indexOf(' ')).trim();
                }
                line = in.readLine();
            }

        } catch (Exception E) {
            E.printStackTrace();
        } finally {
            // FIX: the old catch-path close could throw NullPointerException
            // if the FileReader constructor failed before 'in' was assigned.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                }
            }
        }
    }

    /***************************************************************************
     * Convenience overload: flushes the buffered records into the given table
     * as a single transactional batch.
     * 
     * @param database
     *            The database to reconnect to if the insert fails
     * @param table
     *            The dynamic table in which to insert the records
     * @throws Exception
     *             Propagated from the three-argument overload
     */
    public synchronized void purgeRecords(String database, String table) throws Exception {
        this.purgeRecords(database, table, /* transactional */ true);
    }

    /***************************************************************************
     * This method inserts buffered records into the dynamic table as a bulk
     * transaction for speed. To achieve true real-time processing this function
     * should not be called and all inserts should be done immediately when
     * records are parsed.
     * 
     * On failure the database connection is re-established; in the
     * transactional case the batch is then retried record-by-record so a
     * single bad record cannot discard the entire buffer.
     * 
     * @param database
     *            The database to reconnect to if an insert fails
     * @param table
     *            The dynamic table in which to insert the records
     * @param transactional
     *            If true, all buffered records are sent as one begin/commit
     *            batch; if false, each record is inserted individually
     * @throws Exception
     *             An Exception will be thrown if the Statement is bad or the
     *             database server is down.
     */
    public synchronized void purgeRecords(String database, String table, boolean transactional)
            throws Exception {
        if (records.size() == 0)
            return;

        String insert = "insert into " + table + " values ";
        if (transactional) {
            StringBuffer temp = new StringBuffer("begin;");
            try {
                for (int i = 0; i < records.size(); i++) {
                    temp.append(insert + records.elementAt(i) + ";");
                }
                temp.append("commit;");
                purgeStatement.execute(temp.toString());
            } catch (Exception e) {
                e.printStackTrace();
                System.out.println("BAD SQL: " + temp);
                try {
                    purgeStatement.close();
                    tempConnection.close();
                } catch (Exception e2) {
                    e2.printStackTrace();
                }
                reconnect(database);
                purgeStatement = tempConnection.createStatement();
                purgeStatement.execute("rollback;");
                Thread.sleep(10000);
                // Retry the buffer non-transactionally so one bad record
                // cannot poison the whole batch.
                purgeRecords(database, table, false);
            }
        } else {
            for (int i = 0; i < records.size(); i++) {
                String sql = insert + records.elementAt(i);
                try {
                    purgeStatement.execute(sql);
                } catch (SQLException sqle) {
                    // Malformed record: log it and move on to the next one.
                    sqle.printStackTrace();
                    System.out.println("BAD SQL: '" + sql + "'");
                } catch (Exception e) {
                    // Likely a dropped connection: reconnect and retry once.
                    e.printStackTrace();
                    try {
                        purgeStatement.close();
                        tempConnection.close();
                    } catch (Exception e2) {
                        e2.printStackTrace();
                    }
                    reconnect(database);
                    purgeStatement = tempConnection.createStatement();
                    // FIX: the record was previously executed twice here (once
                    // unguarded, then again inside this try), double-inserting
                    // it or throwing an uncaught SQLException.
                    try {
                        purgeStatement.execute(sql);
                    } catch (SQLException sqle2) {
                        sqle2.printStackTrace();
                        System.out.println("BAD SQL: '" + sql + "'");
                    }
                }
            }
        }
        records.removeAllElements();
    }

    /***************************************************************************
     * Returns the name of the database this connection is operating on.
     * The value is derived from the tool name while the request is being
     * processed in run().
     */
    public String getDatabaseName() {
        return this.database;
    }

    /***************************************************************************
     * Processes the data client's request as a thread within the data server.
     * A header is read from the data client and the proper methods are called
     * based on the data type and operation type for this connection.
     */
    public void run() {
        try {
            processConfig();

            // Read in data type information (sensor) and operation type
            DataInputStream dis = new DataInputStream(sock.getInputStream());
            DataOutputStream dos = new DataOutputStream(sock.getOutputStream());
            int sensorID = dis.readInt(); // read the sensor ID
            int opType = dis.readInt(); // read the operation type (see
            // NetForSE.OP_ flags)

            // Look up sensor information; -1 tells the client the handshake
            // failed, 0 acknowledges it.
            SensorInfo sensorInfo = NetFSE.getSensorInfo(sensorID);
            if (sensorInfo == null) {
                System.err.println("Unknown sensor ID specified: " + sensorID + ".");
                dos.writeInt(-1);
                dos.flush();
                dos.close();
                dis.close();
                return;
            }
            dos.writeInt(0);

            NetFSERecord rec = NetFSE.getTypeClass(sensorInfo.getToolID());
            if (rec == null) {
                System.err.println("Unknown tool ID: " + sensorInfo.getToolID() + ".");
                dos.writeInt(-1);
                dos.flush();
                dos.close();
                dis.close();
                return;
            }
            ToolInfo toolInfo = new ToolInfo(rec.getTypeID(), rec.getTypeName(), "." + rec.getTypeName());

            this.database = toolInfo.getName().toLowerCase();

            // Every operation below requires a registered type class for this
            // tool; resolve it once instead of repeating the identical lookup
            // in each branch. (Integer.valueOf replaces deprecated
            // new Integer(...) boxing.)
            boolean toolSupported = NetFSE.TYPE_CLASSES.get(Integer.valueOf(toolInfo.getId())) != null;

            // Branch depending on the operation and data type
            if (opType == DataSocket.OP_INSERT) {
                if (toolSupported)
                    insertDynamic(toolInfo, sensorInfo, dis, dos);
                else
                    System.err.println("Unsupported tool for insert operation (" + toolInfo.getId() + ").");
            } else if (opType == DataSocket.OP_MIGRATE) {
                if (toolSupported)
                    migrate(toolInfo, sensorInfo, dis, dos);
                else
                    System.err.println("Unsupported tool for migrate operation (" + toolInfo.getId() + ").");
            } else if (opType == DataSocket.OP_DUMP) {
                if (toolSupported)
                    dump(toolInfo, sensorInfo, dis, dos);
                else
                    System.err.println("Unsupported tool for dump operation (" + toolInfo.getId() + ").");
            } else if (opType == DataSocket.OP_CLEANUP) {
                if (toolSupported)
                    cleanup(toolInfo, sensorInfo, dis, dos);
                else
                    System.err.println("Unsupported tool for cleanup operation (" + toolInfo.getId() + ").");
            } else {
                // Error out if the op type is not insert, migrate, dump or cleanup
                System.err.println("Unsupported OP code specified (" + opType + ").");
            }

            dis.close();
            dos.close();
        } catch (Exception E) {
            E.printStackTrace();
        } finally {
            // Always release the socket, even on an exception, to conserve
            // file descriptors (previously the socket leaked on any error).
            try {
                sock.close();
            } catch (Exception ignored) {
                // best-effort close; nothing useful to do on failure
            }
        }
    }

    /***************************************************************************
     * Convenience method for calling the array-based setupDirectories method
     * below with a single field.
     * 
     * @param rootPath
     *            Root of directory structure where new directories should be
     *            built
     * @param toolName
     *            Name of the tool that the directories are being built for
     * @param sensorName
     *            Name of the sensor that the directories are being built for
     * @param m
     *            Month that the directories are being built for
     * @param d
     *            Day of month that the directories are being built for
     * @param y
     *            Year that the directories are being built for
     * @param run
     *            Run that the directories are being built for
     * @param field
     *            ID of the field that the directories are being built for
     */
    private void setupDirectories(String rootPath, String toolName, String sensorName, int m, int d, int y, int run,
            int field) {
        // Wrap the lone field ID in a one-element array and delegate.
        setupDirectories(rootPath, toolName, sensorName, m, d, y, run, new int[] { field });
    }

    /***************************************************************************
     * Convenience method for building directory structures needed for building
     * b+ tree index and storing data file.
     * 
     * @param rootPath
     *            Root of directory structure where new directories should be
     *            built
     * @param toolName
     *            Name of the tool that the directories are being built for
     * @param sensorName
     *            Name of the sensor that the directories are being built for
     * @param m
     *            Month that the directories are being built for
     * @param d
     *            Day of month that the directories are being built for
     * @param y
     *            Year that the directories are being built for
     * @param run
     *            Run that the directories are being built for
     * @param fields
     *            IDs of the fields that the directories are being built for
     */
    private void setupDirectories(String rootPath, String toolName, String sensorName, int m, int d, int y, int run,
            int[] fields) {
        try {
            if (!rootPath.endsWith("/"))
                rootPath += "/";

            // Shared path prefixes, hoisted so each segment is concatenated
            // only once instead of being rebuilt for every entry.
            String sensorPath = rootPath + toolName + "/" + sensorName;
            String indexRunPath = sensorPath + "/index/" + y + "/" + m + "/" + d + "_" + run;

            // List directories parent-before-child so one pass of mkdir()
            // can create the whole tree.
            Vector<String> dirs = new Vector<String>();
            dirs.addElement(rootPath);
            dirs.addElement(rootPath + toolName);
            dirs.addElement(sensorPath);
            dirs.addElement(sensorPath + "/" + y);
            dirs.addElement(sensorPath + "/" + y + "/" + m);
            dirs.addElement(sensorPath + "/" + y + "/" + m + "/" + d);
            dirs.addElement(sensorPath + "/index");
            dirs.addElement(sensorPath + "/index/" + y);
            dirs.addElement(sensorPath + "/index/" + y + "/" + m);
            dirs.addElement(indexRunPath);
            for (int i = 0; i < fields.length; i++) {
                String fieldPath = indexRunPath + "/" + NetFSE.getFieldName(fields[i]);
                dirs.addElement(fieldPath);
                dirs.addElement(fieldPath + "/pages");
                dirs.addElement(fieldPath + "/index");
            }

            for (int i = 0; i < dirs.size(); i++) {
                File dir = new File(dirs.elementAt(i));
                if (dir.exists() && !dir.isDirectory()) {
                    System.err.println("Error in directory structure: must have a directory '" + dirs.elementAt(i)
                            + "'.");
                    return;
                } else if (!dir.exists() && !dir.mkdir()) {
                    // mkdir() signals failure (e.g. permissions) by returning
                    // false; previously this was silently ignored and callers
                    // failed mysteriously later when the path was missing.
                    System.err.println("Error creating directory '" + dirs.elementAt(i) + "'.");
                    return;
                }
            }
        } catch (Exception E) {
            E.printStackTrace();
        }
    }

}