/****************************************************************************
 *
 * Copyright (C) 2003-2008 Los Alamos National Security, LLC
 *                         Packet Analytics Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License Version 2 as
 * published by the Free Software Foundation.  You may not use, modify or
 * distribute this program under any other version of the GNU General
 * Public License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 ****************************************************************************/
package nfse;

import java.io.*;

import nfse.stats.RunStats;
import nfse.stats.StatsServer;

import java.sql.*;

import org.postgresql.*;
import org.postgresql.copy.CopyManager;
import org.postgresql.util.PSQLException;

/**
 * This is the core indexing class for new data coming into the system via
 * NetListener.
 * 
 * This class is called continuously by the server when new data records are
 * ready for indexing. Fields that may be indexed here are time, IP address,
 * and port number; however, any numerical field can be indexed.
 * 
 * Data indexed here is stored in transient relational database tables.
 * AutoMigrate determines when to move data from transient storage to permanent
 * b+ tree indexes.
 * 
 * @author bduphoff
 * 
 */
public class NumericIndexFileProcessor extends DataFileProcessor {

    /**
     * Writes new records from the current data file to a temporary text file
     * and bulk loads them into PostgreSQL with the COPY command.
     *
     * Records are accumulated into "run" tables named
     * {@code s<sensor>_n<node>_r<run>}. When the current run table fills up
     * ({@code NetFSE.TEMP_TABLE_SIZE} rows), the data written so far is
     * copied in, a new run table is created, and output continues to a fresh
     * temp file. After all records are loaded, the method blocks while the
     * PostgreSQL disk is more than 90% full, waiting for AutoMigrate to
     * reclaim space.
     *
     * @throws Exception on unrecoverable database or I/O failures. When COPY
     *             fails because the input contains the COPY delimiter (tabs),
     *             row-by-row inserts are attempted as a fallback before
     *             giving up.
     */
    public void process() throws Exception {
        File file = null;
        PrintWriter out = null;

        // Move to the last record processed in this file, if any.
        int offset = getOffset();
        if (offset > 0) {
            moveToOffset();
        }

        // Create a connection to the metadatabase.
        MetaDatabase mdb = MetaDatabase.create();

        // Get information about what type of data is in this file.
        SensorInfo sensorInfo = NetFSE.getSensorInfo(this.getSensorID());
        ToolInfo toolInfo = NetFSE.getToolInfo(sensorInfo.getToolID());

        long startMS = System.currentTimeMillis();
        NetFSERecordFactory rec = (NetFSERecordFactory) NetFSE.getRecordFactory(toolInfo.getId());

        Connection tempConnection = null;
        Statement tempStatement = null;

        /*
         * Try to connect to the local database for this type of data, which
         * may not exist. If the connection fails, create the database for
         * this type via the metadata database (which must exist) and
         * reconnect.
         */
        try {
            tempConnection = NetFSE.createLocalDatabaseConnection(toolInfo.getName().toLowerCase());
            tempStatement = tempConnection.createStatement();
        } catch (SQLException sqle) {
            tempConnection = NetFSE.createLocalDatabaseConnection(NetFSE.METADATA_DATABASE);
            tempStatement = tempConnection.createStatement();
            // NOTE(review): the database name is concatenated into SQL; tool
            // names come from internal metadata and are assumed trusted.
            tempStatement.execute("create database " + toolInfo.getName().toLowerCase() + " encoding='SQL_ASCII'");
            tempStatement.close();
            tempConnection.close();

            tempConnection = NetFSE.createLocalDatabaseConnection(toolInfo.getName().toLowerCase());
            tempStatement = tempConnection.createStatement();
        }

        // SQL to find all tables storing data for this type.
        String tableSQL = "select table_name from information_schema.tables where table_catalog='"
                + toolInfo.getName().toLowerCase() + "' and table_type='BASE TABLE' and table_schema='public'";

        try {
            String sensorStr = "s" + sensorInfo.getSensorID() + "_"; // sensor prefix
            String nodeStr = "_n" + NetFSE.NODE_ID + "_"; // node infix
            int maxRun = 0; // The latest run number in use
            String maxRunName = ""; // Full table name of the latest run table

            /*
             * Loop through all the tables in this database to find the latest
             * one used to store data for this type. Run-table names have the
             * form s<sensor>_n<node>_r<run>.
             */
            ResultSet rs3 = tempStatement.executeQuery(tableSQL);
            while (rs3.next()) {
                String name = rs3.getString(1);
                if (name.startsWith(sensorStr) && name.indexOf(nodeStr) > 0) {
                    String[] tokens = name.split("_");
                    if (tokens.length == 3 && !name.endsWith("Old") && !name.endsWith("New")) {
                        int sensor = Integer.parseInt(tokens[0].substring(1));
                        int node = Integer.parseInt(tokens[1].substring(1));
                        if ((sensor == sensorInfo.getSensorID()) && (node == NetFSE.NODE_ID)) {
                            int value = Integer.parseInt(tokens[2].substring(1));
                            if (value > maxRun) {
                                maxRun = value;
                                maxRunName = name;
                            }
                        }
                    }
                }
            }
            rs3.close();

            // The sequence determines the next "run" number to use. It may
            // not exist yet but is created below when maxRun == 0.
            String sequenceName = "run_s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_sequence";

            if (maxRun == 0) {
                /*
                 * First time processing data for this type: create the
                 * sequence, the first run table and its indexes, and register
                 * the table in the system's metadatabase.
                 */
                tempStatement.execute("create sequence " + sequenceName);

                // Need a new table; pull the first run number.
                ResultSet rs = tempStatement.executeQuery("select nextval('" + sequenceName + "')");
                rs.next();
                maxRun = rs.getInt(1);
                rs.close();

                // Determine the table name.
                maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r" + maxRun;

                // Get the table creation SQL for this type.
                tableSQL = rec.generate().getTempTableSQL(maxRunName);
                try {
                    // Create the table, add it to the system's metadatabase.
                    tempStatement.execute(tableSQL);
                    mdb.addTempTable(maxRunName, NetFSE.NODE_ID, sensorInfo.getSensorID());
                } catch (Exception e) {
                    e.printStackTrace();
                }

                // Now index the table according to this type.
                String[] indexSQL = rec.generate().getTempIndexSQL(maxRunName);
                for (int i = 0; i < indexSQL.length; i++) {
                    try {
                        tempStatement.execute(indexSQL[i]);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } else {
                // The table already exists; just regenerate its name.
                maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r" + maxRun;
            }

            // Determine how many records are already in the latest run table.
            int numRecordsInRun = 0;
            rs3 = tempStatement.executeQuery("select count(*) as a from " + maxRunName);
            if (rs3.next())
                numRecordsInRun = rs3.getInt(1);
            rs3.close();

            System.out.println("Indexing " + getFile().getAbsolutePath());
            file = File.createTempFile(sensorInfo.getName() + "_", ".sql");
            file.deleteOnExit();
            int count = 0;      // records written to the current temp file
            int total = 0;      // records written across all runs
            int badCount = 0;   // records that failed to parse
            out = new PrintWriter(new BufferedWriter(new FileWriter(file)));
            byte[] bytes = null;

            PGConnection pgCon = (PGConnection) tempConnection;
            CopyManager copyMgr = pgCon.getCopyAPI();

            /*
             * Get records from the file being processed and write their COPY
             * values to a temp file. Once the current run table is full, COPY
             * the data written so far into it, create a new empty run table,
             * and start writing remaining records to a new temp file.
             */
            while ((bytes = getNextRecord()) != null) {
                int numToProcess = NetFSE.TEMP_TABLE_SIZE - numRecordsInRun;
                /*
                 * When numToProcess <= 0, copy what we have written so far
                 * into the table to fill it. Then create a new table and
                 * start outputting records to a new file.
                 */
                if (numToProcess <= 0) {
                    out.flush();
                    out.close();
                    FileInputStream in = new FileInputStream(file);

                    long ts = System.currentTimeMillis();
                    System.err.println("Indexer: Copying data to " + maxRunName);

                    copyMgr.copyIntoDB("COPY " + maxRunName + " FROM STDIN", in);
                    in.close();

                    // Persist our progress so a restart resumes from here.
                    offset = getOffset();
                    this.getParent().setFileOffset(getFileID(), offset);

                    long ts2 = System.currentTimeMillis();
                    float copySec = (float) (ts2 - ts) / (float) 1000;
                    float totalSec = (float) (ts2 - startMS) / (float) 1000;
                    System.out.println("Indexer: Total Records=" + count + "; Copy RPS= " + count / copySec
                            + "; Total RPS= " + count / totalSec);

                    // Store the old (full) table name so we can update its
                    // metadata after switching to the new table.
                    String prevRunName = maxRunName;

                    // Need a new table; determine the next run number from
                    // the sequence.
                    ResultSet rs = tempStatement.executeQuery("select nextval('" + sequenceName + "')");
                    rs.next();
                    maxRun = rs.getInt(1);
                    rs.close();

                    // Create the new table and associated indexes.
                    maxRunName = "s" + sensorInfo.getSensorID() + "_n" + NetFSE.NODE_ID + "_r" + maxRun;
                    tableSQL = rec.generate().getTempTableSQL(maxRunName);
                    try {
                        tempStatement.execute(tableSQL);
                        mdb.addTempTable(maxRunName, NetFSE.NODE_ID, sensorInfo.getSensorID());
                    } catch (Exception e) {
                        // Table may already exist; continue with inserts.
                    }
                    String[] indexSQL = rec.generate().getTempIndexSQL(maxRunName);
                    for (int i = 0; i < indexSQL.length; i++) {
                        try {
                            tempStatement.execute(indexSQL[i]);
                        } catch (Exception e) {
                            // Index may already exist; continue.
                        }
                    }

                    // Update timestamps and record count in Temp_Tables.
                    rs = tempStatement.executeQuery("select min(startts) as a, max(startts) as b, count(*) as c from "
                            + prevRunName);
                    rs.next();
                    java.sql.Timestamp minTS = rs.getTimestamp("a");
                    java.sql.Timestamp maxTS = rs.getTimestamp("b");
                    long recordCount = rs.getLong("c");
                    RunStats stats = new RunStats();
                    stats.sensorID = sensorInfo.getSensorID();
                    stats.typeID = sensorInfo.getToolID();
                    stats.minTS = minTS;
                    stats.maxTS = maxTS;
                    stats.count = recordCount;
                    StatsServer.addRunStats(stats);
                    rs.close();

                    // Update the metadatabase now that we won't be adding
                    // more data to the old table.
                    mdb.setTempTableTimestamps(prevRunName, minTS, maxTS);
                    total += count;
                    file.delete();

                    // Create a new file to store the COPY values,
                    // reinstantiate the writer.
                    file = File.createTempFile(sensorInfo.getName() + "_", ".sql");
                    file.deleteOnExit();
                    out = new PrintWriter(new BufferedWriter(new FileWriter(file)));

                    // Reset per-run counters.
                    count = 0;
                    badCount = 0;
                    numRecordsInRun = 0;
                }

                NetFSERecord temp = null;
                try {
                    temp = rec.generate(new String(bytes), this.getSensorID());
                } catch (Exception e) {
                    // BUG FIX: this previously nulled out the record factory
                    // itself ("rec = null"), which caused an NPE on the next
                    // iteration and broke all subsequent parsing. A parse
                    // failure should only skip this one record.
                    temp = null;
                }

                // If the record parsed, write its COPY values to disk and
                // increment counts; otherwise count it as bad.
                if (temp != null) {
                    String sql = temp.getTempCopySQL();
                    if (sql != null) {
                        out.println(sql);
                        count++;
                        numRecordsInRun++;
                    } else {
                        badCount++;
                    }
                } else {
                    // This record could not be parsed.
                    badCount++;
                }

            } // End main while loop

            /*
             * Flush and bulk load whatever remains in the temp file. This
             * runs whenever the data file contained records (almost always).
             */
            if (count > 0) {
                total += count;
                out.flush();
                out.close();

                BufferedInputStream in = new BufferedInputStream(new FileInputStream(file));

                long ts = System.currentTimeMillis();
                System.err.println("Indexer: Copying data to " + maxRunName);

                try {
                    /*
                     * Bulk load the data into PostgreSQL using the COPY
                     * command. A modified JDBC driver is needed for this to
                     * work.
                     */
                    ((PGConnection) tempConnection).getCopyAPI().copyIntoDB("COPY " + maxRunName + " FROM STDIN", in);
                    in.close();
                } catch (PSQLException psqle) {
                    /*
                     * If the COPY command fails, try to insert records
                     * one at a time as a fallback.
                     */
                    if (psqle.getMessage().equals("ERROR: extra data after last expected column")) {
                        System.out
                                .println("Indexer: Could not execute COPY command. Logs may contain tabs. Remove tabs from logs before processing. Use another delimiter if using a tab delimited format.");
                        setOffset(offset);
                        moveToOffset();
                        // Re-read the file from this point and try
                        // one-by-one inserts.
                        Statement statement = tempConnection.createStatement();
                        try {
                            while ((bytes = getNextRecord()) != null) {
                                NetFSERecord temp = rec.generate(new String(bytes), this.getSensorID());
                                if (temp != null) {
                                    String insert = "insert into " + maxRunName + " values " + temp.getTempInsertSQL();
                                    statement.execute(insert);
                                }
                            }
                        } finally {
                            statement.close();
                        }
                    } else {
                        /*
                         * Here there is nothing we can do about the data.
                         * Better handling of this case should be added in a
                         * future release. The outer finally block closes the
                         * database resources.
                         */
                        try {
                            in.close();
                        } catch (Exception ignored) {
                            // Best-effort cleanup before rethrowing.
                        }
                        throw psqle;
                    }
                }

                // Update timestamps and record count in Temp_Tables.
                ResultSet rs = tempStatement
                        .executeQuery("select min(startts) as a, max(startts) as b, count(*) as c from " + maxRunName);
                rs.next();
                java.sql.Timestamp minTS = rs.getTimestamp("a");
                java.sql.Timestamp maxTS = rs.getTimestamp("b");

                RunStats stats = new RunStats();
                stats.sensorID = sensorInfo.getSensorID();
                stats.typeID = sensorInfo.getToolID();
                stats.minTS = minTS;
                stats.maxTS = maxTS;
                stats.count = count;
                StatsServer.addRunStats(stats);
                rs.close();

                long ts2 = System.currentTimeMillis();
                float copySec = (float) (ts2 - ts) / (float) 1000;
                float totalSec = (float) (ts2 - startMS) / (float) 1000;
                System.out.println("Indexer: Total Records=" + count + "; Copy RPS= " + count / copySec
                        + "; Total RPS= " + count / totalSec);

                file.delete();
            }

            /*
             * The next lines cause this thread to block until the AutoMigrate
             * feature frees up more disk space. Space could also be reclaimed
             * manually and the thread would resume if enough space was freed.
             */
            double postgresFullPercent = NetFSE.getPostgresDiskFullPercent();
            while (postgresFullPercent > 90.0) {
                System.out.println("Indexer: PostgreSQL almost out of disk. Waiting for AutoMigrate to free up space.");
                postgresFullPercent = NetFSE.getPostgresDiskFullPercent();
                Thread.sleep(30000);
            }

            System.out.println("Indexer: Finished. Total Records=" + total);
        } finally {
            // Best-effort cleanup: close any still-open writer and remove the
            // temp file. file is null when an error occurred before creation
            // (previously this relied on catching the resulting NPE).
            if (out != null) {
                out.close();
            }
            if (file != null) {
                file.delete();
            }
            // Clean up database connections before exiting.
            mdb.closeConnection();
            tempStatement.close();
            tempConnection.close();
        }

    }
}
