/****************************************************************************
 *
 * Copyright (C) 2003-2010 Los Alamos National Security, LLC
 *                         Packet Analytics Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License Version 2 as
 * published by the Free Software Foundation.  You may not use, modify or
 * distribute this program under any other version of the GNU General
 * Public License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 ****************************************************************************/
package nfse.solr;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.util.Random;

import nfse.DataFileProcessor;
import nfse.NetFSE;
import nfse.NetFSERecord;
import nfse.SensorInfo;
import nfse.ToolInfo;
import nfse.gui.server.GUIRecordFactory;

/**
 * An example class of how to write a DataFileProcessor that sends data to
 * Apache Solr (http://lucene.apache.org/solr/). Working knowledge of Solr is
 * required to properly configure Solr.
 * 
 * To enable, add this line to the end of your Net/FSE config file
 * (/etc/netfse/0.3/local.cnf):
 * 
 * App 3 1 nfse.solr.ApacheSolrIndexer
 * 
 * You can now perform full text search using Net/FSE!
 * 
 * @author bduphoff
 * 
 */
public class ApacheSolrIndexer extends DataFileProcessor {

    /**
     * A random number generator used to build unique-ish Solr document IDs.
     */
    Random r = new Random();

    /**
     * Read in all the records, parse them, write them out as CSVs that Solr can
     * consume, send to Solr. Full text search!
     *
     * @throws Exception if the temp file cannot be created/written or the Solr
     *             HTTP request fails
     */
    @Override
    public void process() throws Exception {
        /*
         * Behind the scenes, the system tracks how much of a file has been
         * processed if any. Call getOffset() to skip to the next record to be
         * processed. If it is non-zero, moveToOffset() will move to the correct
         * location in the file.
         */
        int offset = getOffset();
        if (offset > 0) {
            moveToOffset();
        }

        /*
         * Need to know what kind of data we are dealing with. The "Tool" is a
         * legacy naming convention that should really be "Type". The Sensor is
         * an instance of the Tool/Type. For example you would likely have
         * multiple Snort sensors. They share a common Tool but are different
         * Sensors.
         */
        SensorInfo sensorInfo = NetFSE.getSensorInfo(this.getSensorID());
        ToolInfo toolInfo = NetFSE.getToolInfo(sensorInfo.getToolID());

        /*
         * This is the class needed to parse records. Determined by the config
         * file and system metadata.
         */
        GUIRecordFactory rec = (GUIRecordFactory) NetFSE.getRecordFactory(toolInfo.getId());

        File file = File.createTempFile(sensorInfo.getName() + "_", ".csv", new File(NetFSE.TEMP_DIR));
        file.deleteOnExit();
        int count = 0;
        int badCount = 0;

        /*
         * Create the output file and write out the column names.
         * try-with-resources guarantees the writer is closed (and the buffer
         * flushed) even if record parsing throws.
         */
        try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)))) {
            out.println("id," + rec.getCSVColNames());

            /*
             * Read each record and write the data as a CSV to the output file.
             */
            byte[] bytes;
            while ((bytes = getNextRecord()) != null) {
                NetFSERecord temp = null;
                try {
                    // Decode explicitly as UTF-8: the Solr request below
                    // declares charset=utf-8, so we must not depend on the
                    // platform default charset here.
                    temp = rec.generate(new String(bytes, StandardCharsets.UTF_8), this.getSensorID());
                } catch (Exception e) {
                    // Fix: the original code set rec = null here, which
                    // destroyed the record factory and broke parsing of ALL
                    // subsequent records after a single bad one. A parse
                    // failure should only skip the offending record; temp
                    // stays null and the record is counted as bad below.
                    e.printStackTrace();
                }

                /*
                 * A non-null record means it was parsed successfully. Write out
                 * the CSV.
                 */
                String csv = (temp != null) ? rec.toCSV(temp) : null;
                if (csv != null) {
                    /*
                     * We need a unique ID for Solr. There may be a smarter way
                     * (e.g. a UUID) but time + random + content hash works as
                     * an example.
                     */
                    String id = System.currentTimeMillis() + "-" + r.nextInt() + "-" + Math.abs(csv.hashCode());
                    out.println(id + "," + csv);
                    count++;
                } else {
                    badCount++;
                }
            }
        }

        /*
         * Send the data to Solr for indexing: Solr streams the CSV file
         * straight from disk and commits in the same request.
         */
        String urlStr = "http://localhost:8983/solr/update/csv?commit=true&stream.file=" + file.getAbsolutePath()
                + "&stream.contentType=text/plain;charset=utf-8";
        URL solrUrl = new URL(urlStr);
        URLConnection conn = solrUrl.openConnection();

        /*
         * Drain the response so the request completes. Content ignored here as
         * this is just an example; a real implementation should check for Solr
         * error responses.
         */
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            while (in.readLine() != null) {
                // response body intentionally ignored
            }
        }
    }

}
