
package ubc;

import java.io.File;
import java.util.List;

/* QueueOrganizer
 * - Stores
 *   - A list of CrawlResults that have not been written to file
 *   - A list of hostnames that have not been crawled yet
 *   - A list of hostnames that have been crawled
 * - Has methods to start and stop a queue writer
 */

/* QueueWriter
 * A queue writer checks a queue for items,
 * if there is one, it writes it to a file
 *
 * If we are in server-mode, then when we get an item from the queue,
 * add the node's ultrapeers and leaves to our uncrawled hostnames queue
 */

public class QueueOrganizer extends Thread {

    public Queue crawledResults; // CrawlResults not yet dispatched to the write queue
    public Queue uncrawledHosts; // hostnames that have not been crawled yet
    public Queue crawledHosts;   // hostnames already crawled (or already queued for crawling)
    public Queue writeQueue;     // CrawlResults ready to be written to file

    private String _mode;
    public  String _path;
    private QueueWriter qwriter;

    // Number of results written by the current writer (misspelled name kept
    // for backward compatibility with external readers of this public field).
    public int numWriten = 0;

    /**
     * Creates the organizer and, if the output file already exists, reads it
     * back so previously crawled hosts are not re-crawled and their results
     * are re-written into the new file.
     *
     * @param mode "Server" enables discovery of new hosts from crawl results
     * @param path path of the results file to read from / write to
     */
    public QueueOrganizer(String mode, String path) {
        _mode = mode;
        _path = path;
        crawledResults = new Queue();
        uncrawledHosts = new Queue();
        crawledHosts = new Queue();
        writeQueue = new Queue();

        File f = new File(_path);
        if (f.exists()) {
            System.out.println("\nReading Existing File...");
            ObjectReader or = new ObjectReader(_path);
            // Unchecked: ObjectReader.readAll() is untyped; this file only ever
            // stores CrawlResult objects, so the cast is safe by construction.
            @SuppressWarnings("unchecked")
            List<CrawlResult> l = (List<CrawlResult>) or.readAll();

            // Remember each host as crawled, and queue its result so it is
            // written back into the replacement file.
            for (CrawlResult cr : l) {
                crawledHosts.enqueue(cr.getHostPort());
                crawledResults.enqueue(cr);
            }
            System.out.println("Done!\n");
        }
    }

    /**
     * Moves finished CrawlResults onto the write queue until interrupted.
     * In server mode, each result also spawns a HostnamesAdder to harvest
     * newly discovered hosts.
     */
    public void run() {

        // Start the queue writer
        startWriter();

        while (!Thread.interrupted()) {
            try {
                // Blocks until a result is available (dequeue throws
                // InterruptedException, so handle interruption explicitly —
                // a broad catch would swallow it and clear the flag,
                // making this loop unstoppable).
                CrawlResult cr = (CrawlResult) crawledResults.dequeue();
                writeQueue.enqueue(cr);
                if (_mode.equalsIgnoreCase("Server")) {
                    (new HostnamesAdder(cr)).start();
                }
            } catch (InterruptedException e) {
                break; // interrupted while blocked in dequeue: shut down
            } catch (Exception e) {
                continue; // best-effort: skip a bad item, keep draining
            }
        }
        System.out.println("\n\nQueueOrganizer is ending!!!\n\n");
    }

    /** Starts a fresh QueueWriter thread and resets the written counter. */
    public void startWriter() {
        numWriten = 0;
        qwriter = new QueueWriter(writeQueue, _mode);
        qwriter.start();
    }

    /**
     * Waits for the write queue to drain, then stops the writer thread.
     * The original empty busy-wait burned a full CPU core; poll with a
     * short sleep instead.
     */
    public void stopWriter() {
        try {
            while (writeQueue.size() != 0) {
                Thread.sleep(10);
            }
        } catch (InterruptedException e) {
            // Give up waiting but still stop the writer; preserve the flag.
            Thread.currentThread().interrupt();
        }
        qwriter.kill();
    }

    /**
     * Drains the write queue to the results file until interrupted.
     * (Non-static by design: it updates the outer numWriten counter and
     * reads the outer _path.)
     */
    public class QueueWriter extends Thread {
        ObjectWriter writer;
        Queue _queue;
        String _mode;

        public QueueWriter(Queue queue, String mode) {
            writer = new ObjectWriter(_path);
            _queue = queue;
            _mode = mode;
        }

        /** Interrupts the writer and waits for it to terminate. */
        public void kill() {
            this.interrupt();
            try {
                // join() replaces the original CPU-burning isAlive() spin.
                this.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }

        public void run() {
            try {
                while (!Thread.interrupted()) {
                    try {
                        // Blocks until an item is available.
                        CrawlResult obj = (CrawlResult) _queue.dequeue();
                        obj.buf = null; // drop the raw buffer before serializing
                        writer.write(obj);
                        numWriten++;
                    } catch (InterruptedException e) {
                        return; // finally block still closes the writer
                    } catch (Exception e) {
                        continue; // skip an unwritable item
                    }
                }
            } finally {
                // The original only closed on the non-blocking exit path and
                // leaked the writer when interrupted mid-dequeue.
                writer.close();
            }
        }
    }

    /**
     * Parses a CrawlResult's ultrapeer and leaf lists and adds any host not
     * yet seen to the uncrawled queue. Ultrapeers are prioritized (front of
     * the queue); leaves go to the back.
     */
    public class HostnamesAdder extends Thread {
        CrawlResult _cr;

        public HostnamesAdder(CrawlResult cr) {
            _cr = cr;
        }

        public void run() {
            // split(",") yields a single-element array when there is no comma,
            // so no separate contains(",") check is needed.
            addHosts(_cr.getUltrapeers().split(","), true);
            addHosts(_cr.getLeaves().split(","), false);
        }

        // Adds each non-empty, not-yet-crawled hostname to both queues;
        // atFront=true prioritizes it (ultrapeers), false appends (leaves).
        private void addHosts(String[] hostnames, boolean atFront) {
            for (String host : hostnames) {
                if (!host.isEmpty() && !crawledHosts.contains(host)) {
                    if (atFront) {
                        uncrawledHosts.enqueueFirst(host);
                        crawledHosts.enqueueFirst(host);
                    } else {
                        uncrawledHosts.enqueue(host);
                        crawledHosts.enqueue(host);
                    }
                }
            }
        }
    }

}
