package paper.crawler;

import paper.http.ClassReLoader;

import java.util.Random;
import java.util.PriorityQueue;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Vector;
import java.sql.*;

import java.io.InputStream;
import java.io.IOException;

import java.net.InetAddress;
import java.net.URL;
import java.net.URLConnection;

import paper.crawler.LinkEvent; 
//import paper.crawler.LinkSet;
import paper.crawler.HTMLParser;

import paper.batch.Batch;
import paper.db.DB;
 
/**
 * A per-IP crawler thread. One Crawler owns all host names that resolve to a
 * single IP address; URLs for other IPs are routed to (or spawn) the Crawler
 * responsible for that address via the static {@code spiders} registry.
 *
 * Thread-safety: the registry and each crawler's schedule are guarded with
 * {@code synchronized}; the schedule uses wait/notify for producer/consumer
 * hand-off between crawlers.
 */
public class Crawler extends Thread {
    /** Global registry: IP address -> the single Crawler responsible for it. */
    static HashMap<InetAddress,Crawler> spiders = new HashMap<InetAddress,Crawler>();

    /** Host names known to resolve to this crawler's IP (virtual hosts). */
    private HashSet<String> myHosts = new HashSet<String>();
    private InetAddress myIP;
    /** Pending fetches, ordered by LinkEvent's natural ordering. */
    private PriorityQueue<LinkEvent> schedule = new PriorityQueue<LinkEvent>();

    Crawler(InetAddress ip, String hostName) {
        myIP = ip;
        addHost(hostName);
        // NOTE(review): starting the thread from the constructor publishes a
        // partially constructed 'this'; kept because existing callers rely on
        // crawlers being live as soon as they are created.
        start();
    }

    /** Registers another host name that maps to this crawler's IP. */
    public synchronized void addHost(String hostname) {
        myHosts.add(hostname);
    }

    // Synchronized, since it adds new Crawlers to the mega list, can be called from any spider.
    public synchronized static Crawler createNewCrawler(InetAddress ip, String host) {
        Crawler c = new Crawler(ip, host);
        spiders.put(ip, c);
        return c;
    }

    /**
     * Routes a URL to the crawler that owns its host's IP, creating that
     * crawler on first contact. Not synchronized, since it does a (possibly
     * slow) DNS lookup.
     *
     * @param url      absolute URL to schedule
     * @param hopCount remaining link-hop budget for this URL
     * @param quality  quality score used for politeness pacing
     */
    public void scheduleGlobalURL(String url, int hopCount, double quality) {
        String host = null;
        try {
            host = new URL(url).getHost();
        } catch (java.net.MalformedURLException e) {
            System.out.println("Dropped " + url + " : " + e);
            return;
        }
        boolean allowNonLocal = false;
        if (myHosts.contains(host)) {
            scheduleLocalURL(url, hopCount, quality);
        } else {
            InetAddress ipAddr;
            try {
                ipAddr = InetAddress.getByName(host);
            } catch (java.net.UnknownHostException e) {
                System.out.println("Dropped " + url + " : " + e);
                // BUG FIX: previously fell through with ipAddr == null,
                // querying spiders.get(null) and possibly creating a crawler
                // with a null IP. An unresolvable host is simply dropped.
                return;
            }
            Crawler c = spiders.get(ipAddr);
            if (c == this) {
                // Virtual hosting: another name for our own IP.
                addHost(host);
            } else if (c == null) {
                c = createNewCrawler(ipAddr, host);
            }
            if (allowNonLocal || c == this) {
                c.scheduleLocalURL(url, hopCount, quality);
            }
        }
    }

    /** Queues a URL on this crawler's own schedule. */
    public void scheduleLocalURL(String url, int hopCount, double quality) {
        addEvent(new LinkEvent(url, hopCount, quality));
    }

    // Synchronized, adds a URL can be called from multiple crawlers.
    private synchronized void addEvent(LinkEvent e) {
        System.out.println("Added " + e.url);
        boolean wasEmpty = schedule.isEmpty();
        schedule.add(e);
        if (wasEmpty) {
            // Only wake the worker when it could have been blocked in getEvent().
            notify();
        }
    }

    /** Blocks until an event is available, then removes and returns it. */
    private synchronized LinkEvent getEvent() {
        while (schedule.isEmpty()) {
            System.out.println("Waiting...");
            try {
                wait();
            } catch (InterruptedException ignored) {
                // Deliberately swallowed: interruption falls through to
                // re-check the loop condition so the crawler keeps running.
            }
            System.out.println("Awoken");
        }
        System.out.println("Returning event");
        return schedule.poll();
    }

    /**
     * Marks a URL as fetched in the database.
     *
     * @param url the URL whose status row is updated
     */
    public static synchronized void markURL(String url) {
        String qUrl;
        try (Statement st = DB.connection.createStatement()) { // BUG FIX: statement was never closed
            qUrl = DB.q(url, DB.urlMax);
            // NOTE(review): string-built SQL; DB.q() presumably escapes and
            // truncates — verify, or migrate to PreparedStatement with '?'.
            st.executeUpdate("UPDATE urls SET status='"+
                             DB.urlStatusDone +
                             "' WHERE url='"+qUrl+"'");
        } catch (SQLException e) {
            System.err.println("Failed to update url " + url + " " + e);
        }
    }

    static Random r = new Random();
    /*
     * computes a waiting time. fixme: make time right according to RFC
     * whatever. Normal time is 2-4 seconds between requests. Time can be
     * multiplied by up to 100 for after very bad requests.
     */
    long milliSeconds(double q) {
        final double worst = 1.0/100;          // cap the slowdown factor at 100x
        double time = 2000 + 2000 * r.nextDouble();  // base delay: 2-4 seconds
        if (q < worst) q = worst;
        return (long) (time / q);
    }

    /**
     * Main crawl loop: sleep for politeness, take the next event, fetch it,
     * and hand HTML to the parser or everything else to the batch processor.
     * Never returns; per-URL failures are logged and the loop continues.
     */
    public void run() {
        double lastQuality = 1.0;
        // TODO: grab and process robots.txt here.
        HillManager hm = new HillManager();
        System.out.println("Started...");
        while (true) {
            try {
                sleep(milliSeconds(lastQuality));
                LinkEvent ev = getEvent();
                if (ev.hopCount == 0) {
                    continue;  // hop budget exhausted; drop silently
                }
                lastQuality = ev.quality;
                URL u = new URL(ev.url);
                URLConnection uc = u.openConnection();
                String type = uc.getContentType();
                InputStream is = null;
                HillClimber hp = hm.makeHillClimber("paper.crawler.BlindHTMLParser");
                System.out.println("Grabbing " + ev.url);
                try {
                    is = uc.getInputStream();
                } catch (Exception e) {
                    System.out.println("404 on " + ev.url + " " + e);
                    markURL(ev.url);
                    // BUG FIX: was 'return', which killed the whole crawler
                    // thread after a single failed fetch.
                    continue;
                }
                try {
                    // BUG FIX: getContentType() may return null; treat unknown
                    // content as non-HTML instead of throwing NPE.
                    if (type != null && type.startsWith("text/html")) {
                        hp.parse(ev.url, ev.hopCount, is, this);
                    } else {
                        Batch.processPublicStream(ev.url, is);
                    }
                    markURL(ev.url);
                } finally {
                    // BUG FIX: the stream was never closed (connection leak).
                    try { is.close(); } catch (IOException ignored) { }
                }
            } catch (Exception e) {
                System.err.println("Crawler trouble " + e);
            }
        }
    }


    public static void main(String args[]) {
        startCrawlerFromURL("http://localhost/Publications/");
    }

    /** Bootstraps the crawl: creates a crawler for the URL's host and seeds it. */
    private static void startCrawlerFromURL(String x) {
        try {
            URL u = new URL(x);
            String host = u.getHost();
            Crawler c = Crawler.createNewCrawler(InetAddress.getByName(host),host);
            c.scheduleGlobalURL(x, 2, 1.0);
        } catch (Exception e) {
            System.err.println("" + e);
        }
    }
}
