package edu.upenn.yas;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.net.URL;
import java.net.HttpURLConnection;
import java.util.List;

import javax.swing.Timer;

/** Periodically combs the database for dead links and removes them. */
/** Periodically combs the database for dead links and removes them. */
public class Crawler {

    /** Default crawl interval: 3 minutes, in milliseconds. */
    private static final int DEFAULT_INTERVAL_MS = 1000 * 60 * 3;

    /** Connect/read timeout per URL, so one hung server cannot stall the sweep. */
    private static final int TIMEOUT_MS = 10_000;

    /** The singleton crawler instance (crawls every 3 minutes). */
    public static final Crawler instance = new Crawler(DEFAULT_INTERVAL_MS);

    private final Timer timer;

    /**
     * Creates a new Crawler that crawls every {@code n} milliseconds and
     * starts it immediately. Each sweep fetches every stored URL from
     * {@link QueryEngine} and removes those that answer HTTP 404.
     *
     * <p>Note: {@code javax.swing.Timer} fires on the Swing Event Dispatch
     * Thread, so each URL check is kept bounded by {@link #TIMEOUT_MS}.
     *
     * @param n the interval between sweeps, in milliseconds
     */
    private Crawler(int n) {
        ActionListener task = new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                List<String> urls = QueryEngine.instance.getAllURLs();
                for (String url : urls) {
                    HttpURLConnection conn = null;
                    try {
                        conn = (HttpURLConnection) new URL(url).openConnection();
                        conn.setRequestProperty("User-Agent", "Mozilla/4.0");
                        // HEAD is enough to learn the status code; avoids
                        // downloading the response body.
                        conn.setRequestMethod("HEAD");
                        conn.setConnectTimeout(TIMEOUT_MS);
                        conn.setReadTimeout(TIMEOUT_MS);
                        if (conn.getResponseCode() == HttpURLConnection.HTTP_NOT_FOUND) {
                            QueryEngine.instance.removeURL(url);
                        }
                    } catch (IOException ignored) {
                        // Intentionally skipped: one unreachable or malformed
                        // URL must not abort the rest of the sweep (previously
                        // this rethrew, killing the pass on the EDT). A
                        // transient network failure is not a 404, so the URL
                        // is kept and retried on the next sweep.
                    } finally {
                        if (conn != null) {
                            conn.disconnect();  // release the connection
                        }
                    }
                }
            }
        };
        timer = new Timer(n, task);
        timer.start();
    }
}
