package crawler;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.LinkedList;

import database.ConnectionPool;
import model.ParsedPage;
import parser.HtmlParser;

/**
 * Crawls a single domain starting from {@code startURL}, spawning an
 * {@link HtmlParser} per page, persisting parsed products through the
 * shared {@link ConnectionPool}, and enqueueing newly discovered links
 * that match the keyword filter.
 *
 * <p>Parser threads call back into {@link #addParsedPage(ParsedPage)},
 * {@link #registerThread()} and {@link #unregisterThread()}, so all shared
 * mutable state ({@code queue}, {@code visited}, {@code runningThreads})
 * is guarded by {@code this}.
 */
public class DomainCrawler extends Thread {

    /** Root URL the crawl starts from. */
    private final String startURL;
    /** URLs discovered but not yet visited. Guarded by {@code this}. */
    private final LinkedList<String> queue = new LinkedList<String>();
    /** URLs already handed to a parser. Guarded by {@code this}. */
    private final LinkedList<String> visited = new LinkedList<String>();
    /** A URL must contain one of these to be enqueued. TODO: review the target sites and add more keywords. */
    private final LinkedList<String> keywords;
    /** Number of parser threads currently running. Guarded by {@code this}. */
    private int runningThreads = 0;
    /** Pool supplying JDBC connections; its lifecycle is managed by the caller. */
    private final ConnectionPool connectionPool;

    /** Maximum number of concurrently registered parser threads. */
    public static final int THREADS_NO = 100;
    /** Maximum number of pages visited in one crawl. */
    public static final int MAX_VISITS = 20;

    /**
     * @param startURL       URL the crawl starts from; must already exist in the
     *                       {@code starturls} table for products to be stored
     * @param connectionPool shared pool used for all database work
     */
    public DomainCrawler(String startURL, ConnectionPool connectionPool) {
        this.startURL = startURL;
        this.connectionPool = connectionPool;
        this.keywords = new LinkedList<String>(Arrays.asList("nokia"));
    }

    /**
     * Reserves a parser-thread slot.
     *
     * @return {@code true} if a slot was free and is now taken, {@code false}
     *         if {@link #THREADS_NO} threads are already running
     */
    public synchronized boolean registerThread() {
        if (this.runningThreads < DomainCrawler.THREADS_NO) {
            this.runningThreads++;
            return true;
        }
        return false;
    }

    /** Releases a previously reserved parser-thread slot. */
    public synchronized void unregisterThread() {
        this.runningThreads--;
    }

    /** @return current number of registered parser threads (consistent read). */
    private synchronized int activeThreads() {
        return this.runningThreads;
    }

    /**
     * Callback invoked by parser threads: stores the parsed product (if any)
     * and enqueues every new link that passes the relevance filter.
     * Synchronized because {@code queue}/{@code visited} are shared.
     *
     * @param page the page a parser thread just finished processing
     */
    public synchronized void addParsedPage(ParsedPage page) {
        if (page.getParsedProduct() != null) {
            persistProduct(page);
        }

        // Enqueue each link exactly once, skipping irrelevant or noisy URLs
        // (fragments, filter/range pages) and anything already seen.
        for (String link : page.getLinks()) {
            if (link == null || this.visited.contains(link) || this.queue.contains(link)
                    || !containsKeywordsURL(link) || link.contains("#")
                    || link.contains("filter") || link.contains("intre")) {
                continue;
            }
            this.queue.add(link);
        }
        System.out.println(this.queue);
    }

    /**
     * Persists the page's product: registers the URL under this crawl's start
     * URL, then inserts the product row. All statements are parameterized
     * (the original string-concatenated SQL was injectable via the URL or the
     * product description) and closed via try-with-resources.
     */
    private void persistProduct(ParsedPage page) {
        try {
            Connection connection = connectionPool.getConnection();

            // Look up the id of this crawl's start URL.
            int idStartUrl;
            try (PreparedStatement select = connection.prepareStatement(
                    "select id from starturls where startUrl = ?")) {
                select.setString(1, startURL);
                try (ResultSet rs = select.executeQuery()) {
                    if (!rs.next()) {
                        // Start URL was never registered; nothing to attach the product to.
                        System.out.println("start URL not found in starturls: " + startURL);
                        return;
                    }
                    idStartUrl = rs.getInt(1);
                }
            }

            // Record the visited page URL.
            try (PreparedStatement insertUrl = connection.prepareStatement(
                    "insert into urls (url, idStartUrl) values (?, ?)")) {
                insertUrl.setString(1, page.getPageURL());
                insertUrl.setInt(2, idStartUrl);
                insertUrl.executeUpdate();
            }

            // Fetch the id the insert above produced.
            int idUrl;
            try (PreparedStatement selectUrl = connection.prepareStatement(
                    "select id from urls where url = ?")) {
                selectUrl.setString(1, page.getPageURL());
                try (ResultSet rs = selectUrl.executeQuery()) {
                    if (!rs.next()) {
                        System.out.println("url row not found after insert: " + page.getPageURL());
                        return;
                    }
                    idUrl = rs.getInt("id");
                }
            }

            // Store the product itself, stamped with the visit date.
            try (PreparedStatement insertProduct = connection.prepareStatement(
                    "insert into produse (descriere, pret, data_vizita, idUrl) "
                            + "values (?, ?, current_date(), ?)")) {
                insertProduct.setString(1, page.getParsedProduct().getStrDescription());
                // setObject: the declared type of getPrice() is not visible here.
                insertProduct.setObject(2, page.getParsedProduct().getPrice());
                insertProduct.setInt(3, idUrl);
                insertProduct.executeUpdate();
            }
        } catch (SQLException e) {
            System.out.println("failed to store parsed page in db: " + e);
            e.printStackTrace();
        }
    }

    /** @return {@code true} if {@code url} contains at least one configured keyword. */
    private boolean containsKeywordsURL(String url) {
        for (String keyword : keywords) {
            if (url.contains(keyword))
                return true;
        }
        return false;
    }

    @Override
    public void run() {
        startCrawl();
    }

    /**
     * Main crawl loop: pops URLs off the queue (up to {@link #MAX_VISITS}),
     * runs one {@link HtmlParser} per URL, then waits for any registered
     * parser threads to finish before returning.
     */
    private void startCrawl() {

        int visits = 0;
        synchronized (this) {
            this.queue.add(this.startURL);
        }
        // NOTE: the original condition was (!queue.isEmpty() || runningThreads == 0),
        // which kept looping on an empty queue and made removeFirst() throw
        // NoSuchElementException. The loop must stop when the queue drains.
        while (visits < MAX_VISITS) {
            if (activeThreads() < THREADS_NO) {
                String url;
                synchronized (this) {
                    if (this.queue.isEmpty()) {
                        break;
                    }
                    url = this.queue.removeFirst();
                    this.visited.add(url);
                }
                visits++;
                HtmlParser htmlParser = new HtmlParser(url, this);
                htmlParser.start();
                try {
                    htmlParser.join();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    return;
                }
            } else {
                // Thread pool saturated: back off before trying again.
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
        // Let any still-registered parser threads drain before finishing.
        while (activeThreads() > 0) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }

    }

    @Override
    public String toString() {
        return "DomainCrawler [startURL=" + startURL + ", queue=" + queue
                + ", visited=" + visited + ", keywords=" + keywords
                + ", runningThreads=" + runningThreads + "]";
    }

}
