package crawler;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;

import database.ConnectionPool;
import model.ParsedPage;
import parser.HtmlParser;

/**
 * Crawls a single domain starting from {@code startURL}, dispatching page
 * fetches to up to {@link #THREADS_NO} {@link HtmlParser} workers and storing
 * any parsed products through a JDBC {@link ConnectionPool}.
 *
 * <p>Thread-safety: {@link #registerThread()}, {@link #unregisterThread()} and
 * {@link #addParsedPage(ParsedPage)} are callbacks invoked by parser worker
 * threads, so all shared state ({@code queue}, {@code visited},
 * {@code runningThreads}) is guarded by this object's monitor.
 */
public class DomainCrawler extends Thread {

	/** Maximum number of parser worker threads allowed at once. */
	public static final int THREADS_NO = 8;
	/** Maximum number of URLs visited before the crawl stops. */
	public static final int MAX_VISITS = 1;

	private final String startURL;
	// URLs waiting to be fetched (FIFO).
	private final LinkedList<String> queue = new LinkedList<String>();
	// URLs already handed to a parser; used to avoid re-visiting.
	private final LinkedList<String> visited = new LinkedList<String>();
	private LinkedList<String> keywords; // TODO: add keywords to filter URLs by
	// Number of currently registered parser workers; guarded by "this".
	private int runningThreads = 0;
	private final ConnectionPool connectionPool;

	/**
	 * Creates the crawler and immediately starts its crawl thread.
	 *
	 * @param startURL       the seed URL (must already exist in the
	 *                       {@code starturls} table)
	 * @param connectionPool pool used to obtain database connections
	 */
	public DomainCrawler(String startURL, ConnectionPool connectionPool) {
		this.startURL = startURL;
		this.connectionPool = connectionPool;
		// NOTE(review): starting the thread from the constructor leaks a
		// partially constructed "this"; kept because existing callers rely
		// on the crawler auto-starting.
		start();
	}

	/**
	 * Tries to reserve a worker slot.
	 *
	 * @return true if a slot was free and has been reserved, false otherwise
	 */
	public synchronized boolean registerThread() {
		if (this.runningThreads < THREADS_NO) {
			this.runningThreads++;
			return true;
		}
		return false;
	}

	/** Releases a worker slot previously obtained via {@link #registerThread()}. */
	public synchronized void unregisterThread() {
		this.runningThreads--;
	}

	/**
	 * Callback from a parser worker: persists the page's product (if any) and
	 * enqueues the page's outgoing links for crawling.
	 *
	 * @param page the parsed page delivered by an {@link HtmlParser}
	 */
	public synchronized void addParsedPage(ParsedPage page) {
		if (page.getParsedProduct() != null) {
			try {
				Connection connection = connectionPool.getConnection();
				// NOTE(review): the connection is never handed back to
				// connectionPool — confirm whether the pool exposes a
				// release/return method and call it in a finally block.
				storeProduct(connection, page);
			} catch (SQLException e) {
				System.out.println("failed to connect to db" + e);
				e.printStackTrace();
			}
		}

		// Enqueue links not yet seen. The original test
		// (link != null && visited.contains(link)) let null links fall
		// through into the queue; also skip links already queued.
		for (String link : page.getLinks()) {
			if (link == null || this.visited.contains(link) || this.queue.contains(link)) {
				continue;
			}
			this.queue.add(link);
		}
	}

	// Persists the product found on "page": resolves the start-URL id,
	// records the page URL, then inserts the product row. All values are
	// bound through PreparedStatement placeholders instead of string
	// concatenation to prevent SQL injection via page content.
	private void storeProduct(Connection connection, ParsedPage page) throws SQLException {
		PreparedStatement statement;
		ResultSet resultSet;

		// 1. Resolve the id of the seed URL this crawl belongs to.
		int idStartUrl;
		statement = connection.prepareStatement("select id from starturls where startUrl = ?");
		try {
			statement.setString(1, startURL);
			resultSet = statement.executeQuery();
			if (!resultSet.next()) {
				throw new SQLException("start URL not registered: " + startURL);
			}
			idStartUrl = resultSet.getInt(1);
		} finally {
			statement.close(); // closing the statement also closes its ResultSet
		}

		// 2. Record the page URL under that seed URL.
		statement = connection.prepareStatement("insert into urls (url, idStartUrl) values (?, ?)");
		try {
			statement.setString(1, page.getPageURL());
			statement.setInt(2, idStartUrl);
			statement.executeUpdate();
		} finally {
			statement.close();
		}

		// 3. Fetch the id the new url row was assigned.
		int idUrl;
		statement = connection.prepareStatement("select id from urls where url = ?");
		try {
			statement.setString(1, page.getPageURL());
			resultSet = statement.executeQuery();
			if (!resultSet.next()) {
				throw new SQLException("url row missing after insert: " + page.getPageURL());
			}
			idUrl = resultSet.getInt("id");
		} finally {
			statement.close();
		}

		// 4. Insert the product itself, stamped with today's date.
		statement = connection.prepareStatement(
				"insert into produse (descriere, pret, data_vizita, idUrl) "
						+ "values (?, ?, current_date(), ?)");
		try {
			statement.setString(1, page.getParsedProduct().getDescription());
			// setObject: getPrice()'s declared type is not visible from here,
			// so let the driver map it — TODO confirm the column type matches.
			statement.setObject(2, page.getParsedProduct().getPrice());
			statement.setInt(3, idUrl);
			statement.executeUpdate();
		} finally {
			statement.close();
		}
	}

	@Override
	public void run() {
		startCrawl();
	}

	/**
	 * Main crawl loop: dispatches queued URLs to parser workers until the
	 * visit budget is spent, then waits for in-flight workers to finish.
	 */
	private void startCrawl() {
		int visits = 0;
		synchronized (this) {
			this.queue.add(this.startURL);
		}
		// Keep going while there is queued or in-flight work. The original
		// condition tested runningThreads == 0, which would have called
		// removeFirst() on an empty queue and thrown NoSuchElementException.
		while (visits < MAX_VISITS && hasPendingWork()) {
			String url = pollNextUrl();
			if (url != null) {
				visits++;
				new HtmlParser(url, this);
			} else {
				// Either all worker slots are busy or the queue is
				// momentarily empty — back off and re-check.
				sleepQuietly(3000);
			}
		}
		// Let the in-flight parsers drain before this thread exits.
		while (getRunningThreads() > 0) {
			sleepQuietly(1000);
		}
	}

	// True while anything is left to crawl or any parser is still running.
	private synchronized boolean hasPendingWork() {
		return !this.queue.isEmpty() || this.runningThreads > 0;
	}

	// Dequeues the next URL if a worker slot is free and marks it visited;
	// returns null when the caller should wait instead.
	private synchronized String pollNextUrl() {
		if (this.queue.isEmpty() || this.runningThreads >= THREADS_NO) {
			return null;
		}
		String url = this.queue.removeFirst();
		this.visited.add(url);
		return url;
	}

	private synchronized int getRunningThreads() {
		return this.runningThreads;
	}

	// Sleeps for the given period; on interruption, restores the interrupt
	// flag (the original swallowed it) and returns early.
	private static void sleepQuietly(long millis) {
		try {
			Thread.sleep(millis);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}
	}

	@Override
	public String toString() {
		return "DomainCrawler [startURL=" + startURL + ", queue=" + queue
				+ ", visited=" + visited + ", keywords=" + keywords
				+ ", runningThreads=" + runningThreads + "]";
	}

}
