package net.toby200.canalstoppages.scraper;

import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpRequest;
import com.google.appengine.api.ThreadManager;

import net.toby200.canalstoppages.Config;
import net.toby200.canalstoppages.model.Notice;
import net.toby200.canalstoppages.model.Notices;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Scrapes the Canal &amp; River Trust stoppage-notice search results page with jsoup,
 * then fans out one task per result row to fetch the notice detail pages in parallel
 * on App Engine request threads.
 */
public class JSoupNoticeListScraper implements INoticeListScraper {

    private static final Logger LOG = Logger.getLogger(JSoupNoticeListScraper.class.getName());

    /** App Engine hard limit on threads per request. */
    private static final int MAX_THREADS = 50;

    /** Rough number of notices a single worker thread can process per minute; used to size the pool. */
    private static final int NOTICES_PER_THREAD_PER_MINUTE = 50;

    // search-result-url.text() gives:
    // Waterway: Erewash Canal From Date: 6th January 2014 To Date: until further notice
    private static final Pattern basicsPattern = Pattern.compile("Waterway:(.*?) From Date: (.*?) To Date: (.*)");

    //https://canalrivertrust.org.uk/notice/2047/100-metres-west-of-wood-lane-bridge-erdington-birmingham-and-fazeley-canal-
    private static final Pattern idPattern = Pattern.compile("notice\\/(\\d*)\\/");
    static final String testString = "canalrivertrust.org.uk/notice/2047/100-metres-west-of-w";

    private static final JSoupNoticeDetailScraper detailScraper = new JSoupNoticeDetailScraper();

    /**
     * Fetches and parses the notices list page, retrying up to 3 times on fetch failure.
     *
     * @param urlString absolute URL of the search results page
     * @return the scraped notices; empty (never null) on failure
     */
    public Notices getNotices(String urlString) {
        return getNotices(urlString, 3);
    }

    /**
     * Fetches and parses the notices list page.
     *
     * @param urlString  absolute URL of the search results page
     * @param retryCount remaining fetch retries; each failed attempt sleeps 2s then recurses
     * @return the scraped notices; empty (never null) on failure
     */
    Notices getNotices(String urlString, int retryCount) {
        InputStream input;

        try {
            HttpRequest request = UrlFetchTransport.getDefaultInstance().createRequestFactory()
                    .buildGetRequest(new GenericUrl(urlString));
            HttpHeaders headers = request.getHeaders();
            headers.setUserAgent("CanalStoppages crawler toby@toby200.net");
            request.setHeaders(headers);
            input = request.execute().getContent();
        } catch (Exception e) {
            if (retryCount > 0) {
                LOG.log(Level.INFO, "Attempt failed. Sleeping 2 seconds then retrying");
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException e1) {
                    // Restore interrupt status so callers can observe the cancellation,
                    // and stop retrying rather than ignoring the interrupt.
                    Thread.currentThread().interrupt();
                    LOG.log(Level.WARNING, "Interrupted while waiting to retry; giving up", e1);
                    return new Notices();
                }
                return getNotices(urlString, retryCount - 1);
            }
            LOG.log(Level.SEVERE, "Unable to get any information from website, no more retries", e);
            return new Notices();
        }

        if (input != null) {
            try {
                // charset null -> jsoup detects it from the http-equiv meta tag / byte order mark
                Document doc = Jsoup.parse(input, null, urlString);
                return parseDoc(doc);
            } catch (IOException e) {
                LOG.log(Level.SEVERE, "Unable to parse main notices page", e);
                LOG.log(Level.INFO, input.toString(), e);
            }
        }

        return new Notices();
    }

    /**
     * Extracts every result row from the list page and scrapes each notice's detail
     * page concurrently, sized so the work spreads over the available request time.
     */
    private Notices parseDoc(Document doc) {
        LOG.info("Parsing main document...");
        final Notices notices = new Notices();

        // jsoup select() never returns null; an empty result just means the table is missing/empty.
        final Elements ulNotices = doc.select("table.search-results-list-table").select("tr");
        if (ulNotices.isEmpty()) {
            LOG.warning("Unable to find table.search-results-list-table or it's empty");
        }
        LOG.info("Found " + ulNotices.size() + " rows of results");

        final ThreadFactory factory = ThreadManager.currentRequestThreadFactory();
        // MAX_THREADS is appengine limit, but we want to spread load out over the 10 minutes we have so use min threads
        // necessary to handle all the notices in time, assuming each thread handles ~50 updates a minute
        int maxThreads = Math.min(MAX_THREADS - 1, ulNotices.size() / NOTICES_PER_THREAD_PER_MINUTE) + 1;
        final ExecutorService executor = new ThreadPoolExecutor(0, maxThreads, 30, TimeUnit.SECONDS,
                new LinkedBlockingDeque<Runnable>(), factory);

        try {
            List<Future<Notice>> futureNotices = new ArrayList<Future<Notice>>(ulNotices.size());
            for (final Element element : ulNotices) {
                futureNotices.add(executor.submit(new Callable<Notice>() {
                    public Notice call() {
                        return getNoticeFromElement(element);
                    }
                }));
            }

            for (Future<Notice> futureNotice : futureNotices) {
                try {
                    Notice notice = futureNotice.get();
                    if (notice != null) {
                        notices.add(notice);
                    }
                    if (notices.size() % 50 == 0 || notices.size() == ulNotices.size()) {
                        LOG.info("Processed " + notices.size() + " of " + ulNotices.size() + " notices...");
                    }
                } catch (InterruptedException e) {
                    // Restore interrupt status; remaining futures are skipped-over best-effort.
                    Thread.currentThread().interrupt();
                    LOG.log(Level.WARNING, "Interrupted while collecting scraped notices", e);
                } catch (ExecutionException e) {
                    LOG.log(Level.SEVERE, "Scraping a notice failed", e);
                }
            }
        } finally {
            // Always release the pool, even if submission/collection throws.
            executor.shutdown();
        }

        if (notices.size() == 0) {
            LOG.warning("No notices found, HTML was: ");
            LOG.info(doc.outerHtml());
        }

        LOG.info("Processing completed, loaded " + notices.size() + " notices");
        return notices;
    }

    /**
     * Builds a {@link Notice} from one search-result table row: pulls the detail link,
     * title, waterway/date summary and numeric id, then scrapes the linked detail page.
     *
     * @return the notice, or null when any required piece is missing or unparseable
     */
    private Notice getNoticeFromElement(Element element) {
        Element detailLink = element.select("a[href]").first();
        if (detailLink == null) {
            LOG.warning("Unable to find detail link in " + element);
            return null;
        }
        String detailUrl = detailLink.attr("href");
        // Strip the trailing "»" glyph the site appends to link text.
        String title = detailLink.text().replace("\u00BB", "").trim();

        Element basics = element.select("div.search-result-url").first();
        if (basics == null) {
            LOG.warning("Unable to find div.search-result-url in " + element);
            return null;
        }
        String basicText = basics.text();
        Matcher m = basicsPattern.matcher(basicText);
        if (!m.find()) {
            LOG.warning("Unable to find waterways or date details in " + basicText);
            return null;
        }
        String waterway = m.group(1).trim();
        String fromDate = m.group(2).trim();
        String toDate = m.group(3).trim();

        long id;
        m = idPattern.matcher(detailUrl);
        if (m.find()) {
            id = Long.parseLong(m.group(1)); // id must be valid long due to regex only matching numbers
        } else {
            LOG.warning("Unable to extract id from url " + detailUrl);
            return null;
        }

        NoticeDetail detail = detailScraper.getNoticeDetail(Config.BASE_URL + detailUrl);
        if (detail == null) {
            LOG.info("Missing detail for " + element);
            return null;
        }

        return new Notice(id, title, waterway, fromDate, toDate, detail.getType(), detail.getDescription(),
                detail.getLocationDetail(), detail.getStartLoc(), detail.getEndLoc(), detailUrl);
    }

}
