package com.kalua.list.harvester;

import java.time.Instant;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Maps;
import com.kalua.list.dbaccess.retailer.CrawlBatchInfoDAO;
import com.kalua.list.domainmodel.TextIdentifier;
import com.kalua.list.domainmodel.retailer.CrawlBatchInfo;
import com.kalua.list.domainmodel.retailer.CrawlBatchInfoImpl;
import com.kalua.list.harvester.crawldata.CrawlEntryPoints;
import com.kalua.list.harvester.crawldata.History;
import com.kalua.list.harvester.crawldata.HistoryImpl;
import com.kalua.list.harvester.execution.CrawlExecutor;
import com.kalua.list.harvester.parser.ProductParseFunction;
import com.kalua.list.harvester.parser.ProductParseFunctionFactory;
import com.kalua.list.harvester.parser.ProductParseResult;
import com.kalua.list.harvester.resultprocessing.ResultProcessor;
import com.kalua.list.harvester.resultprocessing.ResultProcessorFactory;

/**
 * A {@link WebCrawler} that crawls one retailer per {@link #doCrawl} call in three stages:
 * index product pages (via {@link CrawlExecutor}), parse the indexed product links in
 * parallel, then feed the parse results through a {@link ResultProcessor}.
 *
 * <p>Batch lifecycle is tracked through {@link CrawlBatchInfoDAO}: the record is created
 * after indexing, updated with a completion time on success, and deleted when a batch
 * fails without having created or updated anything.
 */
public class ConcurrentWebCrawler implements WebCrawler {

	private static final Logger logger = LoggerFactory.getLogger(ConcurrentWebCrawler.class);

	// Entry points keyed by retailer id, built once at construction.
	private final Map<TextIdentifier, CrawlEntryPoints> retailerEntryPoints;

	private final CrawlExecutor crawlExecutor;
	private final CrawlBatchInfoDAO crawlBatchInfoDAO;
	private final ProductParseFunctionFactory productParseFunctionFactory;
	private final ResultProcessorFactory resultProcessorFactory;

	/**
	 * Creates a crawler for the given retailers.
	 *
	 * @param points one {@link CrawlEntryPoints} per retailer; indexed by retailer id
	 *        (duplicate retailer ids: last one wins, matching map-put semantics)
	 * @param crawlExecutor runs the page-indexing stage
	 * @param crawlBatchInfoDAO persistence for batch lifecycle records
	 * @param productParseFunctionFactory builds the per-batch page-to-product parser
	 * @param resultProcessorFactory builds the per-batch result sink
	 */
	public ConcurrentWebCrawler(	List<CrawlEntryPoints> points,
													CrawlExecutor crawlExecutor,
													CrawlBatchInfoDAO crawlBatchInfoDAO,
													ProductParseFunctionFactory productParseFunctionFactory,
													ResultProcessorFactory resultProcessorFactory) {
		this.retailerEntryPoints = mapEntryPoints(points);
		this.crawlExecutor = crawlExecutor;
		this.crawlBatchInfoDAO = crawlBatchInfoDAO;
		this.productParseFunctionFactory = productParseFunctionFactory;
		this.resultProcessorFactory = resultProcessorFactory;
	}

	/**
	 * Runs a full crawl batch for the given retailer. A new batch record is derived from
	 * the most recent one (if any); if the retailer has no configured entry points the
	 * crawl is skipped with a warning.
	 *
	 * @param retailerId the retailer to crawl
	 * @throws InterruptedException if the crawl is interrupted while waiting on the executor
	 */
	@Override
	public void doCrawl(TextIdentifier retailerId) throws InterruptedException {
		CrawlBatchInfo previousBatchInfo =
				this.crawlBatchInfoDAO.findLatest(retailerId);
		logger.info("Previous batch details: {}", previousBatchInfo);
		CrawlBatchInfoImpl batchInfo = createBatchInfo(previousBatchInfo, retailerId);

		CrawlEntryPoints entryPoint = this.retailerEntryPoints.get(retailerId);
		// BUG FIX: the null check must come BEFORE the dereference. The original code
		// called entryPoint.listToString() first, so an unknown retailer produced an
		// NPE instead of the intended warn-and-exit.
		if (entryPoint == null) {
			logger.warn("No entry points found for {}, exiting crawl.", retailerId);
			return;
		}
		logger.info("Initiating crawl with entry-points {}.", entryPoint.listToString());
		startCrawl(retailerId, batchInfo, entryPoint);
	}

	public CrawlBatchInfoDAO getCrawlBatchInfoDAO() { return this.crawlBatchInfoDAO; }

	public CrawlExecutor getCrawlExecutor() { return this.crawlExecutor; }

	public ProductParseFunctionFactory getProductParseFunctionFactory()
	{ return this.productParseFunctionFactory; }

	public ResultProcessorFactory getResultProcessorFactory()
	{ return this.resultProcessorFactory; }

	/**
	 * Executes the three crawl stages and finalizes the batch record. On success the
	 * batch's completion time is persisted; on failure the batch record is deleted if
	 * it produced no creates/updates (so empty failed batches leave no trace).
	 */
	private void startCrawl(	TextIdentifier retailerId,
											CrawlBatchInfoImpl batchInfo,
											CrawlEntryPoints entryPoint)
											throws InterruptedException {
		try {
			History history = indexProductPages(retailerId, batchInfo, entryPoint);
			List<ProductParseResult> productParseResults =
					mineProductData(retailerId, batchInfo, history);

			ResultProcessor resultProcessor =
					this.resultProcessorFactory.createResultProcessor(batchInfo);

			// The processor is applied concurrently; it is expected to tolerate parallel
			// invocation (NOTE(review): confirm ResultProcessor is thread-safe).
			productParseResults.parallelStream().forEach(resultProcessor);
			resultProcessor.writeCollectedResults();

			batchInfo.setTimeCompleted(Instant.now());
			this.crawlBatchInfoDAO.update(batchInfo);
			logger.info("Batch terminating normally. {}", batchInfo);
		} catch (InterruptedException interrupted) {
			// BUG FIX: the original caught InterruptedException in the generic Exception
			// handler and swallowed it, despite declaring `throws InterruptedException`.
			// Clean up, restore the interrupt flag, and propagate.
			logger.error("Batch interrupted.", interrupted);
			deleteBatchIfEmpty(batchInfo);
			Thread.currentThread().interrupt();
			throw interrupted;
		} catch (Exception exce) {
			logger.error("Unexpected exception caught in batch.", exce);
			deleteBatchIfEmpty(batchInfo);
		}
	}

	/** Deletes the batch record when a failed batch created and updated nothing. */
	private void deleteBatchIfEmpty(CrawlBatchInfoImpl batchInfo) {
		if (batchInfo.getCreateCount() == 0 &&
				batchInfo.getUpdateCount() == 0) {
			logger.error("Create/update count are zero, so will delete batch record {}.",
								batchInfo);
			this.crawlBatchInfoDAO.delete(batchInfo);
		}
	}

	/**
	 * Stage 1: crawls from the retailer's entry points, collecting discovered product
	 * links into a {@link History}. Blocks until the executor goes idle, then records
	 * the extracted-product count and persists the batch record for the first time.
	 *
	 * @return the crawl history holding the discovered product links
	 * @throws InterruptedException if interrupted while waiting for the executor
	 */
	protected History indexProductPages(	TextIdentifier retailerId,
																CrawlBatchInfoImpl batchInfo,
																CrawlEntryPoints entryPoint)
																		throws InterruptedException {
		History result = new HistoryImpl();
		this.crawlExecutor.crawl(retailerId, result, entryPoint);
		this.crawlExecutor.waitUntilIdle();

		batchInfo.setNumberOfProductsExtracted(result.productCount());
		logger.info(	"Completed indexing. Found {} potential products.",
							result.productCount());
		this.crawlBatchInfoDAO.create(batchInfo);
		return result;
	}

	/**
	 * Stage 2: parses every indexed product link in parallel through the batch's
	 * {@link ProductParseFunction}.
	 *
	 * @return one parse result per product link
	 */
	protected List<ProductParseResult>
							mineProductData(	TextIdentifier retailerId,
														CrawlBatchInfoImpl batchInfo,
														History history) {
		ProductParseFunction parseFunction =
				this.productParseFunctionFactory.createParseFunction(
																retailerId, batchInfo.getId());

		// ArrayList rather than the original LinkedList: same List contract, better
		// random-access/iteration characteristics for the downstream parallel stream.
		List<ProductParseResult> productParseResults =
				history	.getProductLinks()
							.parallelStream()
							.map(parseFunction)
							.collect(Collectors.toCollection(ArrayList::new));

		logger.info("Completed web stage. {} Product results found.",
							productParseResults.size());

		return productParseResults;
	}

	/** Indexes the entry-point list by retailer id (last entry wins on duplicates). */
	private Map<TextIdentifier, CrawlEntryPoints>
						mapEntryPoints(List<CrawlEntryPoints> points) {
		Map<TextIdentifier, CrawlEntryPoints> result = new HashMap<>();
		for (CrawlEntryPoints pts : points) {
			result.put(pts.getRetailerId(), pts);
		}
		return result;
	}

	/**
	 * Builds the batch record for this run, stamped with the current time. Carries the
	 * previous batch's id forward when one exists, otherwise passes a null id
	 * (presumably the DAO assigns one on create — confirm against CrawlBatchInfoDAO).
	 */
	private CrawlBatchInfoImpl
						createBatchInfo(CrawlBatchInfo previousBatchInfo,
												TextIdentifier retailerId) {
		CrawlBatchInfoImpl result;
		if (previousBatchInfo != null) {
			result =
				new CrawlBatchInfoImpl(		previousBatchInfo.getId(),
															retailerId,
															Instant.now());
		} else {
			result = new CrawlBatchInfoImpl(null, retailerId, Instant.now());
		}
		return result;
	}
}