﻿using System;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Threading;
using Abot.Crawler;
using Abot.Poco;
using Crawler.Abot.Configurations;
using Crawler.Abot.Crawlers;
using Crawler.ApplicationServices;
using Crawler.Domain;
using Crawler.Domain.DataEntities;
using Crawler.DomainServices.DataServices;
using Crawler.DomainServices.Download;
using log4net;

namespace Crawler.Abot
{
	internal class TorrentCrawler : ITorrentCrawler
	{
		private static readonly ILog Logger = LogManager.GetLogger("AppLogger");

		private readonly IConfiguratorProvider _configuratorProvider;
		private readonly ICrawlerProvider _crawlerProvider;
		private readonly IDependencyDownloaderProvider _dependencyDownloaderProvider;
		private readonly IDownloadConfiguration _downloadConfiguration;
		private readonly ICrawlingSessionRepository _sessionRepository;

		public TorrentCrawler(ICrawlerProvider crawlerProvider,
			IConfiguratorProvider configuratorProvider,
			IDependencyDownloaderProvider dependencyDownloaderProvider,
			IDownloadConfiguration downloadConfiguration,
			ICrawlingSessionRepository sessionRepository)
		{
			_crawlerProvider = crawlerProvider;
			_configuratorProvider = configuratorProvider;
			_dependencyDownloaderProvider = dependencyDownloaderProvider;
			_downloadConfiguration = downloadConfiguration;
			_sessionRepository = sessionRepository;
		}

		/// <summary>
		/// Runs a complete crawl for the given source: creates and persists a new
		/// <see cref="CrawlingSession"/>, crawls the source's configured root URI,
		/// and records the final outcome on the session.
		/// </summary>
		/// <param name="source">Torrent source whose configured root URI is crawled.</param>
		/// <param name="cts">Token source the caller can use to cancel the crawl.</param>
		public void Start(TorrentSource source, CancellationTokenSource cts)
		{
			CrawlingSession crawlingSession = CreateCrawlingSession(source);

			IWebCrawler crawler = SetUpWebCrawler(source, crawlingSession);

			CrawlResult result = DoCrawl(source, cts, crawler, crawlingSession);

			ExecutePostCrawlingTasks(result, crawlingSession);
		}

		// Crawls the source's root URI (blocking until done or cancelled) and
		// persists the session with CrawlCompleted set.
		private CrawlResult DoCrawl(TorrentSource source, CancellationTokenSource cts, IWebCrawler crawler,
			CrawlingSession session)
		{
			Uri rootUri = _configuratorProvider.Get(source).RootUri;

			Logger.InfoFormat("DoCrawl Started For {0}", rootUri.AbsoluteUri);
			CrawlResult crawlResult = crawler.Crawl(rootUri, cts);
			session.CrawlCompleted = true;
			_sessionRepository.Save(session);
			return crawlResult;
		}

		// Records the final outcome (success flag, completion, end time) on the
		// session carried through the crawl bag.
		private void ExecutePostCrawlingTasks(CrawlResult result, CrawlingSession crawlingSession)
		{
			var sessionInfo = (CrawlingSession) result.CrawlContext.CrawlBag.CrawlingSession;

			Debug.Assert(crawlingSession.SessionId == sessionInfo.SessionId);

			sessionInfo.Succeed = !result.ErrorOccurred;
			sessionInfo.IsCompleted = true;
			sessionInfo.EndDateUtc = DateTime.UtcNow;

			// BUGFIX: persist the instance that was actually mutated. Previously the
			// unmodified crawlingSession was saved, so the final state was lost
			// whenever the crawl bag held a different instance (the Debug.Assert
			// above only compares SessionIds, and is removed in Release builds).
			_sessionRepository.Save(sessionInfo);
		}

		// Resolves the crawler for the source, stashes the session in the crawl bag
		// (so page-completed handlers can reach it), and hooks the completion event.
		private IWebCrawler SetUpWebCrawler(TorrentSource source, CrawlingSession crawlingSession)
		{
			IWebCrawler crawler = _crawlerProvider.Get(source);
			crawler.CrawlBag.CrawlingSession = crawlingSession;
			crawler.PageCrawlCompletedAsync += crawler_ProcessPageCrawlCompleted;
			return crawler;
		}

		// Creates a new session (fresh GUID id, UTC timestamps, root URL of the
		// source), ensures its per-session download folder exists, and persists it.
		private CrawlingSession CreateCrawlingSession(TorrentSource source)
		{
			DateTime startDateUtc = DateTime.UtcNow;
			var crawlingSession = new CrawlingSession
			{
				SessionId = Guid.NewGuid().ToString(),
				StartDateUtc = startDateUtc,
				UtcCrawled = startDateUtc,
				FromUrl = _configuratorProvider.Get(source).RootUri.AbsoluteUri,
			};

			// Folder name derives from the session id; '-' is replaced with '_'
			// to keep the directory name free of GUID dashes.
			string downloadsFolder = _downloadConfiguration.DownloadsFolder;
			string folderName = Path.Combine(downloadsFolder, crawlingSession.SessionId.Replace('-', '_'));
			if (!Directory.Exists(folderName))
				Directory.CreateDirectory(folderName);

			crawlingSession.Folder = folderName;
			_sessionRepository.Save(crawlingSession);

			return crawlingSession;
		}

		// Per-page completion handler: counts the page, logs the outcome, and hands
		// non-empty page content to the source's dependency downloader for storage.
		private void crawler_ProcessPageCrawlCompleted(object sender, PageCrawlCompletedArgs e)
		{
			try
			{
				CrawledPage crawledPage = e.CrawledPage;
				var sessionInfo = (CrawlingSession) e.CrawlContext.CrawlBag.CrawlingSession;

				sessionInfo.IncrementPagesCrawled();

				// BUGFIX: HttpWebResponse can be null when the request failed outright
				// (e.g. DNS/connect failure); dereferencing it unconditionally threw
				// a NullReferenceException here.
				bool pageFailed = crawledPage.WebException != null
					|| crawledPage.HttpWebResponse == null
					|| crawledPage.HttpWebResponse.StatusCode != HttpStatusCode.OK;

				if (pageFailed)
					Logger.WarnFormat("SessionId:{0} - ProcessPageCrawlCompleted: FAILURE - PageUrl: {1} - Reason: {2}",
						sessionInfo.SessionId, e.CrawledPage.Uri.AbsoluteUri,
						crawledPage.WebException != null ? crawledPage.WebException.ToString() : string.Empty);
				else
					Logger.InfoFormat("SessionId:{0} - ProcessPageCrawlCompleted: SUCCEED - PageUrl: {1}",
						sessionInfo.SessionId, e.CrawledPage.Uri.AbsoluteUri);

				// BUGFIX: Content itself may be null on failed requests; guard before
				// reading Content.Text.
				if (crawledPage.Content == null || string.IsNullOrEmpty(crawledPage.Content.Text))
					Logger.InfoFormat("SessionId:{0} - ProcessPageCrawlCompleted: SUCCEED BUT THE PAGE HAD NO CONTENT - PageUrl: {1}",
						sessionInfo.SessionId, e.CrawledPage.Uri.AbsoluteUri);
				else
				{
					//TODO: create downloader to download politely all dependencies

					//PROCESSORS ARE IN ANOTHER STAGE, PER FILE JUST CHANGE THE SOURCES TO BE IN THE DRIVE

					string urlToProcess = e.CrawledPage.Uri.AbsoluteUri;

					string htmlContent = crawledPage.Content.Text;
					IDependencyDownloader dependencyDownloader = _dependencyDownloaderProvider.GetFor(sessionInfo.Source);
					dependencyDownloader.Store(sessionInfo, urlToProcess, htmlContent);
					sessionInfo.IncrementPagesContentDownloaded();
				}
			}
			catch (Exception ex)
			{
				// BUGFIX: previously an empty catch (//TODO: LOG) swallowed every
				// failure silently. A throwing handler must not abort the crawl,
				// but the failure has to be visible.
				Logger.Error("ProcessPageCrawlCompleted failed", ex);
			}
		}
	}
}