﻿using System;
using System.IO;
using System.Net;
using System.Text;
using System.Threading;
using Crawler.ApplicationServices;
using log4net;

namespace Crawler.Infrastructure.ApplicationServices
{
	internal class RequestsHelper : IRequestsHelper
	{
		private static readonly ILog Logger = LogManager.GetLogger("AppLogger");

		// Pause between failed attempts before retrying.
		private static readonly TimeSpan RetryDelay = TimeSpan.FromSeconds(5);

		// Maximum number of attempts per GetPageHtmlText call (default 30).
		private int _tries = 30;

		/// <summary>
		/// Downloads the HTML body of <paramref name="url"/>, retrying on any failure
		/// with a fixed delay between attempts.
		/// </summary>
		/// <param name="url">Absolute URL to fetch.</param>
		/// <returns>The response body decoded as UTF-8 text.</returns>
		/// <exception cref="InvalidOperationException">
		/// Thrown when every attempt fails; the last failure is attached as the inner exception.
		/// </exception>
		public string GetPageHtmlText(string url)
		{
			// Work on a local copy of the retry budget. The previous code
			// decremented the _tries field itself, so once one URL exhausted
			// the retries, every later call on this instance failed after a
			// single attempt.
			int attemptsLeft = _tries;
			Exception lastError = null;
			do
			{
				try
				{
					return DoRequest(url);
				}
				catch (Exception ex)
				{
					lastError = ex;
					Logger.WarnFormat("RequestHelper: Retrying {0} - {1}", url, ex);
					Thread.Sleep(RetryDelay);
				}
			} while (--attemptsLeft > 0);
			// Keep the last failure as the inner exception so callers can see
			// why the URL could not be fetched.
			throw new InvalidOperationException(string.Format("could not parse {0}", url), lastError);
		}

		/// <summary>
		/// Sets how many attempts each subsequent <see cref="GetPageHtmlText"/> call may use.
		/// Values below 1 still result in a single attempt (do-while semantics).
		/// </summary>
		public void SetRetriesUpTo(int numRetries)
		{
			_tries = numRetries;
		}

		/// <summary>Performs a single HTTP GET and returns the response body as text.</summary>
		private string DoRequest(string url)
		{
			var request = BuildRequestObject(new Uri(url));
			using (var response = (HttpWebResponse) request.GetResponse())
			using (Stream receiveStream = response.GetResponseStream())
			using (var readStream = new StreamReader(receiveStream, Encoding.UTF8))
			{
				return readStream.ReadToEnd();
			}
		}

		/// <summary>
		/// Builds an HTTP request that follows redirects and accepts gzip/deflate-compressed bodies.
		/// </summary>
		private HttpWebRequest BuildRequestObject(Uri uri)
		{
			var request = (HttpWebRequest) WebRequest.Create(uri);
			request.AllowAutoRedirect = true;
			// NOTE(review): this impersonates Googlebot — confirm the crawled sites permit it.
			request.UserAgent = @"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html);";
			request.Accept = "*/*";
			request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
			return request;
		}
	}
}