// Downloader.cs
// 
// Copyright © 2009 FreeZzaph
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program.  If not, see <http://www.gnu.org/licenses/>.
//

using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;

namespace LibFreeZzaph
{
	
	/// <summary>
	/// Scrapes web pages for downloadable file links and saves the files to disk,
	/// reporting progress on the console. Failing hosts are blacklisted for the
	/// remainder of a run and duplicate URLs are skipped.
	/// </summary>
	internal static class Downloader
	{
		/// <summary>
		/// One pending download: the file URL plus the page it was discovered on,
		/// which is passed along as the HTTP Referer when the file is fetched.
		/// </summary>
		private class DownloadItem
		{
			public string Referer { get; set; }
			public Uri Url { get; set; }
		}
		
		/// <summary>
		/// Expands a file-name format string by replacing every "%key%" token
		/// with the matching value from the dictionary. Tokens with no matching
		/// key are left untouched.
		/// </summary>
		/// <param name="fileNameFormat">Format string containing %key% placeholders.</param>
		/// <param name="filenameReplacementValues">Replacement values keyed by token name (without the % delimiters).</param>
		/// <returns>The format string with all known tokens substituted.</returns>
		private static string GenerateFileName(string fileNameFormat, IDictionary<string, string> filenameReplacementValues)
		{
			string fileName = fileNameFormat;
			
			foreach (KeyValuePair<string, string> replacement in filenameReplacementValues)
			{
				fileName = fileName.Replace("%" + replacement.Key + "%", replacement.Value);
			}
			
			return fileName;
		}
		
		/// <summary>
		/// Collects file links matching <paramref name="filter"/> from every page
		/// in <paramref name="urls"/>, downloads each unique file into
		/// <paramref name="savePath"/>, and returns the list of files written.
		/// Hosts whose pages fail to load are blacklisted for the rest of the run;
		/// duplicate page and file URLs are skipped. Progress goes to the console.
		/// </summary>
		/// <param name="urls">Pages to scrape for file links.</param>
		/// <param name="filter">Regex alternation of file extensions to download (e.g. "jpg|png").</param>
		/// <param name="savePath">Directory the downloaded files are written into.</param>
		/// <returns>Full paths of every file successfully saved.</returns>
		public static IList<string> DownloadFrom(IList<Uri> urls, string filter, string savePath)
		{
			List<string> fileList = new List<string>();
			List<DownloadItem> urlList = new List<DownloadItem>();
			
			// Case-insensitive host blacklist. The original lower-cased the host
			// on lookup but not on insert, so any blacklisted host containing an
			// upper-case letter was never actually skipped.
			HashSet<string> blackList = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
			HashSet<Uri> duplicateCheck = new HashSet<Uri>();
			
			int linkCount = 0, fileNumber = 0;
			int urlCount = urls.Count, currentUrl = 1;
			
			// Phase 1: scrape every page for matching file links.
			foreach (Uri url in urls)
			{
				string host = url.Host;
				
				if (blackList.Contains(host))
				{
					// Skipped/failed pages shrink the displayed total rather
					// than advancing the current-page counter.
					Console.WriteLine(string.Format("Skipping blacklisted address. ({0}/{1})",
						currentUrl, --urlCount));
					continue;
				}
				
				if (duplicateCheck.Contains(url))
				{
					Console.WriteLine(string.Format("Skipping duplicate address. ({0}/{1})",
						currentUrl, --urlCount));
					continue;
				}
				
				Console.Write(string.Format("Gathering URLs from {0} ({1}/{2})...",
					url.ToString(), currentUrl, urlCount));
				duplicateCheck.Add(url);
				
				try 
				{
					IList<Uri> fileUrls = CollectUrlsFrom(url, filter);
					Console.WriteLine(string.Format("found {0}.", fileUrls.Count));
					linkCount += fileUrls.Count;
					foreach (Uri fileUrl in fileUrls)
					{
						DownloadItem item = new DownloadItem();
						item.Referer = url.ToString();
						item.Url = fileUrl;
						
						urlList.Add(item);
					}
					currentUrl++;
				}
				catch (Exception)
				{
					// Page could not be fetched or parsed; assume the whole
					// host is unreachable and do not try it again this run.
					Console.WriteLine("failed! Blacklisting address.");
					blackList.Add(host);
					urlCount--;
				}
			}
			Console.WriteLine(string.Format("Found a total of {0} files to download.", linkCount));
			Console.WriteLine();
			
			// Build a zero-padding custom format wide enough for the number of
			// queued files, e.g. 123 files -> "000" -> 001, 002, ... An empty
			// queue yields "" (plain numeric formatting), same as before.
			string countFormat = "";
			for (int remaining = urlList.Count; remaining >= 1; remaining /= 10)
			{
				countFormat += "0";
			}
			
			// Phase 2: download each unique file.
			duplicateCheck.Clear();
			foreach (DownloadItem item in urlList)
			{
				Console.Write(string.Format("Downloading {0} ({1}/{2})...", item.Url.ToString(), fileNumber + 1, linkCount));
				// HashSet.Add returns false when the URL was already seen,
				// folding the old Contains-then-Add pair into one lookup.
				if (!duplicateCheck.Add(item.Url))
				{
					Console.WriteLine("duplicate! Skipping.");
					linkCount--;
					continue;
				}
				
				try
				{
					byte[] fileBytes = FreeZzaphUtility.FetchUrlBytes(item.Url, item.Referer);
					
					IDictionary<string, string> filenameReplacementValues = new Dictionary<string, string>();
					
					string filePath = item.Url.AbsolutePath;
					
					string fileCount = string.Format("{0:" + countFormat + "}", fileNumber++);
					filenameReplacementValues.Add("filenumber", fileCount); 
					
					// Guard against paths with no extension: LastIndexOf returns
					// -1 and the original Substring(-1 + 1) silently used the
					// entire path as the "extension".
					int dotIndex = filePath.LastIndexOf('.');
					string fileExtension = dotIndex >= 0 ? filePath.Substring(dotIndex + 1).ToLower() : "";
					filenameReplacementValues.Add("extension", fileExtension);
					
					string origFileName = filePath.Substring(filePath.LastIndexOf('/') + 1).ToLower();
					filenameReplacementValues.Add("originalfilename", origFileName);
					
					filenameReplacementValues.Add("hostname", item.Url.Host);
					
					string fileFormat = FreeZzaphCLI.FreeZzaphCLI.Settings["SaveFileFormat"];
					string fileName = GenerateFileName(fileFormat, filenameReplacementValues);
					string saveFile = Path.Combine(savePath, fileName);
					
					// File.WriteAllBytes closes its stream even when the write
					// throws; the original leaked the FileStream on failure.
					File.WriteAllBytes(saveFile, fileBytes);
					fileList.Add(saveFile);
					
					Console.WriteLine("done.");
				}
				catch (Exception e)
				{
					// Best-effort: report the failure and move on to the next file.
					Console.WriteLine("failed:");
					Console.WriteLine(e.Message);
					Console.WriteLine();
					linkCount--;
				}
			}
			return fileList;
		}
		
		/// <summary>
		/// Fetches the page at <paramref name="url"/> and extracts every anchor
		/// href whose file extension matches the <paramref name="filter"/>
		/// alternation. Relative links are resolved against the page URL;
		/// malformed links are silently ignored.
		/// </summary>
		/// <param name="url">Page to scan for links.</param>
		/// <param name="filter">Regex alternation of extensions, e.g. "jpg|png".
		/// NOTE(review): it is inserted into the pattern unescaped, so it must be
		/// valid regex syntax.</param>
		/// <returns>Absolute URLs of all matching links, in document order.</returns>
		private static IList<Uri> CollectUrlsFrom(Uri url, string filter)
		{
			string page = FreeZzaphUtility.FetchUrl(url);
			List<Uri> urlList = new List<Uri>();
			Regex filterPattern = new Regex(@"<a.+?href=""?([^"">\.]+\.(?:" + filter + @"))""?[^>]*?>", RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.Singleline);
			
			foreach (Match match in filterPattern.Matches(page))
			{
				try
				{
					// Resolve the captured href relative to the page URL.
					urlList.Add(new Uri(url, match.Groups[1].Value));
				}
				catch (UriFormatException)
				{
					// Malformed URL, ignore (narrowed from a bare catch; the
					// Uri(Uri, string) constructor throws UriFormatException
					// for unparseable links).
				}
			}
			
			return urlList;
		}
		
	}
}
