﻿using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.Linq;
using System.Web;
using DotNetNuke.ComponentModel;
using DotNetNuke.Entities.Portals;
using LitS3;

namespace BrandonHaynes.Providers.AmazonS3.Data
	{
	/// <summary>
	/// A provider which provides integration services with Amazon S3.  In general, files and folders
	/// marked as S3-enabled (defined as having their second highest-order bit set) are delegated to
	/// S3 for authorization checks.  Otherwise, base functionality is employed.
	/// </summary>
	public class AmazonS3DataProvider : DotNetNuke.Data.SqlDataProvider
		{
		#region Provider Settings
		/// <summary>
		/// This override is required for all derived DataProviders (due to squirliness in the core framework)
		/// </summary>
		public override Dictionary<string, string> Settings
			{ get { return ComponentFactory.GetComponentSettings<AmazonS3DataProvider>() as Dictionary<string, string>; } }
		#endregion

		#region File Overrides

		/// <summary>
		/// Gets a file by name; S3-flagged folders are resolved against the matching S3 bucket,
		/// all others fall through to the base SQL provider.
		/// </summary>
		public override IDataReader GetFile(string FileName, int PortalId, int FolderID)
			{
			if (S3Folder.IsS3Folder(FolderID))
				return GetFile(FileName, PortalId, PortalSettings.Current.S3Service().GetBucketById(FolderID));
			else
				return base.GetFile(FileName, PortalId, FolderID);
			}

		/// <summary>
		/// Gets a file by id; S3-flagged fileIds are searched for across all S3 buckets,
		/// all others fall through to the base SQL provider.
		/// </summary>
		public override IDataReader GetFileById(int FileId, int PortalId)
			{
			if (S3File.IsS3File(FileId))
				return GetFileById(FileId, PortalId, PortalSettings.Current.S3Service().GetAllBuckets());
			else
				return base.GetFileById(FileId, PortalId);
			}

		/// <summary>
		/// Gets the content of a file.  For S3 files requested directly we redirect rather than
		/// proxying the bytes; otherwise the content is downloaded and returned via the reader.
		/// </summary>
		public override IDataReader GetFileContent(int FileId, int PortalId)
			{
			if (S3File.IsS3File(FileId))
				// If this is a "direct file request" -- perform a 301 Redirect.  Otherwise feed 
				// the content back via the IDataReader.  This is a sneaky performance hack; without
				// it we would actually have to download the content via S3 and resend it to the user.
				return HttpContext.Current.IsDirectFileRequest() ?
					RedirectToFileContent(FileId, HttpContext.Current) :
					GetS3FileContent(FileId, PortalId);
			else
				return base.GetFileContent(FileId, PortalId);
			}

		/// <summary>
		/// Gets all files in a folder; S3-flagged folders enumerate the matching S3 bucket,
		/// all others fall through to the base SQL provider.
		/// </summary>
		public override IDataReader GetFiles(int PortalId, int FolderID)
			{
			if (S3Folder.IsS3Folder(FolderID))
				return GetFiles(PortalId, PortalSettings.Current.S3Service().GetBucketById(FolderID));
			else
				return base.GetFiles(PortalId, FolderID);
			}

		/// <summary>
		/// Adds a file record; adding into an S3-flagged folder is not supported and throws.
		/// </summary>
		public override int AddFile(int PortalId, string FileName, string Extension, long Size, int Width, int Height, string ContentType, string Folder, int FolderID)
			{
			if(S3Folder.IsS3Folder(FolderID))
				throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, @"Cannot add file to reserved location ""{0}""", Folder));
			else
				return base.AddFile(PortalId, FileName, Extension, Size, Width, Height, ContentType, Folder, FolderID);
			}

		/// <summary>
		/// Updates a file record; updating an S3-flagged file is not supported and throws.
		/// </summary>
		public override void UpdateFile(int FileId, string FileName, string Extension, long Size, int Width, int Height, string ContentType, string Folder, int FolderID)
			{
			if (S3File.IsS3File(FileId))
				throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, @"Cannot update file in reserved location ""{0}""", Folder));
			else
				base.UpdateFile(FileId, FileName, Extension, Size, Width, Height, ContentType, Folder, FolderID);
			}

		/// <summary>
		/// Updates file content; updating an S3-flagged file's content is not supported and throws.
		/// </summary>
		public override void UpdateFileContent(int FileId, byte[] Content)
			{
			if (S3File.IsS3File(FileId))
				throw new InvalidOperationException(@"Cannot update file content in reserved location.");
			else
				base.UpdateFileContent(FileId, Content);
			}

		/// <summary>
		/// Deletes a file record; deleting from an S3-flagged folder is not supported and throws.
		/// </summary>
		public override void DeleteFile(int PortalId, string FileName, int FolderID)
			{
			if (S3Folder.IsS3Folder(FolderID))
				throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, @"Cannot delete file ""{0}"" in reserved location.", FileName));
			else
				base.DeleteFile(PortalId, FileName, FolderID);
			}

		#endregion

		#region Folder Methods

		/// <summary>
		/// Gets a folder by id; S3-flagged ids are materialized from the matching S3 bucket,
		/// all others fall through to the base SQL provider.
		/// </summary>
		public override IDataReader GetFolder(int PortalID, int FolderID)
			{
			if (S3Folder.IsS3Folder(FolderID))
				return new S3Folder(PortalID, PortalSettings.Current.S3Service().GetBucketById(FolderID).Name).ToReader();
			else
				return base.GetFolder(PortalID, FolderID);
			}

		/// <summary>
		/// Gets a folder by path; S3-flagged paths are materialized from the matching S3 bucket,
		/// all others fall through to the base SQL provider.
		/// </summary>
		public override IDataReader GetFolder(int PortalID, string FolderPath)
			{
			if (S3Folder.IsS3Folder(FolderPath))
				return new S3Folder(PortalID, PortalSettings.Current.S3Service().GetBucketByPath(FolderPath).Name).ToReader();
			else
				return base.GetFolder(PortalID, FolderPath);
			}

		/// <summary>
		/// Gets all folders for a portal, augmenting the base set with the S3 root folder and
		/// one folder per S3 bucket when an S3 service is configured.
		/// </summary>
		public override IDataReader GetFoldersByPortal(int PortalID)
			{
			// Return the union of (a) base folders, (b) the S3 root, and (c) the S3 buckets
			if (PortalSettings.Current.HasS3Service())
				return base.GetFoldersByPortal(PortalID)
					.Union(S3Folder.GetRootFolder(PortalID))
					.Union(PortalSettings.Current.S3Service().GetAllBuckets()
							.Select(bucket =>
								new S3Folder(PortalID, bucket.Name, bucket.CreationDate)));
			else
				return base.GetFoldersByPortal(PortalID);
			}

		/// <summary>
		/// Gets folder permissions by path; S3-flagged paths return a synthetic permission
		/// object carrying enough context for later authorization (see remarks below).
		/// </summary>
		public override IDataReader GetFolderPermissionsByFolderPath(int PortalID, string FolderPath, int PermissionID)
			{
			// This avoids an issue with the DotNetNuke core -- for whatever reason, not all authorization
			// checks are determined via the CanViewFolder method, and are instead (sometimes) evaluated
			// via HasFolderPermission("READ", ...).  Even more unfortunately, the HasFolderPermission
			// method does not include the folder itself for evaluation, only a set of permissions.
			// So, if we return no permissions, we have no context to evaluate during authorization.
			
			// As a complete kludge, we return a "fake" S3-tagged permission object with 
			// enough context to evaluate during authorization.  When/if the authorization side is
			// enhanced to include the actual entity being authorized (and a principal!), we can
			// remove all this artifice.  See work item DNN-10039 for progress on this.
			if (S3Folder.IsS3Folder(FolderPath))
				// Return our fake S3 permission; this contains just enough context to later perform authorization
				return new S3Permission(PortalID, FolderPath).ToEnumerable().ToReader();
			else
				return base.GetFolderPermissionsByFolderPath(PortalID, FolderPath, PermissionID);
			}

		/// <summary>
		/// Deletes a folder; deleting an S3-flagged folder is not supported and throws.
		/// </summary>
		public override void DeleteFolder(int PortalID, string FolderPath)
			{
			if(S3Folder.IsS3Folder(FolderPath))
				throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, @"Cannot delete system folder ""{0}""", FolderPath));
			else
				base.DeleteFolder(PortalID, FolderPath);
			}

		/// <summary>
		/// Adds a folder; creating a folder under the reserved S3 location is not supported and throws.
		/// </summary>
		public override int AddFolder(int PortalID, string FolderPath, int StorageLocation, bool IsProtected, bool IsCached, DateTime LastUpdated, int createdByUserID)
			{
			if (S3Folder.IsS3Folder(FolderPath))
				throw new InvalidOperationException(@"Cannot create folder under reserved location ""Amazon.S3""");
			else
				return base.AddFolder(PortalID, FolderPath, StorageLocation, IsProtected, IsCached, LastUpdated, createdByUserID);
			}

		/// <summary>
		/// Updates a folder; updating a folder under the reserved S3 location is not supported and throws.
		/// </summary>
		public override void UpdateFolder(int PortalID, int FolderID, string FolderPath, int StorageLocation, bool IsProtected, bool IsCached, DateTime LastUpdated, int lastModifiedByUserID)
			{
			if(S3Folder.IsS3Folder(FolderID))
				throw new InvalidOperationException(@"Cannot update folder under reserved location ""Amazon.S3""");
			else
				base.UpdateFolder(PortalID, FolderID, FolderPath, StorageLocation, IsProtected, IsCached, LastUpdated, lastModifiedByUserID);
			}

		#endregion

		#region S3 File Implementation

		/// <summary>
		/// Given a filename, portalId, and source bucket, returns a IDataReader with data about the S3 file
		/// </summary>
		/// <param name="fileName">The object name to locate within the bucket</param>
		/// <param name="portalId">The current portalId</param>
		/// <param name="bucket">The S3 bucket in which to look up the file</param>
		/// <returns>An IDataReader describing the single matching S3 file; throws if none (or multiple) found</returns>
		private static IDataReader GetFile(string fileName, int portalId, Bucket bucket)
			{
			// Get a list of buckets, find the bucket with name == FileName, convert to a S3 file datareader
			return
				PortalSettings.Current.S3Service().ListObjects(bucket.Name, null)
					.OfType<ObjectEntry>()
					.Single(entry => entry.Name == fileName)
					.ToS3File(portalId, bucket.Name)
					.ToReader();
			}

		/// <summary>
		/// Given a portal and bucket, returns an IDataReader with information about the files therein
		/// </summary>
		/// <param name="portalId">The current portalId</param>
		/// <param name="bucket">The S3 bucket with which to retrieve files</param>
		/// <returns>An IDataReader with information about the files in the bucket</returns>
		private static IDataReader GetFiles(int portalId, Bucket bucket)
			{
			// Get all the objects in a bucket, convert to S3Files, and return as a datareader
			return PortalSettings.Current.S3Service().ListObjects(bucket.Name, string.Empty)
				.OfType<ObjectEntry>()
				.Select(entry => entry.ToS3File(portalId, bucket.Name))
				.ToReader();
			}

		/// <summary>
		/// Given a fileId and a set of buckets, searches those buckets for a file with matching FileId.
		/// 
		/// Note that this is currently an O(n*m) operation and could use some cache and dictionary love.
		/// </summary>
		/// <param name="FileId">The S3 fileId to search for (see S3File object for fileId details)</param>
		/// <param name="buckets">The set of buckets in which to search</param>
		/// <returns>A bucket and S3 object entry pair; throws if none (or multiple) found</returns>
		private static BucketObjectPair GetBucketObjectPair(int FileId, IEnumerable<Bucket> buckets)
			{
			// This is a poorly-implemented O(n*m) operation; we could greatly improve this
			// by caching our buckets and objects (at the expense of occasional synchronization issues).
			// Here we generate a list of all objects in all buckets, find the one with the desired
			// fileId, convert it to a S3 file, and return it as a datareader.

			// Note that we are using some bit magic for our S3 fileId generation; see S3File for details.
			return buckets
				.SelectMany(bucket =>
					PortalSettings.Current.S3Service().ListObjects(bucket.Name, null)
						.OfType<ObjectEntry>()
						.Where(entry => entry.FileId() == FileId)
						.Select(entry => new BucketObjectPair(bucket, entry)))
				.Single();
			}

		/// <summary>
		/// Given a fileId and portalId, gets a file from a set of buckets
		/// </summary>
		/// <param name="FileId">The fileId to search for</param>
		/// <param name="PortalId">The current portalId</param>
		/// <param name="buckets">A set of buckets to search for the fileId in</param>
		/// <returns>An IDataReader representing the retrieved file</returns>
		private static IDataReader GetFileById(int FileId, int PortalId, IEnumerable<Bucket> buckets)
			{
			return GetBucketObjectPair(FileId, buckets)
				.ToS3File(PortalId)
				.ToReader();
			}

		/// <summary>
		/// Given a fileId, retrieves the content of that file (searching all S3 buckets)
		/// </summary>
		/// <param name="fileId">The fileId to search for in the S3 buckets</param>
		/// <param name="portalId">The current portalId</param>
		/// <returns>An IDataReader containing information about the file (including content)</returns>
		protected virtual IDataReader GetS3FileContent(int fileId, int portalId)
			{
			var s3Service = PortalSettings.Current.S3Service();
			var bucketObjectPair = GetBucketObjectPair(fileId, s3Service.GetAllBuckets());
			var file = bucketObjectPair.ToS3File(portalId);

			// Load our file with content downloaded from the S3 service (this has the potential
			// to be a very large double-data transfer.  In many cases, we avoid this via a 301 redirect,
			// but there are cases where the core actually needs the data).
			file.Content = s3Service
				.GetObjectBytes(bucketObjectPair.Bucket.Name, bucketObjectPair.ObjectEntry.Key);

			return file.ToReader();
			}

		/// <summary>
		/// Given a fileId, performs a permanent redirect to that file using the given HttpContext
		/// </summary>
		/// <param name="FileId">The fileId to which a redirect will occur</param>
		/// <param name="context">The httpContext with which to perform the redirect</param>
		/// <returns>An empty datareader (for convenience)</returns>
		private static IDataReader RedirectToFileContent(int FileId, HttpContext context)
			{
			var bucketObjectPair = GetBucketObjectPair(FileId, PortalSettings.Current.S3Service().GetAllBuckets());

			// Send a 301 Perm to the user
			// NOTE(review): this builds the URL from ObjectEntry.Name while GetS3FileContent reads
			// via ObjectEntry.Key -- confirm both identify the same S3 object key in LitS3.
			context.Response.StatusCode = 301;
			context.Response.Status = "301 Moved Permanently";
			context.Response.RedirectLocation = string.Format(CultureInfo.InvariantCulture, "http://s3.amazonaws.com/{0}/{1}", bucketObjectPair.Bucket.Name, bucketObjectPair.ObjectEntry.Name);
			context.Response.End();

			// Return a stub file so that core processing may continue; however, the thread will have
			// been aborted above.
			return new S3File()
				{ Content = new byte[0] }
				.ToReader();
			}

		/// <summary>
		/// Gets a user profile, first ensuring the S3 key/secret profile properties exist
		/// </summary>
		public override IDataReader GetProfile(int UserId, int PortalId)
			{
			EnsureS3ProfileProperties();
			return base.GetProfile(UserId, PortalId);
			}

		// Gate protecting the one-time initialization below; a bare check-then-set on a static
		// bool is racy under concurrent first requests and could run the initialization twice.
		private static readonly object EnsureLock = new object();
		private static bool isEnsured;

		/// <summary>
		/// Ensures the S3 key and secret profile properties exist for every portal.
		/// Runs at most once per application lifetime (double-checked under EnsureLock).
		/// </summary>
		protected virtual void EnsureS3ProfileProperties()
			{
			if (isEnsured)
				return;

			lock (EnsureLock)
				{
				if (!isEnsured)
					{
					// Set the flag before doing the work so that this is attempted at most once,
					// even if portal enumeration throws (preserves the original once-only intent).
					isEnsured = true;

					foreach (var portal in new PortalController().GetPortals().Cast<PortalInfo>())
						{
						Extensions.EnsureProfilePropertyExists(AmazonS3Extensions.S3KeyProfilePropertyName, portal.PortalID);
						Extensions.EnsureProfilePropertyExists(AmazonS3Extensions.S3SecretProfilePropertyName, portal.PortalID);
						}
					}
				}
			}

		#endregion
		}
	}
