﻿//     Copyright (c) Microsoft Corporation.  All rights reserved.

using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Gateway.Console.Models;
using Microsoft.Hadoop.Client;
using Microsoft.IdentityModel.Clients.ActiveDirectory;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Management.HDInsight;
using Microsoft.WindowsAzure.Management.Storage;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;

namespace Gateway.Console.Services.Azure
{
    public class HDInsightManagement : AzureServiceManagement, IDisposable
    {
        // Lazily-created SDK clients; Connect() is deferred until first use so that
        // constructing this class never performs network calls on its own.
        private Lazy<IHDInsightClient> _hdInsightClient;
        private Lazy<IJobSubmissionClient> _hadoopJobClient;

        // Creates a management wrapper for the given cluster; the base class owns the
        // access token and subscription (no database context supplied here).
        public HDInsightManagement(AuthenticationResult accessToken, string subscriptionId, string clusterName)
            : base(accessToken, subscriptionId)
        {
            InitializeClientWrapper(clusterName);
        }

        // Same as above, but shares an existing ConsoleContext with the base class.
        public HDInsightManagement(AuthenticationResult accessToken, string subscriptionId, string clusterName, ConsoleContext db)
            : base(accessToken, subscriptionId, db)
        {
            InitializeClientWrapper(clusterName);
        }

        // Wires up the lazy HDInsight management client and the Hadoop job-submission
        // client for the named cluster.
        private void InitializeClientWrapper(string clusterName)
        {
            this._hdInsightClient = new Lazy<IHDInsightClient>(() => HDInsightClient.Connect(GetHdInsightCredential()));
            this._hadoopJobClient = new Lazy<IJobSubmissionClient>(() => JobSubmissionClientFactory.Connect(GetJobSubmissionCredential(clusterName)));
        }

        // Builds a subscription-scoped HDInsight credential from the cached AAD access token.
        private IHDInsightSubscriptionCredentials GetHdInsightCredential()
        {
            var subscriptionGuid = Guid.Parse(this._subscriptionId);
            return new HDInsightAccessTokenCredential(subscriptionGuid, this._accessToken.AccessToken);
        }

        // Wraps the subscription credential with the cluster name for job submission.
        private IJobSubmissionClientCredential GetJobSubmissionCredential(string clusterName)
        {
            System.Diagnostics.Debug.Assert(!String.IsNullOrWhiteSpace(clusterName));
            var subscriptionCredential = (HDInsightAccessTokenCredential)GetHdInsightCredential();
            return new JobSubmissionAccessTokenCredential(subscriptionCredential, clusterName);
        }

        // Disposes the SDK clients — but only if their Lazy wrappers were ever realized —
        // and then lets the base class release its own resources.
        // NOTE(review): `new` hides the base Dispose rather than overriding it. Because this
        // class re-declares IDisposable (see class declaration), `using` blocks and interface
        // calls do dispatch here, but a direct call through an AzureServiceManagement-typed
        // reference would skip this method — confirm no caller does that.
        public new void Dispose()
        {
            if (this._hdInsightClient.IsValueCreated)
            {
                this._hdInsightClient.Value.Dispose();
            }
            if (this._hadoopJobClient.IsValueCreated)
            {
                this._hadoopJobClient.Value.Dispose();
            }
            base.Dispose();
        }

        /// <summary>
        /// Returns the names of all HDInsight clusters visible to the subscription.
        /// </summary>
        public async Task<IEnumerable<string>> ListHDInsightClustersAsync()
        {
            var clusterDetails = await this._hdInsightClient.Value.ListClustersAsync();
            return clusterDetails.Select(c => c.Name);
        }

        // Blob container used as the cluster's default storage container.
        const string HDInsightDefaultContainer = "sg-application-analytics";

        /// <summary>
        /// Provisions a new HDInsight cluster described by <paramref name="model"/>, using the
        /// deployment's selected storage account and region. Provisioning state changes are
        /// logged via HDInsightLogger. Returns true only when the cluster comes back in an
        /// Operational or Running state; any failure is logged and reported as false.
        /// </summary>
        public async Task<bool> CreateClusterAsync(AnalyticsModel model)
        {
            try
            {
                var deployment = await GetDeployment();
                WriteStatus(model, true, "Creating HDInsight cluster [{0}] using storage account [{1}/{2}] in location [{3}]",
                    model.ClusterName,
                    deployment.Item1.SelectedStorageAccount,
                    HDInsightDefaultContainer,
                    deployment.Item1.RegionSelection);
                var createParameters = new ClusterCreateParameters
                {
                    Name = model.ClusterName,
                    Location = deployment.Item1.RegionSelection,
                    DefaultStorageAccountName = deployment.Item1.SelectedStorageAccount,
                    DefaultStorageAccountKey = deployment.Item2,
                    DefaultStorageContainer = HDInsightDefaultContainer,
                    UserName = model.ClusterUserName,
                    Password = model.ClusterPassword,
                    ClusterSizeInNodes = model.ClusterNodeCount,
                };
                // The logger subscribes to provisioning events for the duration of the call.
                using (new HDInsightLogger(this, model, _hdInsightClient.Value, String.Format("HDInsight cluster creation [{0}].", model.ClusterName)))
                {
                    var createdCluster = await _hdInsightClient.Value.CreateClusterAsync(createParameters);
                    if (createdCluster == null)
                    {
                        return false;
                    }
                    return createdCluster.State == ClusterState.Operational || createdCluster.State == ClusterState.Running;
                }
            }
            catch (Exception ex)
            {
                WriteStatus(model, false, "Failed to create HDInsight cluster [{0}]. Details: {1}", model.ClusterName, ex);
            }
            return false;
        }

        /// <summary>
        /// Deletes the HDInsight cluster named in the model; returns false (after logging)
        /// on any failure.
        /// NOTE(review): unlike its siblings this method lacks the "Async" suffix; renaming
        /// would break callers, so it is left as-is.
        /// </summary>
        public async Task<bool> DestroyCluster(AnalyticsModel model)
        {
            try
            {
                WriteStatus(model, true, "Deleting HDInsight cluster [{0}]", model.ClusterName);
                await this._hdInsightClient.Value.DeleteClusterAsync(model.ClusterName);
                return true;
            }
            catch (Exception ex)
            {
                WriteStatus(model, false, "Failed to destroy HDInsight cluster [{0}]. Details: {1}", model.ClusterName, ex);
                return false;
            }
        }

        // DDL for the shared Hive objects, keyed by object name (used to skip objects that
        // "show tables" reports as already existing). {0} = storage account name.
        static readonly Tuple<string, string>[] hiveObjectsDdl = new[] {
            Tuple.Create("iislogs_raw", 
                        @"CREATE EXTERNAL TABLE iislogs_raw 
	                            (sdate string, stime string, ssitename string, scomputername string, sip string, csmethod string, csuristem string, 
	                             csuriquery string, sport int, csusername string, cip string, csversion string, csuseragent string, cscookie string, 
	                             csreferrer string, cshost string, scstatus int, scsubstatus int, scwin32status int, scbytes int, csbytes int, timetaken int) 
                            PARTITIONED BY (deploymentid string, instance string) 
                            ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' 
                            LOCATION 'wasb://wad-iis-logfiles@{0}.blob.core.windows.net/';" ),
            Tuple.Create("iislogs", 
                        @"CREATE VIEW iislogs 
                            AS 
                            SELECT sdate, stime, ssitename, scomputername, sip, csmethod, csuristem, csuriquery, sport, csusername, cip, csversion, 
	                            csuseragent, cscookie, csreferrer, cshost, scstatus, scsubstatus, scwin32status, scbytes, csbytes, timetaken, 
	                            regexp_extract(csusername, '.*@(.*)', 1) as tenant 
                            FROM iislogs_raw 
                            WHERE sdate NOT RLIKE '.*#.*';"),
        };
        // Per-report aggregate table: {0} = table name, {1} = optional ", series STRING" column.
        const string createAggregateTableDdl = @"CREATE TABLE {0} (date STRING, week INT, value INT{1})";
        // Registers one deployment/instance log directory as a partition of iislogs_raw:
        // {0} = deployment id, {1} = instance, {2} = storage account, {3} = directory path.
        const string addPartitionDdl = @"ALTER TABLE iislogs_raw 
                                        ADD PARTITION (deploymentid='{0}',instance='{1}') LOCATION 'wasb://wad-iis-logfiles@{2}.blob.core.windows.net/{3}'";
        // Aggregation queries keyed by tick interval ("month"/"week"). Item1 is the INSERT
        // statement ({0} = target table, {1}/{5} = optional series column, {2}/{3} = date
        // range, {4} = optional filter predicate, {6} = count expression); Item2 maps "now"
        // to the start of the current period (first of month / first day of week).
        static readonly IDictionary<string, Tuple<string, Func<DateTime, DateTime>>> calculateAggregateStatements = new Dictionary<string, Tuple<string, Func<DateTime, DateTime>>>(StringComparer.OrdinalIgnoreCase) {
            { "month", Tuple.Create<string, Func<DateTime, DateTime>>(
                        @"INSERT OVERWRITE TABLE {0}
                            SELECT concat(conv(year(sdate), 10, 10), '-', lpad(conv(month(sdate), 10, 10), 2, '0'), '-01') AS date,
                                   0 as week,
                                   count({6}) as value
                                   {1}
                            FROM iislogs
                            WHERE (sdate > '{2}' AND sdate < '{3}') {4}
                            GROUP BY year(sdate), month(sdate){5}",
                        (date) => date.AddDays(-(date.Day - 1)))},
            { "week", Tuple.Create<string, Func<DateTime, DateTime>>(
                        @"INSERT OVERWRITE TABLE {0}
                            SELECT conv(year(sdate), 10, 10) AS date,
                                   weekofyear(sdate) as week,
                                   count({6}) as value
                                   {1}
                            FROM iislogs
                            WHERE (sdate > '{2}' AND sdate < '{3}') {4}
                            GROUP BY year(sdate), weekofyear(sdate){5}",
                        (date) => GetFirstDayOfWeek(date))}
        };

        /// <summary>
        /// Ensures the Hive objects used by analytics exist: the shared iislogs table/view,
        /// plus one aggregate table and one temp table per configured report. Objects already
        /// listed by "show tables" are skipped. Returns false (after logging) on failure.
        /// </summary>
        public async Task<bool> CreateOrUpdateHiveMetadataAsync(AnalyticsModel model)
        {
            try
            {
                WriteStatus(model, true, "Updating Hive metadata for analytics model.");
                var hiveJobDefinition = new HiveJobCreateParameters()
                {
                    StatusFolder = "/CreateObjectsStatusFolder",
                    Query = "show tables;",
                };
                // Case-insensitive set of objects that already exist on the cluster.
                var hiveTables = new HashSet<string>(await CreateResultsHiveJobAndWaitAsync(hiveJobDefinition), StringComparer.OrdinalIgnoreCase);
                // NOTE(review): FirstOrDefaultAsync can return null when no deployment row
                // exists, which would NRE when formatting the DDL below — confirm a deployment
                // is guaranteed at this point (the broad catch would mask it as a failure).
                var deployment = await this._database.Deployments.FirstOrDefaultAsync();
                // Create the shared iislogs objects that are missing.
                foreach (var ddlStatement in hiveObjectsDdl
                                                .Where(statement => !hiveTables.Contains(statement.Item1)))
                {
                    WriteStatus(model, true, "Creating Hive table [{0}]", ddlStatement.Item1);
                    hiveJobDefinition.Query = FormatHiveStatement(ddlStatement.Item2, deployment.SelectedStorageAccount);
                    await CreateHiveJobAndWaitAsync(hiveJobDefinition);
                }
                // Each report needs both its permanent table and its temp staging table;
                // create whichever of the pair is missing.
                foreach (var reportTable in this._database.AnalyticsReportModels
                                                    .ToList()
                                                    .Where(report => !String.IsNullOrWhiteSpace(report.HiveTable))
                                                    .SelectMany(report => new[] { 
                                                        new 
                                                        {
                                                            TableName = report.HiveTable, 
                                                            HasMultipleSeries = report.HasMultipleSeries,
                                                        },
                                                        new
                                                        {
                                                            TableName = report.HiveTempTable, 
                                                            HasMultipleSeries = report.HasMultipleSeries,
                                                        }})
                                                    .Where(tableName => !hiveTables.Contains(tableName.TableName)))
                {
                    WriteStatus(model, true, "Creating Hive table [{0}]", reportTable.TableName);
                    hiveJobDefinition.Query = FormatHiveStatement(createAggregateTableDdl,
                        reportTable.TableName,
                        reportTable.HasMultipleSeries ? ", series STRING" : "");
                    await CreateHiveJobAndWaitAsync(hiveJobDefinition);
                }
                return true;
            }
            catch (Exception ex)
            {
                WriteStatus(model, false, "Failed to create Hive metadata for analytics model. Details: {0}", ex);
            }
            return false;
        }

        /// <summary>
        /// Runs an analytics action on a background thread with its own disposable management
        /// client. Fire-and-forget: the caller does not observe completion or failure (failures
        /// are expected to be logged by the action itself via WriteStatus).
        /// </summary>
        private static void InitiateAnalyticsAction(AuthenticationResult accessToken, string subscriptionId, AnalyticsModel model, Func<HDInsightManagement, Task> action)
        {
            // Await the action rather than blocking on .Wait(): this frees the thread-pool
            // thread for the duration of the (long-running) Hive work and avoids wrapping
            // any failure in an AggregateException.
            Task.Run(async () =>
            {
                using (var clusterManager = new HDInsightManagement(accessToken, subscriptionId, model.ClusterName))
                {
                    await action(clusterManager);
                }
            });
        }

        /// <summary>
        /// Kicks off a background refresh of the iislogs_raw Hive partitions (fire-and-forget).
        /// </summary>
        public static void InitiateUpdateIISLogTablePartitions(AuthenticationResult accessToken, string subscriptionId, AnalyticsModel model)
        {
            InitiateAnalyticsAction(accessToken, subscriptionId, model, clusterManager => clusterManager.UpdateIISLogTablePartitionsAsync(model));
        }

        /// <summary>
        /// Scans the "wad-iis-logfiles" blob container for per-instance web-log directories
        /// and adds a Hive partition on iislogs_raw for every deployment/instance pair that
        /// does not already have one. Returns false (after logging) on failure.
        /// </summary>
        public async Task<bool> UpdateIISLogTablePartitionsAsync(AnalyticsModel model)
        {
            try
            {
                WriteStatus(model, DateTime.UtcNow, "Checking web log partitions for Hive external table.", true);
                var deploymentInfo = await GetDeployment();
                var hiveJobDefinition = new HiveJobCreateParameters()
                {
                    StatusFolder = "/CreateObjectsStatusFolder",
                };
                // Existing partitions come back as "deploymentid=X/instance=Y"; splitting on
                // both '/' and '=' puts the deployment id at [1] and the instance at [3].
                hiveJobDefinition.Query = "show partitions iislogs_raw";
                var partitions = (await CreateResultsHiveJobAndWaitAsync(hiveJobDefinition))
                    .Select(partitionDefinition =>
                    {
                        var items = partitionDefinition.Split('/', '=');
                        return new
                        {
                            DeploymentId = items[1],
                            Instance = items[3],
                        };
                    })
                    .ToList();
                var storageAccount = CloudStorageAccount.Parse(
                    String.Format("DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1}",
                        deploymentInfo.Item1.SelectedStorageAccount, deploymentInfo.Item2));
                var blobClient = storageAccount.CreateCloudBlobClient();
                var container = blobClient.GetContainerReference("wad-iis-logfiles");
                // TODO(review): the IIS site id W3SVC1273337584 is hard-coded — confirm it
                // matches the web site of every deployment this runs against.
                string pattern = "*/Router/*/Web/W3SVC1273337584";
                var patternSegments = pattern.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
                var directories = FindLeafDirectories(container, patternSegments, 0)
                    .ToList();
                // Directory layout is {deploymentId}/Router/{instance}/Web/... — pull the ids
                // out of the matched paths.
                var partitionDirectories = directories
                    .Select(path =>
                        {
                            var segments = path.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
                            return new
                            {
                                DeploymentId = segments[0],
                                Instance = segments[2],
                                Path = path.TrimEnd('/'),
                            };
                        });
                var partitionsToAdd = partitionDirectories
                    .Where(directory => !partitions.Any(existingPartition => existingPartition.DeploymentId.Equals(directory.DeploymentId, StringComparison.OrdinalIgnoreCase) &&
                                                                                existingPartition.Instance.Equals(directory.Instance, StringComparison.OrdinalIgnoreCase)));
                foreach (var newPartition in partitionsToAdd)
                {
                    WriteStatus(model, true, "Adding new web logs partition [{0}][{1}]", newPartition.DeploymentId, newPartition.Instance);
                    hiveJobDefinition.Query = FormatHiveStatement(addPartitionDdl,
                        newPartition.DeploymentId,
                        newPartition.Instance,
                        deploymentInfo.Item1.SelectedStorageAccount,
                        newPartition.Path);
                    await CreateHiveJobAndWaitAsync(hiveJobDefinition);
                }

                return true;
            }
            catch (Exception ex)
            {
                // BUG FIX: the format string previously used "{1}" with only one argument,
                // which made String.Format throw a FormatException from inside this catch
                // handler (crashing the fire-and-forget task instead of logging the error).
                WriteStatus(model, false, "Failed to create Hive partitions to match web logs for analytics model. Details: {0}", ex);
            }
            return false;
        }

        /// <summary>
        /// Kicks off a background recalculation of the report aggregates (fire-and-forget).
        /// </summary>
        public static void InitiateCalculateAggregates(AuthenticationResult accessToken, string subscriptionId, AnalyticsModel model)
        {
            InitiateAnalyticsAction(accessToken, subscriptionId, model, clusterManager => clusterManager.CalculateAggregatesAsync(model));
        }

        /// <summary>
        /// For each report with a Hive table: recomputes aggregate rows in Hive (into the
        /// report's temp table), copies them into the local database, appends them to the
        /// report's permanent Hive table, and advances the report's LastDate watermark.
        /// Per-record and per-report failures are swallowed so one bad report does not stop
        /// the others.
        /// </summary>
        public async Task CalculateAggregatesAsync(AnalyticsModel model)
        {
            try
            {
                // BUG FIX: this previously used .ForEach(async report => ...). Because ForEach
                // takes a synchronous delegate, each lambda ran as unawaited fire-and-forget
                // work: this method's Task completed before any report was processed, lambda
                // exceptions were unobservable, and the shared DbContext (not thread-safe)
                // was used concurrently. Process the reports sequentially instead.
                var reports = (await this._database.AnalyticsReportModels.ToListAsync())
                    .Where(report => !String.IsNullOrWhiteSpace(report.HiveTable))
                    .ToList();
                foreach (var report in reports)
                {
                    try
                    {
                        // Calculate the aggregates to temp table
                        var aggregateStatement = calculateAggregateStatements[report.TickInterval];
                        DateTime lastDate = aggregateStatement.Item2(DateTime.UtcNow);
                        // Only recompute when at least a full day has passed since the last run.
                        if (!report.LastDate.HasValue || (lastDate - report.LastDate.Value).TotalDays > 1)
                        {
                            var hiveJobDefinition = new HiveJobCreateParameters
                            {
                                StatusFolder = String.Format("/{0}StatusFolder", report.HiveTable),
                                Query = FormatHiveStatement(aggregateStatement.Item1,
                                                            report.HiveTempTable,
                                                            report.HasMultipleSeries ? ", " + report.SeriesColumn : "",
                                                            report.LastDate.HasValue ? report.LastDate.Value.ToString("yyyy-MM-dd") : "1900-01-01",
                                                            lastDate.ToString("yyyy-MM-dd"),
                                                            String.IsNullOrWhiteSpace(report.FilterPredicate) ? String.Empty : "AND " + report.FilterPredicate,
                                                            report.HasMultipleSeries ? ", " + report.SeriesColumn : "",
                                                            String.IsNullOrWhiteSpace(report.DistinctExpression) ? "*" : "DISTINCT " + report.DistinctExpression),
                            };
                            await CreateHiveJobAndWaitAsync(hiveJobDefinition);
                            // Now pull the data back so that we can copy it into our local db
                            hiveJobDefinition.Query = FormatHiveStatement("SELECT * FROM {0}", report.HiveTempTable);
                            var selectData = await CreateResultsHiveJobAndWaitAsync(hiveJobDefinition);
                            foreach (var record in selectData)
                            {
                                try
                                {
                                    // Column layout is:
                                    //  date    week    value   [series]
                                    var fields = record.Split('\t');
                                    DateTime date;
                                    int week = 0;
                                    if (int.TryParse(fields[1], out week) && week > 0)
                                    {
                                        // Weekly rows carry year + week number: calculate the
                                        // date of the 1st day in this week.
                                        date = new DateTime(int.Parse(fields[0]), 1, 1);
                                        date = GetFirstDayOfWeek(CultureInfo.InvariantCulture.Calendar.AddWeeks(date, week));
                                    }
                                    else
                                    {
                                        date = DateTime.Parse(fields[0]);
                                    }
                                    string series = null;
                                    if (report.HasMultipleSeries)
                                    {
                                        series = fields[3];
                                    }
                                    report.Rows.Add(new AnalyticsReportRow
                                    {
                                        Date = date,
                                        Value = Convert.ToDouble(fields[2]),
                                        Series = series,
                                    });
                                }
                                catch (Exception)
                                {
                                    // Skip unparsable rows. TODO: Log exception
                                }
                            }
                            // Now append the new aggregates to the real table
                            hiveJobDefinition.Query = FormatHiveStatement("INSERT INTO TABLE {0} SELECT * FROM {1}",
                                report.HiveTable,
                                report.HiveTempTable);
                            await CreateHiveJobAndWaitAsync(hiveJobDefinition);
                            // Finally, update the processing dates on the report
                            report.LastDate = lastDate.AddDays(-1);
                            this._database.Entry(report).State = EntityState.Modified;
                            await this._database.SaveChangesAsync();
                        }
                    }
                    catch (Exception)
                    {
                        // Continue with the remaining reports. TODO: Log exception
                    }
                }
            }
            catch (Exception)
            {
                // TODO: Log exception
            }
        }

        // Returns the date of the first day of the week containing the given date, using the
        // invariant culture's week start (Sunday); the time-of-day component is preserved.
        private static DateTime GetFirstDayOfWeek(DateTime date)
        {
            int daysPastWeekStart = ((int)date.DayOfWeek - (int)CultureInfo.InvariantCulture.DateTimeFormat.FirstDayOfWeek + 7) % 7;
            return date.AddDays(-daysPastWeekStart);
        }

        // Flattens a multi-line Hive statement onto one line (CRLF pairs removed, tabs
        // collapsed to single spaces) and substitutes the format arguments.
        // NOTE(review): assumes the verbatim string literals carry CRLF line endings; an
        // LF-only source checkout would leave the statements unflattened — confirm the
        // repository's line-ending settings.
        private static string FormatHiveStatement(string hiveStatement, params object[] args)
        {
            var flattened = hiveStatement.Replace("\r\n", "").Replace("\t", " ");
            return String.Format(flattened, args);
        }

        // Entry point: matches patternSegments against the container's virtual directory tree,
        // returning the prefixes of every directory that satisfies the full pattern.
        private static IEnumerable<string> FindLeafDirectories(CloudBlobContainer container, string[] patternSegments, int segmentIndex)
        {
            return FindLeafDirectories("/", patternSegments, segmentIndex, () => container.ListBlobs(), (directory) => container.GetDirectoryReference(directory));
        }

        // Recursive worker: consumes one pattern segment per level. "*" fans out across every
        // child directory; a literal segment descends directly. Enumeration is lazy — blob
        // listings are issued only as the result sequence is consumed.
        private static IEnumerable<string> FindLeafDirectories(string path,
            string[] patternSegments,
            int segmentIndex,
            Func<IEnumerable<IListBlobItem>> getSubDirectories,
            Func<string, CloudBlobDirectory> getSubDirectory)
        {
            // Every pattern segment matched: this path is a leaf match.
            if (segmentIndex == patternSegments.Length)
            {
                return new[] { path };
            }
            if (segmentIndex > patternSegments.Length)
            {
                return Enumerable.Empty<string>();
            }
            string currentSegment = patternSegments[segmentIndex];
            if (currentSegment != "*")
            {
                // Literal segment: descend into the named child directory.
                var namedDir = getSubDirectory(currentSegment);
                return FindLeafDirectories(namedDir.Prefix,
                                           patternSegments,
                                           segmentIndex + 1,
                                           () => namedDir.ListBlobs(),
                                           (directory) => namedDir.GetDirectoryReference(directory));
            }
            // Wildcard: try every child directory at this level.
            IEnumerable<string> matches = Enumerable.Empty<string>();
            foreach (var childDir in getSubDirectories().OfType<CloudBlobDirectory>())
            {
                matches = matches.Concat(FindLeafDirectories(childDir.Prefix,
                                                             patternSegments,
                                                             segmentIndex + 1,
                                                             () => childDir.ListBlobs(),
                                                             (directory) => childDir.GetDirectoryReference(directory)));
            }
            return matches;
        }

        // Loads the (single expected) deployment record and fetches a storage-account key for
        // its selected storage account.
        // NOTE(review): FirstOrDefaultAsync can return null when no deployment row exists,
        // which would NRE on deployment.SelectedStorageAccount below — confirm a deployment
        // is guaranteed before this is called.
        // NOTE(review): returns the SecondaryKey — presumably to keep the primary key free
        // for the running services; confirm this choice is intentional.
        private async Task<Tuple<Deployment, string>> GetDeployment()
        {
            var deployment = await this._database.Deployments.FirstOrDefaultAsync();
            using (var storageClient = CloudContext.Clients.CreateStorageManagementClient(GetAzureCredentials()))
            {
                var keys = await storageClient.StorageAccounts.GetKeysAsync(deployment.SelectedStorageAccount);
                return Tuple.Create(deployment, keys.SecondaryKey);
            }
        }

        // Core status writer: always traces the message; when the model has been persisted
        // (non-zero id) the status is also recorded against it in the database.
        private void WriteStatus(AnalyticsModel model, DateTime date, string message, bool success)
        {
            System.Diagnostics.Trace.TraceInformation("{0} {1} {2}", date, success, message);
            bool isPersistedModel = model != null && model.Id != 0;
            if (isPersistedModel)
            {
                this._database.AddStatus<AnalyticsStatus>(model.Id, date, success, message);
            }
        }

        // Convenience overload: formats the message and stamps it with the current UTC time.
        private void WriteStatus(AnalyticsModel model, bool success, string message, params object[] args)
        {
            string formattedMessage = String.Format(message, args);
            WriteStatus(model, DateTime.UtcNow, formattedMessage, success);
        }

        // Submits a Hive job and waits (asynchronously) for it to finish; throws on failure.
        private async Task CreateHiveJobAndWaitAsync(HiveJobCreateParameters hiveJobDefinition)
        {
            var jobClient = _hadoopJobClient.Value;
            var jobResults = await jobClient.CreateHiveJobAsync(hiveJobDefinition);
            await WaitForJobCompletionAsync(jobResults, jobClient);
        }

        // Submits a Hive job, waits for completion, and returns the job's output line by line
        // (lazily — the output stream is consumed as the sequence is enumerated).
        private async Task<IEnumerable<string>> CreateResultsHiveJobAndWaitAsync(HiveJobCreateParameters hiveJobDefinition)
        {
            var jobClient = _hadoopJobClient.Value;
            var hiveJob = await jobClient.CreateHiveJobAsync(hiveJobDefinition);
            await WaitForJobCompletionAsync(hiveJob, jobClient);
            var outputStream = await jobClient.GetJobOutputAsync(hiveJob.JobId);
            return ReadAllLines(outputStream);
        }

        // Waits (without timeout) for the submitted job to finish. Throws
        // InvalidOperationException carrying the job's error log when the job exited with a
        // non-zero code, or its status code when it ended in any state other than Completed.
        private static async Task WaitForJobCompletionAsync(JobCreationResults jobResults, IJobSubmissionClient client)
        {
            var completedJob = await client.WaitForJobCompletionAsync(jobResults, TimeSpan.MaxValue, CancellationToken.None);
            bool exitedWithError = completedJob.ExitCode.HasValue && completedJob.ExitCode != 0;
            if (exitedWithError)
            {
                using (var errorStream = await client.GetJobErrorLogsAsync(jobResults.JobId))
                {
                    string errorDetails = new StreamReader(errorStream).ReadToEnd();
                    throw new InvalidOperationException(String.Format("HDInsight job [{0}] failed with status [{1}]. Details: {2}",
                        jobResults.JobId,
                        completedJob.StatusCode,
                        errorDetails));
                }
            }
            if (completedJob.StatusCode != JobStatusCode.Completed)
            {
                throw new InvalidOperationException(String.Format("Hive job [{0}] ended with status [{1}]", completedJob.JobId, completedJob.StatusCode));
            }
        }

        // Lazily yields each line of text from the stream. The reader (and therefore the
        // stream) is disposed when enumeration finishes or the enumerator is abandoned.
        private static IEnumerable<string> ReadAllLines(Stream stream)
        {
            using (var reader = new StreamReader(stream))
            {
                for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
                {
                    yield return line;
                }
            }
        }

        // Scoped event listener that forwards SDK progress events to WriteStatus for the
        // lifetime of a using-block; Dispose unsubscribes so events stop flowing afterwards.
        private class HDInsightLogger : IDisposable
        {
            HDInsightManagement _manager;
            AnalyticsModel _model;
            IHDInsightClient _clusterClient;
            IJobSubmissionClient _jobClient;
            // Last provisioning state seen, used to report only state *changes*.
            ClusterState _currentClusterState;
            // Prefix prepended to every provisioning status message.
            string _message;

            // Subscribes to cluster-provisioning events (used during cluster creation).
            public HDInsightLogger(HDInsightManagement manager, AnalyticsModel model, IHDInsightClient client, string message)
            {
                _manager = manager;
                _model = model;
                _clusterClient = client;
                client.ClusterProvisioning += ClusterClient_ClusterProvisioning;
                _currentClusterState = ClusterState.Unknown;
                _message = message;
            }

            // Subscribes to job-status events. Note: _message stays null on this path, which
            // is fine because only the provisioning handler reads it.
            public HDInsightLogger(HDInsightManagement manager, AnalyticsModel model, IJobSubmissionClient client)
            {
                _manager = manager;
                _model = model;
                _jobClient = client;
                client.JobStatusEvent += JobClient_JobStatusEvent;
            }

            // Unsubscribes from whichever event source this instance attached to.
            public void Dispose()
            {
                if (_clusterClient != null)
                {
                    _clusterClient.ClusterProvisioning -= ClusterClient_ClusterProvisioning;
                }
                if (_jobClient != null)
                {
                    _jobClient.JobStatusEvent -= JobClient_JobStatusEvent;
                }
            }

            // Logs each provisioning state transition once; Error states are logged as failures.
            void ClusterClient_ClusterProvisioning(object sender, ClusterProvisioningStatusEventArgs e)
            {
                if (e.State != _currentClusterState)
                {
                    bool failure = e.State == ClusterState.Error;
                    string statusMessage = String.Format(" Status [{0}]", e.State);
                    _manager.WriteStatus(_model, !failure, _message + statusMessage);
                    _currentClusterState = e.State;
                }
            }

            void JobClient_JobStatusEvent(object sender, WaitJobStatusEventArgs e)
            {
                // TODO: Log job progress messages
            }
        }
    }
}