﻿using System.Collections.Generic;
using System.Dynamic;
using System.IO;
using System.Linq;
using CsvHelper;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;

namespace dataXJobGenerator
{
    /// <summary>
    /// Generates DataX job JSON files (one per configured table) from a shared template,
    /// plus a CSV check-list summarizing record counts. Output goes to the directory named
    /// by the "jobsOutput" configuration key, falling back to <see cref="DataXJobDir"/>.
    /// </summary>
    public class JobGenerator
    {
        private const string DataXJobDir = "dataXjobs";
        private readonly IConfiguration _configuration;
        private readonly ILogger<JobGenerator> _logger;
        private readonly IDBColumnsUtil _dbColumnsUtil;

        public JobGenerator(ILogger<JobGenerator> logger, IConfiguration configuration, IDBColumnsUtil dbColumnsUtil)
        {
            _logger = logger;
            _configuration = configuration;
            _dbColumnsUtil = dbColumnsUtil;
        }

        /// <summary>One row of the generated jobs_check_list.csv.</summary>
        public class JobCheckList
        {
            public string TableName { get; set; }
            public int Records { get; set; }
        }

        /// <summary>
        /// Builds every job config, orders them by ascending record count (so smaller
        /// tables get lower job numbers), then writes the check-list CSV and one
        /// JSON job file per table into a freshly re-created output directory.
        /// </summary>
        public void DoAction()
        {
            // IConfiguration's indexer returns null when the key is absent;
            // the original GetValue("jobsOutput") overload does not exist.
            var destinationJobDir = _configuration["jobsOutput"] ?? DataXJobDir;
            _logger.LogDebug($">> the job files store to {destinationJobDir}");

            var dataXJobConfigTables = TablesConfiguration.GetTableConfigurations(_configuration);
            var dataSource = DataSourceConfiguration.GetDataSource(_configuration);
            var jobTemplate = File.ReadAllText("./dataXJob.template.txt");

            var configList = GenerateJobConfigs(dataXJobConfigTables, dataSource.ConnStr, jobTemplate);

            // Order by record count ascending and assign Sort = 1..N.
            // (The list is now already sorted; no second, redundant sort needed.)
            var targetList = configList.OrderBy(x => x.Record).ToList();
            var index = 1;
            foreach (var cfg in targetList)
            {
                cfg.Sort = index++;
            }

            // Recreate the output directory so stale job files never survive a re-run.
            if (Directory.Exists(destinationJobDir))
            {
                Directory.Delete(destinationJobDir, true);
            }
            Directory.CreateDirectory(destinationJobDir);

            WriteCheckList($"{destinationJobDir}/jobs_check_list.csv", targetList);
            WriteJobFiles(destinationJobDir, targetList);

            _logger.LogInformation(">> All tasks done! The generator exit. ");
        }

        /// <summary>
        /// Fills the job template for each table: reader/writer table names, columns,
        /// pre/post SQL. Returns the configurations with Job (JSON) and Record populated.
        /// </summary>
        /// <param name="tables">Table configurations to generate jobs for.</param>
        /// <param name="connStr">Source database connection string.</param>
        /// <param name="jobTemplate">Raw JSON template text shared by all jobs.</param>
        private List<TablesConfiguration> GenerateJobConfigs(
            IEnumerable<TablesConfiguration> tables, string connStr, string jobTemplate)
        {
            var configList = new List<TablesConfiguration>();
            var tableList = tables.ToList();
            var totalTasks = tableList.Count;
            var progressing = 0;

            foreach (var cfg in tableList)
            {
                _logger.LogInformation($">> progressing {++progressing}/{totalTasks},generate the job config file for {cfg.Table}");
                var (records, columns) = _dbColumnsUtil.GetColumns(connStr, cfg.Table);

                // Deserialize into ExpandoObject so the template's nested structure
                // can be patched with dynamic member access.
                dynamic dynamicData = JsonConvert.DeserializeObject<ExpandoObject>(jobTemplate);
                dynamicData.job.content[0].reader.parameter.connection[0].table = new[] { cfg.Table };
                dynamicData.job.content[0].reader.parameter.column = columns;
                dynamicData.job.content[0].reader.parameter.preSql = cfg.TablePreSql;
                dynamicData.job.content[0].reader.parameter.postSql = cfg.TablePostSql;

                dynamicData.job.content[0].writer.parameter.connection[0].table = new[]{
                    cfg.TableMapTo ?? cfg.Table
                };

                // Only override the template's writer pre/post SQL when explicitly configured.
                if (cfg.MapToPreSql.Any())
                {
                    dynamicData.job.content[0].writer.parameter.preSql = cfg.MapToPreSql;
                }

                if (cfg.MapToPostSql.Any())
                {
                    dynamicData.job.content[0].writer.parameter.postSql = cfg.MapToPostSql;
                }

                dynamicData.job.content[0].writer.parameter.column = MapColumns(cfg, columns);

                cfg.Job = JsonConvert.SerializeObject(dynamicData, Formatting.Indented);
                cfg.Record = records;
                configList.Add(cfg);
            }

            return configList;
        }

        /// <summary>
        /// Resolves each source column to its destination name via cfg.Columns.
        /// Each source column maps to AT MOST ONE destination; a column with multiple
        /// mappings is logged as invalid and the first mapping wins. (The previous
        /// implementation emitted one entry per mapping for every column, duplicating
        /// the writer column list whenever more than one mapping existed.)
        /// </summary>
        private List<string> MapColumns(TablesConfiguration cfg, List<string> columns)
        {
            if (cfg.Columns.Count == 0)
            {
                return columns;
            }

            var destinationColumns = new List<string>(columns.Count);
            foreach (var column in columns)
            {
                var matches = cfg.Columns.Where(map => column.Equals(map.Source)).ToList();
                if (matches.Count > 1)
                {
                    _logger.LogWarning($">> the table {cfg.Table} columns mapping invalid");
                }

                destinationColumns.Add(matches.Count > 0 ? matches[0].Destination : column);
            }

            return destinationColumns;
        }

        /// <summary>
        /// Writes jobs_check_list.csv (UTF-8, with header) listing each table and its record count.
        /// Both writers are disposed explicitly so the stream is always flushed and closed.
        /// </summary>
        private static void WriteCheckList(string path, IEnumerable<TablesConfiguration> configs)
        {
            using (var streamWriter = new StreamWriter(path, true, encoding: System.Text.Encoding.UTF8))
            using (var csvWriter = new CsvWriter(new CsvSerializer(streamWriter)))
            {
                csvWriter.Configuration.HasHeaderRecord = true;
                csvWriter.WriteHeader<JobCheckList>();
                csvWriter.NextRecord();
                csvWriter.WriteRecords(configs.Select(x => new JobCheckList { TableName = x.Table, Records = x.Record }));
                csvWriter.Flush();
            }
        }

        /// <summary>
        /// Writes one JOB_NNN-&lt;table&gt;.JSON file per configuration, using the mapped
        /// destination table name when one is configured (whitespace-only values are
        /// ignored — the previous || condition let them through).
        /// </summary>
        private static void WriteJobFiles(string destinationJobDir, IEnumerable<TablesConfiguration> configs)
        {
            foreach (var cfg in configs)
            {
                var tableName = string.IsNullOrWhiteSpace(cfg.TableMapTo) ? cfg.Table : cfg.TableMapTo;
                var jobId = $"JOB_{cfg.Sort.ToString().PadLeft(3, '0')}-{tableName}";
                File.WriteAllText($"{destinationJobDir}/{jobId}.JSON", cfg.Job);
            }
        }
    }
}