using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.Collections;
using System.Data;
using MyClever.Lib;

namespace MyClever.DefaultPlugins
{
    /// <summary>
    /// Mission plugin that merges the rows of a new data package into an
    /// existing one, discarding any new row that duplicates a row already
    /// present in the combined result.
    /// </summary>
    public class MergeUniqueRows : MissionPlugin
    {
        [Package(Direction.Input, "Defines the data rows, which already exists.")]
        enum OriginData
        {
        }

        [Package(Direction.Input, "Defines the new rows to save, which could be a duplicate.")]
        enum NewData
        {
        }

        [Package(Direction.Output, "Defines the combinated data rows without duplicates.")]
        enum MergedData
        {
        }

        [Plugin(PluginDevStatus.Production, "Merges all new data rows, ignoring all duplicates.")]
        public MergeUniqueRows()
        {
        }

        /// <summary>
        /// Called once by the host before the first <see cref="Work"/> call.
        /// This plugin needs no start-up state.
        /// </summary>
        public override void Initialize()
        {
        }

        /// <summary>
        /// Called by the host when the program is stopped.
        /// This plugin holds no resources to release.
        /// </summary>
        public override void Dispose()
        {
        }

        /// <summary>
        /// Generates the merged output: copies the origin rows into the output
        /// package, merges the new rows in (columns missing from the origin
        /// schema are added), then strips duplicate rows.
        /// </summary>
        protected override void Work()
        {
            MissionPluginPackage outputPackage = this.GetPackageByName(typeof(MergedData));
            outputPackage.CopyDataFromPackage(this.GetPackageByName(typeof(OriginData)));

            MissionPluginPackage newDataPackage = this.GetPackageByName(typeof(NewData));

            // Merge appends the new rows; duplicates are culled afterwards.
            outputPackage.Merge(newDataPackage, false, MissingSchemaAction.Add);
            this.RemoveDuplicates(outputPackage);
        }

        /// <summary>
        /// Removes every row that duplicates an earlier row in the package,
        /// keeping the first occurrence of each distinct row.
        /// </summary>
        /// <param name="package">The package whose rows are de-duplicated in place.</param>
        private void RemoveDuplicates(MissionPluginPackage package)
        {
            int rowID = 0;

            // The last remaining row can have no later duplicates, so it needs no scan.
            while (rowID < package.Rows.Count - 1)
            {
                List<DataRow> duplicateRows = this.FindDuplicateRows(package, rowID);
                foreach (DataRow duplicateRowOn in duplicateRows)
                {
                    package.Rows.Remove(duplicateRowOn);
                }

                // All duplicates of the current row were just removed, so it is
                // always safe to advance. (The previous implementation re-scanned
                // the same row once more after a removal, which was redundant.)
                rowID++;
            }
        }

        /// <summary>
        /// Collects every row after <paramref name="originRowIndex"/> that
        /// duplicates the row at that index.
        /// </summary>
        /// <param name="package">The package to scan.</param>
        /// <param name="originRowIndex">Index of the row to compare against.</param>
        /// <returns>The duplicate rows found; empty if there are none.</returns>
        private List<DataRow> FindDuplicateRows(MissionPluginPackage package, int originRowIndex)
        {
            List<DataRow> duplicateRows = new List<DataRow>();

            // The row which could have duplicates among the rows that follow it.
            DataRow originRow = package.Rows[originRowIndex];

            for (int i = originRowIndex + 1; i < package.Rows.Count; i++)
            {
                if (this.IsDuplicate(originRow, package.Rows[i]))
                {
                    duplicateRows.Add(package.Rows[i]);
                }
            }
            return duplicateRows;
        }

        /// <summary>
        /// Determines whether two rows are duplicates: every column must either
        /// hold equal values or be empty in both rows (cells whose string form
        /// is null/empty — e.g. DBNull — are skipped when both sides are empty).
        /// </summary>
        /// <param name="originRow">The reference row.</param>
        /// <param name="rowToCheck">The candidate duplicate.</param>
        /// <returns><c>true</c> if <paramref name="rowToCheck"/> duplicates <paramref name="originRow"/>.</returns>
        private bool IsDuplicate(DataRow originRow, DataRow rowToCheck)
        {
            foreach (DataColumn columnOn in originRow.Table.Columns)
            {
                bool bothEmpty = string.IsNullOrEmpty(originRow[columnOn].ToString())
                              && string.IsNullOrEmpty(rowToCheck[columnOn].ToString());

                // A single differing, non-empty cell disqualifies the row pair.
                if (!bothEmpty && !originRow[columnOn].Equals(rowToCheck[columnOn]))
                {
                    return false;
                }
            }
            return true;
        }
    }
}
