﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using TextMining;

namespace HierarchicalClustering.Metrics
{
    public class Euclidean : IMetric   // Euclidean distance between TF-IDF vectors of two documents
    {
        private readonly int _numOfDocuments;   // number of documents in dataset (used for the IDF term)

        public Euclidean(int numOfDocuments)
        {
            _numOfDocuments = numOfDocuments;
        }

        #region IMetric Members

        /// <summary>
        /// Computes the Euclidean distance between the TF-IDF vectors of two documents
        /// via a merge-walk over their word lists. Words flagged as not important are
        /// skipped; a word present in only one document contributes its squared weight.
        /// NOTE(review): assumes <c>Document.Words</c> enumerates in ascending key order
        /// (e.g. a SortedDictionary) — the key comparisons below rely on it; confirm.
        /// </summary>
        /// <param name="documentA">The first document.</param>
        /// <param name="documentB">The second document.</param>
        /// <returns>Square root of the summed squared TF-IDF differences over the union of important words.</returns>
        public double Distance(Document documentA, Document documentB)
        {
            IEnumerator<KeyValuePair<string, Pair>> enumA = documentA.Words.GetEnumerator();
            IEnumerator<KeyValuePair<string, Pair>> enumB = documentB.Words.GetEnumerator();
            double sum = 0, temp, temp2;

            // Use MoveNext()'s return value directly: it is false for an empty word
            // list, which replaces the original fragile "Current.Value == null" probe.
            bool flagA = enumA.MoveNext();
            bool flagB = enumB.MoveNext();

            // BUG FIX: the original looped only while BOTH documents still had words,
            // so the tail of the longer document was silently dropped and the distance
            // was underestimated (e.g. distance(empty, X) came out as 0). Unmatched
            // tail words must contribute (t - 0)^2 each, so loop until BOTH run out.
            while (flagA || flagB)
            {
                if (flagA && enumA.Current.Value.Key.IsImportant == false)
                {   // not important word found in the 1st document - skip it
                    flagA = enumA.MoveNext();
                    continue;
                }
                if (flagB && enumB.Current.Value.Key.IsImportant == false)
                {   // not important word found in the 2nd document - skip it
                    flagB = enumB.MoveNext();
                    continue;
                }
                // BUG FIX: the exhaustion guards were swapped (flagA/flagB). When the
                // 2nd document is exhausted, the REMAINING words of the 1st must be
                // consumed (and vice versa); the live flag also short-circuits the key
                // comparison so an exhausted enumerator's Current is never touched.
                if (!flagB || (flagA && enumA.Current.Key.CompareTo(enumB.Current.Key) < 0))
                {   // word appears only in the 1st document
                    temp = enumA.Current.Value.Key.TfIdfValue(enumA.Current.Value.Value, documentA.WordCount, _numOfDocuments);
                    sum += temp * temp;
                    flagA = enumA.MoveNext();
                }
                else if (!flagA || enumA.Current.Key.CompareTo(enumB.Current.Key) > 0)
                {   // word appears only in the 2nd document
                    temp = enumB.Current.Value.Key.TfIdfValue(enumB.Current.Value.Value, documentB.WordCount, _numOfDocuments);
                    sum += temp * temp;
                    flagB = enumB.MoveNext();
                }
                else   // the same word is found in both documents
                {
                    temp = enumA.Current.Value.Key.TfIdfValue(enumA.Current.Value.Value, documentA.WordCount, _numOfDocuments);
                    temp2 = enumB.Current.Value.Key.TfIdfValue(enumB.Current.Value.Value, documentB.WordCount, _numOfDocuments);
                    sum += (temp - temp2) * (temp - temp2);
                    flagA = enumA.MoveNext();
                    flagB = enumB.MoveNext();
                }
            }

            return Math.Sqrt(sum);   // euclidean distance
        }

        #endregion
    }
}
