﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using ARD.Configuration;
using ARD.Common;
using SharpObjects.SearchEngine.Common.Indexing;
using SharpObjects.SearchEngine.Common.Search;
using Lucene.Net.Analysis.Tokenattributes;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Standard;

namespace SharpObjects.SearchEngine.Indexing
{
    /// <summary>
    /// Splits an index field's text into fragments at punctuation/line-break
    /// boundaries (or a per-fragment word limit), scores each fragment's words
    /// against a characterizer map, and returns the high-scoring terms of each
    /// fragment as a group.
    /// </summary>
    public class TermGroupExtractor
    {
        private enum eState
        {
            OutsideFragment,
            InsideFragment,
            End
        }

        /// <summary>
        /// Immutable snapshot of a token's term text, character offsets and type,
        /// captured while the Lucene token stream is being consumed (the live
        /// attribute instance is reused by Lucene between tokens).
        /// </summary>
        private class TermAttributeInfo
        {
            private string term;
            private int start;
            private int end;
            private string termType;

            public TermAttributeInfo(TermAttribute termAttribute)
            {
                this.term = termAttribute.Term();

                if (termAttribute is TokenWrapper)
                {
                    TokenWrapper tokenWrapper = (TokenWrapper)termAttribute;

                    this.start = tokenWrapper.StartOffset();
                    this.end = tokenWrapper.EndOffset();
                    this.termType = tokenWrapper.Type();
                }
                else
                {
                    // Fallback: parse offsets/type out of the attribute's ToString()
                    // representation ("(term,start,end,type=xxx)" style).
                    // NOTE(review): this assumes the Lucene.Net attribute's ToString
                    // format is stable — confirm against the Lucene.Net version in use.
                    string[] parts = termAttribute.ToString().Split(',');
                    this.start = Convert.ToInt32(parts[1]);
                    this.end = Convert.ToInt32(parts[2]);
                    this.termType = parts[3].Substring(5); // strip the "type=" prefix
                }
            }

            public string Term { get { return this.term; } }
            public int Start { get { return this.start; } }
            public int End { get { return this.end; } }
            public string TermType { get { return this.termType; } }
        }

        // Characters that terminate a fragment: configured punctuation plus the
        // CR/FF/LF/VT control characters (appended in the static constructor).
        private static readonly char[] fragmentBoundaryCharacters;

        private eState state;
        private int maxWordCountPerFragment;
        private string text;
        private double thresholdFactor;

        static TermGroupExtractor()
        {
            // Boundary set = punctuation table + 4 line-break control characters.
            int totalSize = Characters.Punctuation.Length + 4;

            fragmentBoundaryCharacters = new char[totalSize];
            int k = 0;

            for (; k < Characters.Punctuation.Length; k++)
            {
                fragmentBoundaryCharacters[k] = Characters.Punctuation[k];
            }

            fragmentBoundaryCharacters[k++] = Characters.CarriageReturn;
            fragmentBoundaryCharacters[k++] = Characters.FormFeed;
            fragmentBoundaryCharacters[k++] = Characters.LineFeed;
            fragmentBoundaryCharacters[k++] = Characters.VerticalTab;
        }

        /// <summary>
        /// Creates an extractor.
        /// </summary>
        /// <param name="thresholdFactor">
        /// Interpolation factor in [0,1] between a fragment's minimum and maximum
        /// term score; terms scoring below the interpolated threshold are dropped.
        /// </param>
        /// <param name="maxWordCountPerFragment">Upper bound on words per fragment.</param>
        public TermGroupExtractor(double thresholdFactor = 0.5, int maxWordCountPerFragment = 50)
        {
            this.thresholdFactor = thresholdFactor;
            this.maxWordCountPerFragment = maxWordCountPerFragment;
        }

        /// <summary>
        /// Tokenizes the field text, groups consecutive tokens into fragments
        /// (split where a boundary character occurs between tokens, or when the
        /// per-fragment word limit is reached) and, per fragment, keeps the terms
        /// whose characterizer score reaches the interpolated threshold.
        /// </summary>
        /// <param name="indexField">Field whose <c>Text</c> is analyzed.</param>
        /// <param name="fullCharacterizer">Term → score map; unknown terms are dropped.</param>
        /// <returns>One scored term group per fragment that kept at least one term.</returns>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public List<List<TermVectorItem>> ProcessField(IndexField indexField, Dictionary<string, double> fullCharacterizer)
        {
            if (indexField == null)
            {
                throw new ArgumentNullException("indexField");
            }
            if (fullCharacterizer == null)
            {
                throw new ArgumentNullException("fullCharacterizer");
            }

            this.text = indexField.Text;

            List<TermAttributeInfo> termAttributeInfos = Tokenize(this.text);

            // Partition the token sequence into fragments.
            this.state = eState.OutsideFragment;
            int wordCount = 0;

            List<List<string>> fragments = new List<List<string>>();
            List<string> fragment = new List<string>();

            for (int k = 0; k < termAttributeInfos.Count; k++)
            {
                fragment.Add(termAttributeInfos[k].Term);
                wordCount++;

                UpdateState(termAttributeInfos, k);

                // ">=" (was ">"): a fragment never exceeds maxWordCountPerFragment words.
                if (this.state == eState.OutsideFragment || this.state == eState.End || wordCount >= this.maxWordCountPerFragment)
                {
                    fragments.Add(fragment);
                    fragment = new List<string>();
                    wordCount = 0;
                }
            }

            // Filter out non-informative terms from each fragment.
            List<List<TermVectorItem>> groups = new List<List<TermVectorItem>>();

            foreach (List<string> currentFragment in fragments)
            {
                List<TermVectorItem> fragmentTerms = new List<TermVectorItem>();
                double minScore = double.MaxValue;
                double maxScore = double.MinValue;

                foreach (string word in currentFragment)
                {
                    double score;

                    // Single lookup instead of ContainsKey + indexer.
                    if (fullCharacterizer.TryGetValue(word, out score))
                    {
                        fragmentTerms.Add(new TermVectorItem() { Term = word, Score = score });
                        minScore = Math.Min(minScore, score);
                        maxScore = Math.Max(maxScore, score);
                    }
                }

                // Threshold interpolated between the fragment's score extremes; when no
                // term was recognized, fragmentTerms is empty and the group is skipped.
                double threshold = minScore + (maxScore - minScore) * this.thresholdFactor;

                List<TermVectorItem> group = fragmentTerms.Where(item => item.Score >= threshold).ToList();

                if (group.Count > 0)
                {
                    groups.Add(group);
                }
            }

            return groups;
        }

        /// <summary>
        /// Runs the Lucene StandardAnalyzer over the given text and snapshots
        /// every token's term/offset information.
        /// </summary>
        private List<TermAttributeInfo> Tokenize(string fieldText)
        {
            List<TermAttributeInfo> termAttributeInfos = new List<TermAttributeInfo>();

            // A StringReader replaces the original MemoryStream/StreamWriter/StreamReader
            // round-trip: the analyzer only needs a TextReader over the text.
            using (StringReader reader = new StringReader(fieldText))
            {
                StandardAnalyzer standardAnalyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT);
                TokenStream tokenStream = standardAnalyzer.TokenStream("dummy", reader);

                try
                {
                    TermAttribute termAttribute = (TermAttribute)tokenStream.AddAttribute(typeof(TermAttribute));

                    while (tokenStream.IncrementToken())
                    {
                        termAttributeInfos.Add(new TermAttributeInfo(termAttribute));
                    }
                }
                finally
                {
                    // Release tokenizer resources (the original leaked the stream).
                    tokenStream.Close();
                }
            }

            return termAttributeInfos;
        }

        /// <summary>
        /// Updates the extractor state from the raw text between the current
        /// token and the next: any boundary character in that gap ends the
        /// current fragment; the last token moves the state to End.
        /// </summary>
        private void UpdateState(List<TermAttributeInfo> termAttributeInfos, int k)
        {
            if (k >= (termAttributeInfos.Count - 1))
            {
                this.state = eState.End;
                return;
            }

            string gap = this.text.Substring(termAttributeInfos[k].End, termAttributeInfos[k + 1].Start - termAttributeInfos[k].End);

            this.state = gap.IndexOfAny(fragmentBoundaryCharacters) >= 0
                ? eState.OutsideFragment
                : eState.InsideFragment;
        }
    }
}
