﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Lucene.Net.Analysis;

namespace FisheryPlatform.Search.Core.Analysis
{
    /// <summary>
    /// MMSegmentFilter: Segment CN-Tokens using MMWordIdentifier.
    /// </summary>
    /// <author>gwd, 2006-5-26</author>
    public class MMSegmentFilter : TokenFilter
    {
        /// <summary>
        /// Buffered output tokens, filled lazily by <see cref="Segment"/> on the
        /// first call to <see cref="Next"/> and then drained from the front.
        /// </summary>
        protected internal System.Collections.ArrayList _tokens = null;

        /// <summary>Name of the dictionary used for Chinese word identification.</summary>
        protected internal string _dictionaryName;

        /// <summary>Owning analyzer; provides access to the dictionary factory.</summary>
        protected MMAnalyzer _analyzer;

        /// <summary>
        /// Creates a filter that re-segments tokens of type "CN" from the wrapped
        /// stream using a maximum-matching word identifier backed by the named dictionary.
        /// </summary>
        /// <param name="analyzer">Analyzer supplying the <c>DictionaryFactory</c>.</param>
        /// <param name="input">Upstream token stream to filter.</param>
        /// <param name="dictionaryName">Dictionary to load for segmentation.</param>
        public MMSegmentFilter(MMAnalyzer analyzer, TokenStream input, string dictionaryName)
            : base(input)
        {
            _analyzer = analyzer;
            _dictionaryName = dictionaryName;
        }

        /// <summary>
        /// Returns the next token, or <c>null</c> when the stream is exhausted.
        /// The entire upstream is consumed and segmented on the first call.
        /// </summary>
        /// <returns>The next <see cref="Token"/>, or <c>null</c> at end of stream.</returns>
        public override Token Next()
        {
            // Lazily consume and segment the whole upstream once.
            if (_tokens == null) Segment();

            if (_tokens.Count == 0) return null;

            // Pop the first buffered token.
            // NOTE(review): RemoveAt(0) on ArrayList is O(n); kept because the
            // field type is part of the protected interface. A queue would be O(1).
            Token token = (Token)_tokens[0];
            _tokens.RemoveAt(0);
            return token;
        }

        /// <summary>
        /// Consumes the entire upstream stream, re-segmenting "CN" tokens with
        /// <c>MMWordIdentifier</c> and passing all other tokens through unchanged.
        /// </summary>
        protected virtual void Segment()
        {
            _tokens = new System.Collections.ArrayList();      // buffer for the real output tokens

            // The dictionary and identifier are loop-invariant: create them at most
            // once (lazily, so streams with no CN tokens never load the dictionary).
            WordIdentifier identifier = null;

            for (Token token = this.input.Next(); token != null; token = this.input.Next())
            {
                if (token.Type().Equals("CN"))     // Chinese run: split into dictionary words
                {
                    if (identifier == null)
                    {
                        Dictionary dic = _analyzer.DictionaryFactory.GetDictionary(_dictionaryName);
                        identifier = new MMWordIdentifier(dic);
                    }

                    string[] words = identifier.IdentifyWord(token.TermText());
                    AddToken(token.StartOffset(), words);
                }
                else
                {
                    _tokens.Add(token);            // non-Chinese tokens pass through as-is
                }
            }
        }

        /// <summary>
        /// Appends one "CN_WORD" token per segmented word, assigning consecutive
        /// character offsets starting at <paramref name="startOffset"/>.
        /// </summary>
        /// <param name="startOffset">Start offset of the original CN token.</param>
        /// <param name="words">Segmented words, in source order.</param>
        protected virtual void AddToken(int startOffset, string[] words)
        {
            for (int j = 0; j < words.Length; j++)
            {
                int wordLength = words[j].Length;
                // Lucene end offsets are exclusive (one past the last character),
                // so the end is startOffset + wordLength, not "- 1" — the original
                // off-by-one produced overlapping/short offsets for every word.
                _tokens.Add(new Token(words[j], startOffset, startOffset + wordLength, "CN_WORD"));
                startOffset += wordLength;
            }
        }
    }
}
