﻿#region
/* ===============================================
 * Author: guopeng          Created: 2012/2/26 22:25:25
 *
 *
 * Description: base class for PanGu-analyzed Lucene.Net full-text indexes
 *
 *
 * =============================================== */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Lucene.Net.Index;
using Lucene.Net.Documents;
using Lucene.Net.Analysis;
using PanGu;
using Lucene.Net.Search;
using Lucene.Net.QueryParsers;
using System.Threading;

namespace Sexybeauty.Core.ExServer.FullSearch
{
    /// <summary>
    /// Base class for a Lucene.Net full-text index over entities of type
    /// <typeparamref name="T"/>, tokenized with the PanGu Chinese analyzer.
    /// Subclasses implement <see cref="IndexString"/> (entity -> document mapping)
    /// and <see cref="Search"/> (query -> paged entity list).
    /// </summary>
    /// <typeparam name="T">The entity type stored in the index.</typeparam>
    public abstract class Index<T> where T:class,new()
    {
        // Merge settings captured before a writer exists. The previous version
        // silently dropped the constructor arguments because the property setters
        // no-op while `writer` is null; these fields remember the values so they
        // can be applied as soon as a writer is created.
        private int? _pendingMergeFactor;
        private int? _pendingMaxMergeDocs;
        private int? _pendingMinMergeDocs;

        /// <summary>
        /// Initializes the index with the desired merge settings. The values take
        /// effect once <see cref="CreateIndex"/> creates the underlying writer.
        /// </summary>
        /// <param name="maxMergeFactor">Lucene merge factor (number of segments merged at once).</param>
        /// <param name="minMergeDocs">Maximum number of documents buffered in memory before a flush.</param>
        public Index(int maxMergeFactor, int minMergeDocs)
        {
            this.MaxMergeFactor = maxMergeFactor;
            this.MinMergeDocs = minMergeDocs;
        }

        /// <summary>
        /// Physical directory holding the index files: the "Indexs" folder
        /// beside the executing assembly.
        /// </summary>
        internal String INDEX_DIR
        {
            get
            {
                return PanGu.Framework.Path.GetAssemblyPath() + @"Indexs";
            }
        }

        /// <summary>Active writer; null until <see cref="CreateIndex"/> or <see cref="Rebuild"/> is called, and again after closing.</summary>
        internal IndexWriter writer = null;

        /// <summary>
        /// Lucene merge factor. Reads/writes the live writer when one exists;
        /// otherwise the value is remembered and applied when a writer is created.
        /// </summary>
        internal int MaxMergeFactor
        {
            get
            {
                if (writer != null)
                {
                    return writer.GetMergeFactor();
                }
                return _pendingMergeFactor ?? 0;
            }

            set
            {
                _pendingMergeFactor = value;
                if (writer != null)
                {
                    writer.SetMergeFactor(value);
                }
            }
        }

        /// <summary>
        /// Maximum number of documents a single segment may contain.
        /// Buffered and applied like <see cref="MaxMergeFactor"/>.
        /// </summary>
        internal int MaxMergeDocs
        {
            get
            {
                if (writer != null)
                {
                    return writer.GetMaxMergeDocs();
                }
                return _pendingMaxMergeDocs ?? 0;
            }

            set
            {
                _pendingMaxMergeDocs = value;
                if (writer != null)
                {
                    writer.SetMaxMergeDocs(value);
                }
            }
        }

        /// <summary>
        /// Number of documents buffered in RAM before being flushed
        /// (mapped to Lucene's MaxBufferedDocs). Buffered and applied like
        /// <see cref="MaxMergeFactor"/>.
        /// </summary>
        internal int MinMergeDocs
        {
            get
            {
                if (writer != null)
                {
                    return writer.GetMaxBufferedDocs();
                }
                return _pendingMinMergeDocs ?? 0;
            }

            set
            {
                _pendingMinMergeDocs = value;
                if (writer != null)
                {
                    writer.SetMaxBufferedDocs(value);
                }
            }
        }

        /// <summary>
        /// Opens the index at <paramref name="indexDir"/> for appending; if that
        /// fails (typically because no index exists there yet) a brand-new index
        /// is created instead. Any merge settings supplied earlier are then applied.
        /// </summary>
        /// <param name="indexDir">Directory that holds (or will hold) the index files.</param>
        internal void CreateIndex(String indexDir)
        {
            try
            {
                // create = false: append to an existing index.
                writer = new IndexWriter(indexDir, new PanGuAnalyzer(), false);
            }
            catch
            {
                // Deliberate best-effort fallback: opening fails when the index
                // does not exist (or is unreadable), so build one from scratch.
                writer = new IndexWriter(indexDir, new PanGuAnalyzer(), true);
            }

            ApplyPendingMergeSettings();
        }

        // Pushes any merge settings captured before the writer existed onto the
        // freshly created writer. No-op when nothing is pending.
        private void ApplyPendingMergeSettings()
        {
            if (writer == null)
            {
                return;
            }
            if (_pendingMergeFactor.HasValue)
            {
                writer.SetMergeFactor(_pendingMergeFactor.Value);
            }
            if (_pendingMaxMergeDocs.HasValue)
            {
                writer.SetMaxMergeDocs(_pendingMaxMergeDocs.Value);
            }
            if (_pendingMinMergeDocs.HasValue)
            {
                writer.SetMaxBufferedDocs(_pendingMinMergeDocs.Value);
            }
        }

        /// <summary>
        /// Erases any existing index at <paramref name="indexDir"/> and writes a
        /// fresh, optimized, empty one. The writer is closed on return.
        /// </summary>
        /// <param name="indexDir">Directory that holds the index files.</param>
        internal void Rebuild(String indexDir)
        {
            writer = new IndexWriter(indexDir, new PanGuAnalyzer(), true);
            writer.Optimize();
            writer.Close();
            writer = null; // closed writers must never be reused
        }

        /// <summary>
        /// Adds a single entity to the index. Implementations are responsible for
        /// mapping the entity's fields onto Lucene documents/fields.
        /// </summary>
        /// <param name="article">The entity to index.</param>
        internal abstract void IndexString(T article);

        /// <summary>
        /// Closes the writer without optimizing (faster, but segments stay
        /// unmerged). Safe to call when no writer is open.
        /// </summary>
        internal void CloseWithNoOptimize()
        {
            if (writer != null)
            {
                writer.Close();
                writer = null;
            }
        }

        /// <summary>
        /// Optimizes the index (merges segments) and closes the writer.
        /// Safe to call when no writer is open.
        /// </summary>
        internal void Close()
        {
            if (writer != null)
            {
                writer.Optimize();
                writer.Close();
                writer = null;
            }
        }

        /// <summary>
        /// Tokenizes <paramref name="keywords"/> with the given analyzer and
        /// returns each token's text, sliced out of the original string by the
        /// token's start/end offsets.
        /// </summary>
        /// <param name="keywords">Raw text to segment.</param>
        /// <param name="analyzer">Analyzer used for tokenization.</param>
        /// <returns>The token substrings, in stream order.</returns>
        internal List<string> SplitKeyWords(string keywords, Analyzer analyzer)
        {
            List<string> result = new List<string>();

            // The reader (and the stream PanGu wraps for us) must be disposed,
            // and the token stream closed, even if tokenization throws.
            using (System.IO.StreamReader reader = new System.IO.StreamReader(
                PanGu.Framework.Stream.WriteStringToStream(keywords, Encoding.UTF8), Encoding.UTF8))
            {
                TokenStream tokenStream = analyzer.TokenStream("", reader);
                try
                {
                    // global:: disambiguates Lucene's Token from PanGu's.
                    global::Lucene.Net.Analysis.Token token = tokenStream.Next();

                    while (token != null)
                    {
                        result.Add(keywords.Substring(token.StartOffset(), token.EndOffset() - token.StartOffset()));
                        token = tokenStream.Next();
                    }
                }
                finally
                {
                    tokenStream.Close();
                }
            }

            return result;
        }

        /// <summary>
        /// Segments <paramref name="keywords"/> with PanGu and renders the terms
        /// as a space-separated Lucene query fragment, boosting each term by
        /// 3^rank (e.g. "term^9.0").
        /// </summary>
        /// <param name="keywords">Raw user query text.</param>
        /// <param name="ktTokenizer">PanGu tokenizer used for segmentation.</param>
        /// <returns>Boost-annotated, space-separated terms (trimmed).</returns>
        internal string GetKeyWordsSplitBySpace(string keywords, PanGuTokenizer ktTokenizer)
        {
            StringBuilder result = new StringBuilder();

            ICollection<WordInfo> words = ktTokenizer.SegmentToWordInfos(keywords);

            foreach (WordInfo word in words)
            {
                if (word == null)
                {
                    continue;
                }

                // Boost grows exponentially with the tokenizer's rank for the word.
                result.AppendFormat("{0}^{1}.0 ", word.Word, (int)Math.Pow(3, word.Rank));
            }

            return result.ToString().Trim();
        }

        /// <summary>
        /// Searches the index and returns one page of matching entities.
        /// </summary>
        /// <param name="key">Raw user query.</param>
        /// <param name="pageLen">Page size.</param>
        /// <param name="pageNo">Page number (presumably 1-based — confirm against implementations).</param>
        /// <param name="recCount">Receives the total number of matching records.</param>
        /// <returns>The entities on the requested page.</returns>
        public abstract List<T> Search(string key, int pageLen, int pageNo, out int recCount);
    }
}
