﻿using System;
using System.Collections.Generic;
using System.Text;

using Lucene.Net;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.QueryParsers;
using Lucene.Net.Store;

using PanGu;
using Lucene.Net.Analysis.PanGu;
using CMN.Entity;
using CMN.Common;

namespace CMN.Common
{
    public   class LuceneHelper
    {
        private static object obj = new object();
        /// <summary>
        /// Lazily created, shared read-only <see cref="IndexReader"/> for the index at
        /// <see cref="m_IndexPath"/>. Creation is guarded by <c>obj</c>; the setter is
        /// used by the Search methods to swap in a reopened reader.
        /// </summary>
        public static IndexReader IReader
        {
            get
            {
                lock (obj)
                {
                    if (reader == null)
                    {
                        // Ensure the index directory exists BEFORE opening a reader on it;
                        // previously the directory was created after FSDirectory.Open, so
                        // the very first open on a fresh deployment could fail.
                        Utils.CreateDir(m_IndexPath);
                        Directory dir = FSDirectory.Open(m_IndexPath);
                        reader = IndexReader.Open(dir, true); // true = read-only reader
                    }
                }
                return reader;
            }
            set
            {
                reader = value;
            }
        }
        // Backing field for IReader; also compared against directly by the Search methods.
        private static IndexReader reader = null;
        /// <summary>
        /// Analyzer used for both indexing and query parsing (PanGu Chinese segmentation).
        /// </summary>
        //private Analyzer m_Analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
        private Analyzer m_Analyzer = new PanGuAnalyzer();
        /// <summary>
        /// Shared PanGu tokenizer used to split raw search phrases into terms.
        /// </summary>
        public static PanGuTokenizer KtTokenizer = new PanGuTokenizer();
        /// <summary>
        /// Index directory path: server root + the "lucenePath" appSetting.
        /// </summary>
        private static string m_IndexPath = Utils.GetServerPath() + System.Configuration.ConfigurationManager.AppSettings["lucenePath"].ToString();


        #region 索引查询

        /// <summary>
        /// Title/tag search over a channel, returning at most 100 hits.
        /// </summary>
        /// <param name="qp">Query parameters (keywords and paging info).</param>
        /// <param name="channelPath">Channel path used to restrict the search scope.</param>
        /// <returns>The matching article list entries.</returns>
        public List<Module_ListInfo> Search(QueryParam qp, string channelPath)
        {
            // Delegate to the full overload: 100 hits max, searchType 2 (title + tag).
            return Search(qp, channelPath, 100, 2);
        }
        /// <summary>
        /// Search limited to 120 hits, either title/tag only or all fields.
        /// </summary>
        /// <param name="qp">Query parameters (keywords and paging info).</param>
        /// <param name="channelPath">Channel path used to restrict the search scope.</param>
        /// <param name="isSearchTitle">true = search title/tag only; false = search all fields.</param>
        /// <returns>The matching article list entries.</returns>
        public List<Module_ListInfo> Search(QueryParam qp, string channelPath, bool isSearchTitle)
        {
            // searchType 2 = title + tag, searchType 1 = title + body + tag.
            return Search(qp, channelPath, 120, isSearchTitle ? 2 : 1);
        }

        /// <summary>
        /// Searches the single "SearchContent" field (custom-column index).
        /// </summary>
        /// <param name="qp">Query parameters; <c>qp.Where</c> holds the raw keywords,
        /// <c>qp.Count</c> receives the capped hit count, and
        /// <c>qp.PageIndex</c>/<c>qp.PageSize</c> select the page returned.</param>
        /// <param name="searchCount">Maximum number of hits to retrieve.</param>
        /// <returns>Entries for the requested page; empty when the keywords segment to nothing.</returns>
        public List<Normal_SearchContentEntity> Search(QueryParam qp, int searchCount)
        {
            List<Normal_SearchContentEntity> lstAtc = new List<Normal_SearchContentEntity>();
            if (string.IsNullOrEmpty(qp.Where))
            {
                throw new Exception("索引查询失败,参数strSeach为空", null);
            }
            // Reopen the shared reader so newly committed documents become visible.
            // NOTE(review): this dispose-and-swap is not synchronized against concurrent
            // searches — confirm single-threaded use or guard it with the IReader lock.
            IndexReader newReader = IReader.Reopen();
            if (newReader != reader)
            {
                IReader.Dispose();
                IReader = newReader;
            }
            IndexSearcher searcher = new IndexSearcher(IReader);
            try
            {
                // Boolean query used to combine the individual clauses.
                BooleanQuery query = new BooleanQuery();
                string strSeach = QueryParser.Escape(qp.Where);
                strSeach = GetKeyWordsSplitBySpace(strSeach, KtTokenizer);

                if (string.IsNullOrEmpty(strSeach))
                {
                    return lstAtc;
                }

                query.Add(new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, new string[] { "SearchContent" }, m_Analyzer).Parse(strSeach), Lucene.Net.Search.Occur.MUST);

                TopDocs hits = searcher.Search(query, searchCount);

                // Cap the reported count at the number of hits actually retrieved.
                // The previous hard-coded cap of 100 could exceed hits.ScoreDocs.Length
                // whenever searchCount < 100, indexing past the end of the array below.
                qp.Count = (hits.TotalHits > searchCount ? searchCount : hits.TotalHits);
                int startNum = (qp.PageIndex - 1) * qp.PageSize;
                int endNum = startNum + qp.PageSize;
                if (endNum > qp.Count)
                    endNum = qp.Count;
                if (startNum > -1 && endNum <= qp.Count)
                {
                    for (int i = startNum; i < endNum; i++)
                    {
                        ScoreDoc scoredoc = hits.ScoreDocs[i];
                        Document doc = searcher.Doc(scoredoc.Doc);
                        Normal_SearchContentEntity searchContent = new Normal_SearchContentEntity();
                        if (doc.Get("SearchType") != null)
                            searchContent.SearchType = doc.Get("SearchType");
                        if (doc.Get("SearchContent") != null)
                            searchContent.SearchContent = doc.Get("SearchContent");
                        lstAtc.Add(searchContent);
                    }
                }
            }
            catch (Exception ex)
            {
                throw new Exception("索引查询失败", ex);
            }
            finally
            {
                searcher.Dispose();
            }
            return lstAtc;
        }
        /// <summary>
        /// Searches the article index, optionally restricted to a channel path prefix.
        /// </summary>
        /// <param name="qp">Query parameters; <c>qp.Where</c> holds the raw keywords,
        /// <c>qp.Count</c> receives the capped hit count, and
        /// <c>qp.PageIndex</c>/<c>qp.PageSize</c> select the page returned.</param>
        /// <param name="channelPath">Channel path prefix; null/empty searches all channels.</param>
        /// <param name="searchCount">Maximum number of hits to retrieve.</param>
        /// <param name="searchType">1 = title + body + tag, 2 = title + tag, 3 = tag only.</param>
        /// <returns>Entries for the requested page; empty when the keywords segment to nothing.</returns>
        public List<Module_ListInfo> Search(QueryParam qp, string channelPath, Int32 searchCount, int searchType)
        {
            List<Module_ListInfo> lstAtc = new List<Module_ListInfo>();
            if (string.IsNullOrEmpty(qp.Where))
            {
                // (The "return lstAtc;" that used to follow this throw was unreachable.)
                throw new Exception("索引查询失败,参数strSeach为空", null);
            }
            // Reopen the shared reader so newly committed documents become visible.
            // NOTE(review): this dispose-and-swap is not synchronized against concurrent
            // searches — confirm single-threaded use or guard it with the IReader lock.
            IndexReader newReader = IReader.Reopen();
            if (newReader != reader)
            {
                IReader.Dispose();
                IReader = newReader;
            }
            IndexSearcher searcher = new IndexSearcher(IReader);
            try
            {
                // Boolean query combining the channel filter with the keyword clause.
                BooleanQuery query = new BooleanQuery();
                string strSeach = QueryParser.Escape(qp.Where);
                strSeach = GetKeyWordsSplitBySpace(strSeach, KtTokenizer);
                if (string.IsNullOrEmpty(strSeach))
                {
                    return lstAtc;
                }
                // Restrict results to documents whose Path starts with channelPath.
                if (!string.IsNullOrEmpty(channelPath))
                    query.Add(new WildcardQuery(new Term("Path", string.Concat(channelPath, "*"))), Lucene.Net.Search.Occur.MUST);

                if (searchType == 1)
                    query.Add(new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, new string[] { "Title", "TextContent", "Tag" }, m_Analyzer).Parse(strSeach), Lucene.Net.Search.Occur.MUST);
                else if (searchType == 2)
                    // NOTE(review): SHOULD combined with the MUST channel clause above makes
                    // the keyword match optional (affects scoring only) — confirm whether
                    // MUST was intended here, as in the other two branches.
                    query.Add(new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, new string[] { "Title", "Tag" }, m_Analyzer).Parse(strSeach), Lucene.Net.Search.Occur.SHOULD);
                else if (searchType == 3)
                    // Tag search parses the raw (unescaped, unsegmented) keywords.
                    query.Add(new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, new string[] { "Tag" }, m_Analyzer).Parse(qp.Where), Lucene.Net.Search.Occur.MUST);

                TopDocs hits = searcher.Search(query, searchCount);

                // Cap the reported count at the number of hits actually retrieved.
                qp.Count = (hits.TotalHits > searchCount ? searchCount : hits.TotalHits);
                int startNum = (qp.PageIndex - 1) * qp.PageSize;
                int endNum = startNum + qp.PageSize;
                if (endNum > qp.Count)
                    endNum = qp.Count;
                if (startNum > -1 && endNum <= qp.Count)
                {
                    for (int i = startNum; i < endNum; i++)
                    {
                        ScoreDoc scoredoc = hits.ScoreDocs[i];
                        Document doc = searcher.Doc(scoredoc.Doc);
                        Module_ListInfo atl = new Module_ListInfo();
                        atl.Id = Utils.StringToInt(doc.Get("Id"), atl.Id);
                        atl.Title = doc.Get("Title");
                        if (doc.Get("Path") != null)
                            atl.Path = doc.Get("Path");
                        if (doc.Get("ChannelId") != null)
                            atl.ChannelId = Utils.StringToInt(doc.Get("ChannelId"), atl.ChannelId);
                        if (doc.Get("OrderNum") != null)
                            atl.OrderNum = Utils.StringToInt(doc.Get("OrderNum"), atl.OrderNum);
                        if (doc.Get("Summary") != null)
                            atl.Summary = doc.Get("Summary");
                        if (doc.Get("Img") != null)
                            atl.Img = doc.Get("Img");
                        if (doc.Get("Tag") != null)
                            atl.Tags = doc.Get("Tag");
                        lstAtc.Add(atl);
                    }
                }
            }
            catch (Exception ex)
            {
                throw new Exception("索引查询失败", ex);
            }
            finally
            {
                searcher.Dispose();
            }
            return lstAtc;
        }

        #endregion 

        #region 删除索引
        /// <summary>
        /// Deletes every indexed document whose "Id" field equals <paramref name="id"/>.
        /// </summary>
        /// <param name="id">Article id to remove from the index; must be non-empty.</param>
        /// <returns>true when the delete completed.</returns>
        public bool DeleteDocuments(string id)
        {
            if (string.IsNullOrEmpty(id))
            {
                // (The "return false;" that used to follow this throw was unreachable.)
                throw new Exception("索引查询删除失败,参数Id为空", null);
            }

            Directory dir = FSDirectory.Open(m_IndexPath);
            // Open a writable reader (readOnly = false) so documents can be deleted.
            IndexReader indexreader = IndexReader.Open(dir, false);
            try
            {
                indexreader.DeleteDocuments(new Term("Id", id));
            }
            catch (Exception ex)
            {
                throw new Exception("索引查询删除失败", ex);
            }
            finally
            {
                // Closing the reader commits the pending deletes.
                // (indexreader.UndeleteAll() could be used before this to revert them.)
                indexreader.Dispose();
                dir.Dispose();
            }
            return true;
        }
        #endregion

        #region 新增索引

        /// <summary>
        /// Appends one index document per entry, storing "SearchContent" (analyzed)
        /// and "SearchType" (stored verbatim).
        /// </summary>
        /// <param name="ContentList">Entries to index; must contain at least one item.</param>
        /// <returns>true when all documents were added.</returns>
        public bool CreateIndex(List<Normal_SearchContentEntity> ContentList)
        {
            if (ContentList.Count < 1)
            {
                // (The "return false;" that used to follow this throw was unreachable.)
                throw new Exception("索引新增失败,ContentList为空", null);
            }
            Directory dir = FSDirectory.Open(m_IndexPath);
            IndexWriter writer = new IndexWriter(dir, m_Analyzer, IndexWriter.MaxFieldLength.UNLIMITED); // UNLIMITED: no field-length cap
            try
            {
                foreach (Normal_SearchContentEntity conent in ContentList)
                {
                    Document doc = new Document();
                    doc.Add(new Field("SearchContent", conent.SearchContent, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("SearchType", conent.SearchType, Field.Store.YES, Field.Index.NOT_ANALYZED));
                    writer.AddDocument(doc);
                }
            }
            catch (Exception ex)
            {
                // Preserve the original failure as the inner exception instead of discarding it.
                throw new Exception("索引新增失败," + ex.Message, ex);
            }
            finally
            {
                writer.Dispose();
                dir.Dispose();
            }
            return true;
        }
       

        /// <summary>
        /// Indexes a single article by delegating to the list overload.
        /// </summary>
        /// <param name="articlEntity">Article to index.</param>
        /// <returns>true when the document was added.</returns>
        public bool CreateIndex(Module_ArticleEntity articlEntity)
        {
            return CreateIndex(new List<Module_ArticleEntity> { articlEntity });
        }

     

        /// <summary>
        /// Appends one index document per article: stored list metadata plus
        /// analyzed title/tag/body for full-text search.
        /// </summary>
        /// <param name="articlEntityList">Articles to index; must contain at least one item.</param>
        /// <returns>true when all documents were added.</returns>
        public bool CreateIndex(List<Module_ArticleEntity> articlEntityList)
        {
            if (articlEntityList.Count < 1)
            {
                // (The "return false;" that used to follow this throw was unreachable.)
                throw new Exception("索引新增失败,articlEntityList为空", null);
            }
            Directory dir = FSDirectory.Open(m_IndexPath);
            IndexWriter writer = new IndexWriter(dir, m_Analyzer, IndexWriter.MaxFieldLength.UNLIMITED); // UNLIMITED: no field-length cap
            try
            {
                foreach (Module_ArticleEntity articlEntity in articlEntityList)
                {
                    Document doc = new Document();
                    doc.Add(new Field("Id", articlEntity.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("Title", articlEntity.Title.ToString(), Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("Img", articlEntity.Img, Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("Tag", articlEntity.Tags, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("Summary", articlEntity.Summary.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("ChannelId", articlEntity.ChannelId.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("Path", articlEntity.Path, Field.Store.YES, Field.Index.NOT_ANALYZED));
                    // Body text is searchable but not stored in the index.
                    doc.Add(new Field("TextContent", articlEntity.TextContent, Field.Store.NO, Field.Index.ANALYZED));
                    doc.Add(new Field("OrderNum", articlEntity.OrderNum.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));

                    // Boost title and tag so matches there rank above body matches.
                    doc.GetField("Title").Boost = 3.5F;
                    doc.GetField("Tag").Boost = 5.0F;
                    writer.AddDocument(doc);
                }
            }
            catch (Exception ex)
            {
                // Preserve the original failure as the inner exception instead of discarding it.
                throw new Exception("索引新增失败," + ex.Message, ex);
            }
            finally
            {
                writer.Dispose();
                dir.Dispose();
            }
            return true;
        }
        #endregion 

        #region 更新索引
        /// <summary>
        /// Re-indexes a single article: replaces any existing document whose "Id"
        /// term matches the entity's id with a freshly built document.
        /// </summary>
        /// <param name="articleEntity">Article whose index entry should be refreshed.</param>
        /// <returns>true on success; false when the update failed (the error is logged).</returns>
        public bool UpdateIndex(Module_ArticleEntity articleEntity)
        {
            var dir = FSDirectory.Open(m_IndexPath);
            // false = open the existing index rather than creating a fresh one.
            var writer = new IndexWriter(dir, m_Analyzer, false, IndexWriter.MaxFieldLength.UNLIMITED);
            try
            {
                var document = new Document();

                document.Add(new Field("Id", articleEntity.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                document.Add(new Field("Title", articleEntity.Title.ToString(), Field.Store.YES, Field.Index.ANALYZED));
                document.Add(new Field("Img", articleEntity.Img, Field.Store.YES, Field.Index.NOT_ANALYZED));
                document.Add(new Field("Tag", articleEntity.Tags, Field.Store.YES, Field.Index.ANALYZED));
                document.Add(new Field("Summary", articleEntity.Summary.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                document.Add(new Field("ChannelId", articleEntity.ChannelId.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                document.Add(new Field("Path", articleEntity.Path, Field.Store.YES, Field.Index.NOT_ANALYZED));
                document.Add(new Field("TextContent", articleEntity.TextContent, Field.Store.NO, Field.Index.ANALYZED));
                document.Add(new Field("OrderNum", articleEntity.OrderNum.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));

                // Boost title and tag so matches there rank above body matches.
                document.GetField("Title").Boost = 3.5F;
                document.GetField("Tag").Boost = 5.0F;
                writer.UpdateDocument(new Term("Id", articleEntity.Id.ToString()), document);

                return true;
            }
            catch (Exception ex)
            {
                AppLog.Write("更新索引失败" + articleEntity.Id.ToString(), AppLog.LogMessageType.Error, ex);
                return false;
            }
            finally
            {
                writer.Dispose();
                dir.Dispose();
            }
        }
        #endregion 

        /// <summary>
        /// Optimizes the index (merges segments) for faster searches.
        /// </summary>
        /// <returns>true when optimization completed.</returns>
        public bool Optimize()
        {
            Directory dir = FSDirectory.Open(m_IndexPath);
            IndexWriter writer = new IndexWriter(dir, m_Analyzer, IndexWriter.MaxFieldLength.UNLIMITED); // UNLIMITED: no field-length cap
            try
            {
                writer.Optimize();
            }
            catch (Exception ex)
            {
                // Preserve the original failure as the inner exception instead of discarding
                // it; the "return false;" that used to follow this throw was unreachable.
                throw new Exception("索引优化失败," + ex.Message, ex);
            }
            finally
            {
                writer.Dispose();
                dir.Dispose();
            }
            return true;
        }


        /// <summary>
        /// Removes every document from the index.
        /// </summary>
        /// <returns>true when the index was cleared.</returns>
        public bool DeleteAll()
        {
            Directory dir = FSDirectory.Open(m_IndexPath);
            IndexWriter writer = new IndexWriter(dir, m_Analyzer, IndexWriter.MaxFieldLength.UNLIMITED); // UNLIMITED: no field-length cap
            try
            {
                writer.DeleteAll();
            }
            catch (Exception ex)
            {
                // Preserve the original failure as the inner exception instead of discarding
                // it; the "return false;" that used to follow this throw was unreachable.
                throw new Exception("索引删除失败," + ex.Message, ex);
            }
            finally
            {
                writer.Dispose();
                dir.Dispose();
            }
            return true;
        }
        /// <summary>
        /// Segments a phrase with the PanGu tokenizer and joins the kept terms with
        /// single spaces, ready to feed to a Lucene query parser.
        /// </summary>
        /// <param name="keywords">Raw search phrase.</param>
        /// <param name="ktTokenizer">Tokenizer used for segmentation.</param>
        /// <returns>Space-separated terms; empty string when nothing survives filtering.</returns>
        public string GetKeyWordsSplitBySpace(string keywords, PanGuTokenizer ktTokenizer)
        {
            List<string> terms = new List<string>();
            foreach (WordInfo info in ktTokenizer.SegmentToWordInfos(keywords))
            {
                if (info == null)
                {
                    continue;
                }
                // Keep multi-character words plus tokens tagged POS_D_N or POS_UNK...
                bool candidate = info.Word.Length > 1 || info.Pos == POS.POS_D_N || info.Pos == POS.POS_UNK;
                // ...unless the tag is in the excluded part-of-speech set.
                bool excluded = info.Pos == POS.POS_A_Q || info.Pos == POS.POS_D_C || info.Pos == POS.POS_D_U || info.Pos == POS.POS_D_P;
                if (candidate && !excluded)
                {
                    terms.Add(info.Word);
                }
            }
            return string.Join(" ", terms.ToArray());
        }
        /// <summary>
        /// Segments a phrase and returns the kept terms as a list.
        /// </summary>
        /// <param name="keyword">Raw phrase to segment.</param>
        /// <returns>Segmented terms, excluding the filtered parts of speech.</returns>
        public List<string> GetKeyWordsList(string keyword)
        {
            List<string> result = new List<string>();
            foreach (WordInfo info in KtTokenizer.SegmentToWordInfos(keyword))
            {
                if (info == null)
                {
                    continue;
                }
                // Drop tokens whose part-of-speech tag is in the excluded set.
                bool excluded = info.Pos == POS.POS_A_Q
                    || info.Pos == POS.POS_UNK
                    || info.Pos == POS.POS_D_C
                    || info.Pos == POS.POS_D_D
                    || info.Pos == POS.POS_D_U
                    || info.Pos == POS.POS_D_R
                    || info.Pos == POS.POS_D_P;
                if (!excluded)
                {
                    result.Add(info.Word);
                }
            }
            return result;
        }
    }
}
