﻿using Publicuse.Entity;
using Publicuse.Entity.AttributeUtil;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.PanGu;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Microsoft.Office.Interop.Word;
using Org.BouncyCastle.Bcpg.OpenPgp;
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Task = System.Threading.Tasks.Task;

namespace Publicuse.Util.Lucence
{
    public class BuildUtil
    {
        /// <summary>
        /// Searches the Lucene index at <paramref name="path"/> for documents whose
        /// "sName" field contains <paramref name="sname"/> (wildcard match) and maps
        /// every hit back onto a fresh <typeparamref name="T"/> instance.
        /// </summary>
        /// <typeparam name="T">Entity type with a parameterless constructor; its mapped properties are filled from the index.</typeparam>
        /// <param name="sname">Substring to match against the "sName" field.</param>
        /// <param name="scode">Currently unused; kept for signature compatibility with existing callers.</param>
        /// <param name="path">Directory that holds the index files.</param>
        /// <returns>A <see cref="CommonResult"/> with the hit count and entity list, or an error message when the index directory is missing.</returns>
        public static async Task<CommonResult> GetList<T>(string sname, string scode, string path) where T : new()
        {
            // Task.Run is the recommended shorthand for Task.Factory.StartNew with default options.
            return await Task.Run(() =>
            {
                CommonResult result = new CommonResult();
                if (!FileUtil.ExistsDir(path))
                {
                    result.msg = "索引不存在！";
                    return result;
                }
                List<T> list = new List<T>();
                // BUG FIX: the original opened the directory and searcher without ever
                // disposing them, leaking index file handles. The original bare { } block
                // after "new IndexSearcher(dir)" strongly suggests a "using" was intended.
                using (FSDirectory dir = FSDirectory.Open(path))
                using (IndexSearcher searcher = new IndexSearcher(dir))
                {
                    // Wildcard query: '?' matches one arbitrary character, '*' matches
                    // zero or more — so *term* finds any sName containing the term.
                    Query query = new WildcardQuery(new Term("sName", $"*{sname}*"));
                    TopDocs docs = searcher.Search(query, null, 10000);
                    Type type = typeof(T);
                    foreach (ScoreDoc sd in docs.ScoreDocs)
                    {
                        Document doc = searcher.Doc(sd.Doc);
                        T entity = new T();
                        // Copy each stored field back onto the matching entity property.
                        // GetValue(pi) is a project extension that converts the stored
                        // string to the property's type — presumably; verify in AttributeUtil.
                        foreach (var pi in type.GetPropertiesWithNoQuery())
                        {
                            pi.SetValue(entity, doc.Get(pi.GetMappingName()).GetValue(pi));
                        }
                        list.Add(entity);
                    }
                }
                result.count = list.Count;
                result.data = list;
                return result;
            });
        }

        /// <summary>
        /// Opens (or creates) the index at <paramref name="IndexPath"/>, configures an
        /// <see cref="IndexWriter"/>, and hands it to <paramref name="action"/>.
        /// Any exception is captured into <paramref name="msg"/> instead of propagating.
        /// </summary>
        /// <param name="IndexPath">Directory for the index; created if missing.</param>
        /// <param name="msg">Empty on success; the exception message on failure.</param>
        /// <param name="actionType">Reserved for per-action logging (add vs. update); currently unused.</param>
        /// <param name="analyzer">Analyzer used for tokenizing indexed fields.</param>
        /// <param name="action">Callback that performs the actual writes.</param>
        private static void InitIndexWrite(string IndexPath, out string msg, BusinessActionType actionType, Analyzer analyzer, Action<IndexWriter> action)
        {
            msg = string.Empty;
            try
            {
                DirectoryInfo dirInfo = System.IO.Directory.CreateDirectory(IndexPath);
                // BUG FIX: dispose the FSDirectory as well; the original leaked it.
                using (FSDirectory directory = FSDirectory.Open(dirInfo))
                {
                    // An empty directory means there is no index yet, so create one.
                    bool isCreate = dirInfo.GetFiles().Length == 0;
                    using (IndexWriter writer = new IndexWriter(directory, analyzer, isCreate, IndexWriter.MaxFieldLength.LIMITED))
                    {
                        writer.SetMaxBufferedDocs(100); // docs buffered in memory before a new segment is flushed (default 10)
                        writer.MergeFactor = 100;       // how often segments are merged (default 10)
                        writer.UseCompoundFile = true;  // compound files keep the index file count low
                        action.Invoke(writer);
                    }
                }
            }
            catch (Exception ex)
            {
                // TODO(review): restore per-actionType logging here if LogUtil comes back.
                msg = ex.Message;
            }
        }

        /// <summary>
        /// Adds the given entities to the index at <paramref name="IndexPath"/>.
        /// No-op when <paramref name="data"/> is null or empty.
        /// </summary>
        /// <typeparam name="T">Entity type whose mapped properties become index fields.</typeparam>
        /// <param name="IndexPath">Directory for the index; created if missing.</param>
        /// <param name="msg">Empty on success; the exception message on failure.</param>
        /// <param name="data">Entities to index.</param>
        public static void AddDataToIndex<T>(string IndexPath, out string msg, params T[] data)
        {
            msg = string.Empty;
            if (data == null || data.Length == 0)
            {
                return;
            }
            InitIndexWrite(IndexPath, out msg, BusinessActionType.add, new PanGuAnalyzer(), writer =>
            {
                // CONSISTENCY FIX: the original duplicated the entity-to-Document mapping
                // inline; it now shares ParseToDoc with UpdatToIndex.
                foreach (T entity in data)
                {
                    writer.AddDocument(ParseToDoc(entity));
                }
                writer.Optimize(); // merge segments so subsequent searches stay fast
            });
        }

        /// <summary>
        /// Deletes the index documents whose primary-key term matches each entity in
        /// <paramref name="data"/>. No-op when <paramref name="data"/> is null or empty.
        /// </summary>
        /// <typeparam name="T">Entity type; its key property supplies the delete term.</typeparam>
        /// <param name="IndexPath">Directory that holds the index; created if missing.</param>
        /// <param name="msg">Empty on success; the exception message on failure.</param>
        /// <param name="data">Entities identifying the documents to delete.</param>
        public static void DeleteToIndex<T>(string IndexPath, out string msg, params T[] data)
        {
            msg = string.Empty;
            if (data == null || data.Length == 0) return;

            IndexReader reader = null;
            try
            {
                // BUG FIX: removed an unused StandardAnalyzer the original created here.
                DirectoryInfo dirInfo = System.IO.Directory.CreateDirectory(IndexPath);
                using (Lucene.Net.Store.Directory directory = FSDirectory.Open(dirInfo))
                {
                    // readOnly: false — the reader must be writable to delete documents.
                    reader = IndexReader.Open(directory, false);
                    var key = TableAtributeUtil.GetPrimaryKey<T>();
                    var pi = typeof(T).GetPropertiesWithKey().FirstOrDefault();
                    foreach (var item in data)
                    {
                        var keyvalue = pi.GetValue(item);
                        reader.DeleteDocuments(new Term(key, keyvalue.ToString()));
                    }
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: the original swallowed the exception and left msg empty,
                // so callers could not tell the delete had failed.
                msg = ex.Message;
            }
            finally
            {
                if (reader != null)
                {
                    reader.Dispose();
                }
            }
        }

        /// <summary>
        /// Replaces the index documents for the given entities: for each entity, the
        /// document matching its primary-key term is deleted and re-added.
        /// (Method name typo "Updat" is kept for caller compatibility.)
        /// </summary>
        /// <typeparam name="T">Entity type; its key property supplies the update term.</typeparam>
        /// <param name="IndexPath">Directory that holds the index; created if missing.</param>
        /// <param name="msg">Empty on success; the exception message on failure.</param>
        /// <param name="data">Entities to rewrite in the index.</param>
        public static void UpdatToIndex<T>(string IndexPath, out string msg, params T[] data)
        {
            InitIndexWrite(IndexPath, out msg, BusinessActionType.update, new PanGuAnalyzer(), writer =>
            {
                var key = TableAtributeUtil.GetPrimaryKey<T>();
                var pi = typeof(T).GetPropertiesWithKey().FirstOrDefault();
                foreach (var item in data)
                {
                    var keyvalue = pi.GetValue(item);
                    writer.UpdateDocument(new Term(key, keyvalue.ToString()), ParseToDoc(item));
                }
            });
        }

        /// <summary>
        /// Builds a per-field analyzer: PanGu (Chinese word segmentation) for "title",
        /// StandardAnalyzer for "categoryid", StandardAnalyzer as the default.
        /// NOTE(review): currently unreferenced within this class.
        /// </summary>
        /// <returns>The configured <see cref="PerFieldAnalyzerWrapper"/>.</returns>
        private PerFieldAnalyzerWrapper CreateAnalyzerWrapper()
        {
            Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
            PerFieldAnalyzerWrapper analyzerWrapper = new PerFieldAnalyzerWrapper(analyzer);
            analyzerWrapper.AddAnalyzer("title", new PanGuAnalyzer());
            analyzerWrapper.AddAnalyzer("categoryid", new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30));
            return analyzerWrapper;
        }

        /// <summary>
        /// Converts an entity into a Lucene <see cref="Document"/>: one stored field per
        /// mapped property, tokenized only when the property carries [ANALYZED].
        /// </summary>
        /// <typeparam name="T">Entity type to convert.</typeparam>
        /// <param name="entity">Entity instance supplying the field values.</param>
        /// <returns>The populated document.</returns>
        private static Document ParseToDoc<T>(T entity)
        {
            Document doc = new Document();
            foreach (var pi in typeof(T).GetPropertiesWithNoQuery())
            {
                bool analyzed = pi.GetCustomAttributes(typeof(ANALYZEDAttribute), true).Any();
                // BUG FIX: null property values no longer throw NullReferenceException;
                // Lucene's Field rejects null, so fall back to an empty string.
                string value = pi.GetValue(entity)?.ToString() ?? string.Empty;
                // Field arguments: name, value, store the raw value, tokenize or not.
                doc.Add(new Field(pi.GetMappingName(), value, Field.Store.YES,
                    analyzed ? Field.Index.ANALYZED : Field.Index.NOT_ANALYZED));
            }
            return doc;
        }
    }
}
