package com.pengtu.dao.asset.Message;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.document.DateTools.Resolution;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.springframework.stereotype.Repository;
import org.wltea.analyzer.lucene.IKAnalyzer;
import org.wltea.analyzer.lucene.IKQueryParser;
import org.wltea.analyzer.lucene.IKSimilarity;

import com.pengtu.dao.hibernate.HibernateDao;
import com.pengtu.entity.asset.Message.Publish;
import com.pengtu.model.PublishModel;
import com.pengtu.utils.DateUtils;


@Repository
public class PublishDao extends HibernateDao<Publish, Serializable>{

	/**
	 * Returns up to ten non-deleted notices, newest first.
	 * <p>
	 * NOTE(review): the {@code rownum} predicate looks Oracle-specific and, in
	 * Oracle, rownum is assigned BEFORE {@code order by} is applied — so this
	 * may return an arbitrary 10 rows sorted by date rather than the 10 newest.
	 * Confirm against the actual dialect / generated SQL.
	 */
	public List<Publish> getTopNotice() {
		String hql = "from Publish where rownum <= 10 and delFlag = 0 order by createDate desc ";
		return this.find(hql);
	}

	/**
	 * Rebuilds the Lucene full-text index for ALL Publish rows from scratch
	 * under {@code <index>/publish} (OpenMode.CREATE drops any existing index).
	 *
	 * @param index root directory holding the per-entity Lucene indexes
	 */
	public void createIndexByLucene(String index) {
		IndexWriter iw = null;
		try {
			File fsDir = new File(index, "publish");
			Analyzer analyzer = new IKAnalyzer();
			IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_31,
					analyzer);
			Directory dir = FSDirectory.open(fsDir);
			iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE); // always recreate the index
			if (IndexWriter.isLocked(dir)) {
				// Clear a stale write lock left behind by a crashed/unclosed writer.
				IndexWriter.unlock(dir);
			}
			iw = new IndexWriter(dir, iwc);
			List<Publish> publishList = super.getAll();
			int size = publishList.size();
			long startTime = System.currentTimeMillis();

			// One Lucene Document per Publish row.
			for (Publish publish : publishList) {
				Document doc = new Document();
				doc.add(new Field("id", publish.getId(), Field.Store.YES,
						Field.Index.ANALYZED));
				doc.add(new Field("title", publish.getTitle(), Field.Store.YES,
						Field.Index.ANALYZED));
				doc.add(new Field("publishContent", publish.getPublishContent(),
						Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("publishPerson", publish.getPublishPerson()
						.getName(), Field.Store.YES, Field.Index.ANALYZED));
				if (publish.getCreateDate() != null) {
					// Stored NOT_ANALYZED so the date can be used in range filters.
					doc.add(new Field("createDate", DateTools.dateToString(
							publish.getCreateDate(), Resolution.MINUTE),
							Field.Store.YES, Field.Index.NOT_ANALYZED));
				}
				iw.addDocument(doc);
			}
			// Merge segments for faster searching.
			iw.optimize();
			long endTime = System.currentTimeMillis();
			System.out.println("一共" + size + ",这花费了" + (endTime - startTime)
					+ " 毫秒来把文档增加到索引里面去!");
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// Always release the writer, otherwise the index stays write-locked.
			if (iw != null) {
				try {
					iw.close();
				} catch (Exception ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
	}

	/**
	 * Full-text search over title, content and publisher name, restricted to a
	 * createDate range, with HTML highlighting of the matched terms.
	 *
	 * @param index     root directory holding the per-entity Lucene indexes
	 * @param keyword   user-supplied search phrase (IK-analyzed per field)
	 * @param startDate lower bound for createDate (inclusive); assumed to be in
	 *                  the same DateTools MINUTE format the index stores —
	 *                  confirm with callers
	 * @param endDate   upper bound for createDate (inclusive); same format
	 * @return up to 100 matching rows as view models; empty list on error
	 */
	public List<PublishModel> SearchByLucene(String index, String keyword,
			String startDate, String endDate) {
		File fsDir = new File(index, "publish");
		Analyzer analyzer = new IKAnalyzer();
		List<PublishModel> publishList = new ArrayList<PublishModel>();
		IndexReader reader = null;
		IndexSearcher isearcher = null;
		try {
			// Open read-only: we only search here.
			reader = IndexReader.open(FSDirectory.open(fsDir), true);
			isearcher = new IndexSearcher(reader);

			BooleanQuery booleanQuery = new BooleanQuery();

			Query query1 = IKQueryParser.parse("title", keyword);
			query1.setBoost(1.5f); // title matches rank above content/person
			booleanQuery.add(query1, Occur.SHOULD);

			Query query2 = IKQueryParser.parse("publishContent", keyword);
			query2.setBoost(1.0f);
			booleanQuery.add(query2, Occur.SHOULD);

			Query query3 = IKQueryParser.parse("publishPerson", keyword);
			query3.setBoost(1.0f);
			booleanQuery.add(query3, Occur.SHOULD);

			// Date-range restriction, applied as a filter so it does not affect scoring.
			BooleanQuery filterBooleanQuery = new BooleanQuery();
			TermRangeQuery rangeQuery = new TermRangeQuery("createDate", startDate, endDate, true, true);
			filterBooleanQuery.add(rangeQuery, BooleanClause.Occur.MUST);

			Filter filter = new CachingWrapperFilter(new QueryWrapperFilter(
					filterBooleanQuery));

			TopScoreDocCollector collector = TopScoreDocCollector.create(100,
					true);
			// IK-specific similarity for CJK-aware scoring.
			isearcher.setSimilarity(new IKSimilarity());
			isearcher.search(booleanQuery, filter, collector);

			ScoreDoc[] hits = collector.topDocs(0, 100).scoreDocs;

			// Highlighters are loop-invariant: build them once, reuse per hit.
			Formatter formatter = new SimpleHTMLFormatter("<font style='background-color:#F9F400;'>", "</font>");
			Highlighter h1 = new Highlighter(formatter, new QueryScorer(query1));
			Highlighter h2 = new Highlighter(formatter, new QueryScorer(query2));
			Highlighter h3 = new Highlighter(formatter, new QueryScorer(query3));

			for (ScoreDoc h : hits) {
				PublishModel publishModel = new PublishModel();
				Document d = isearcher.doc(h.doc);
				String title = d.get("title");
				String publishContent = d.get("publishContent");
				String publishPerson = d.get("publishPerson");
				// getBestFragment returns null when the field holds no match.
				String title1 = h1.getBestFragment(analyzer, "title", title);
				String publishContent1 = h2.getBestFragment(analyzer, "publishContent", publishContent);
				String publishPerson1 = h3.getBestFragment(analyzer, "publishPerson", publishPerson);
				publishModel.setId(d.get("id"));
				if (title1 != null) {
					publishModel.setTitle(title1);
				} else {
					publishModel.setTitle(title);
				}
				if (publishContent1 != null) {
					// Escape the stored HTML, then un-escape only the highlight
					// <font> wrappers so they render as markup.
					publishContent1 = com.pengtu.utils.StringUtils.changeHTML(publishContent1);
					publishContent1 = publishContent1
							.replace("&lt;font&nbsp;", "<font ")
							.replace("'&gt;", "'>")
							.replace("&lt;/font", "</font")
							.replace("font&gt;", "font>");
					publishModel.setPublishContent(publishContent1);
				} else {
					// No match in the body: show an escaped 100-char preview.
					publishContent = com.pengtu.utils.StringUtils.changeHTML(publishContent);
					if (publishContent.length() > 100) {
						publishContent = publishContent.substring(0, 100);
					}
					publishModel.setPublishContent(publishContent);
				}
				if (publishPerson1 != null) {
					publishModel.setPublishPerson(publishPerson1);
				} else {
					publishModel.setPublishPerson(publishPerson);
				}
				publishModel.setCreateDate(DateUtils.toDate(d.get("createDate")));
				publishList.add(publishModel);
			}
			System.out.println("在公告Table中找到：" + hits.length + " 个");
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// Release searcher and reader even when the search fails,
			// otherwise index file handles leak on every error.
			if (isearcher != null) {
				try {
					isearcher.close();
				} catch (Exception ignored) {
					// best-effort close
				}
			}
			if (reader != null) {
				try {
					reader.close();
				} catch (Exception ignored) {
					// best-effort close
				}
			}
		}
		return publishList;
	}

}
