package org.grayrabbit.cms.service.lucene.impl;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Version;
import org.grayrabbit.cms.dao.search.KeywordsDao;
import org.grayrabbit.cms.dao.search.KeywordsRecommendDao;
import org.grayrabbit.cms.entity.Keywords;
import org.grayrabbit.cms.entity.KeywordsRecommend;
import org.grayrabbit.cms.service.lucene.KeywordsLuceneService;
import org.grayrabbit.cms.util.FilePathSptUtil;
import org.grayrabbit.cms.util.lucene.AnalyzerUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.wltea.analyzer.lucene.IKQueryParser;
import org.wltea.analyzer.lucene.IKSimilarity;


@Service
@Transactional
public class KeywordsLuceneServiceImpl implements KeywordsLuceneService {

	private final static Logger logger = LoggerFactory.getLogger(KeywordsLuceneServiceImpl.class);

	// Index field names.
	private final static String ID = "keywordsId";       // keywords primary key (stored, not analyzed)
	private final static String KEYWORD = "keyword";     // exact keyword text (not analyzed)
	private final static String FCKEYWORD = "fckeyword"; // keyword text, IK-analyzed for fuzzy matching
	private final static String PY = "py";               // full pinyin, lower-cased
	private final static String FC = "fc";               // pre-split words of the keyword
	private final static String SZM = "szm";             // pinyin first letters, lower-cased
	private final static String TJIDS = "tjIds";         // comma-joined recommend ids

	// Live index directory used by all search methods.
	private final static String PATH_INDEX = FilePathSptUtil.LUCENE_INDEX + File.separator + "keywords";
	// Staging directory used while rebuilding the whole index.
	private final static String PATH_INDEX_NEW = FilePathSptUtil.LUCENE_INDEX + File.separator + "keywords_NEW";
	// Page size when batch-indexing keywords from the database.
	private final static int MAX_INDEX_COUNT = 500;

	@Autowired
	@Qualifier("keywordsRecommendDaoImpl")
	private KeywordsRecommendDao keywordsRecommendDao;

	@Autowired
	@Qualifier("keywordsDaoImpl")
	private KeywordsDao keywordsDao;

	/**
	 * Deletes the index document whose {@code keywordsId} equals the given id.
	 *
	 * @param id keywords primary key
	 * @return true when the deletion committed, false on any failure
	 */
	public boolean deleteIndex(String id) {
		IndexWriter indexWriter = null;
		Directory d = null;
		try {
			d = FSDirectory.open(new File(PATH_INDEX));
			waitForUnlock(d);
			// FIX: was Version.LUCENE_30 while every other method uses LUCENE_31.
			IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Version.LUCENE_31, AnalyzerUtil.getIkAnalyzer());
			indexWriter = new IndexWriter(d, indexWriterConfig);
			indexWriter.deleteDocuments(new Term(ID, id));
			indexWriter.optimize();
			indexWriter.commit();
			logger.debug("共有索引{}个", indexWriter.numDocs());
			return true;
		} catch (CorruptIndexException e) {
			logger.error("索引删除异常", e);
		} catch (LockObtainFailedException e) {
			logger.error("索引删除异常", e);
		} catch (IOException e) {
			logger.error("索引不存在", e);
		} catch (Exception e) {
			logger.error("索引删除异常", e);
		} finally {
			closeQuietly(indexWriter, d);
		}
		return false;
	}

	/**
	 * Adds or replaces the index document for the given keyword (upsert: the
	 * old document is deleted first, then the fresh one is added).
	 *
	 * @param k    the keyword entity to index
	 * @param list recommendations whose ids are stored alongside the keyword
	 * @return true when the save committed, false on any failure
	 */
	public boolean saveIndex(Keywords k, List<KeywordsRecommend> list) {
		IndexWriter indexWriter = null;
		Directory d = null;
		try {
			d = FSDirectory.open(new File(PATH_INDEX));
			waitForUnlock(d);
			IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31, AnalyzerUtil.getIkAnalyzer());
			indexWriter = new IndexWriter(d, conf);
			// Delete the previous version first, whether or not it exists.
			// BUGFIX: the ID field is indexed with getKeywordsId() (see getDocument),
			// but the old code built the delete term from getKeywords(), which never
			// matched — every save duplicated the document instead of replacing it.
			indexWriter.deleteDocuments(new Term(ID, k.getKeywordsId()));
			indexWriter.addDocument(getDocument(k, list));
			indexWriter.optimize();
			indexWriter.commit();
			logger.debug("共有索引{}个", indexWriter.numDocs());
			return true;
		} catch (CorruptIndexException e) {
			logger.error("索引添加异常", e);
		} catch (LockObtainFailedException e) {
			logger.error("索引添加异常", e);
		} catch (IOException e) {
			logger.error("索引不存在", e);
		} catch (Exception e) {
			logger.error("索引添加异常", e);
		} finally {
			closeQuietly(indexWriter, d);
		}
		return false;
	}

	/**
	 * Rebuilds the whole keyword index from the database into the staging
	 * directory (in {@code MAX_INDEX_COUNT}-sized pages), then swaps it into
	 * the live directory via {@link #copyDirectory()}.
	 */
	public void createIndex() {
		File file = new File(PATH_INDEX_NEW);
		resetDirectory(file);
		IndexWriter indexWriter = null;
		Directory d = null;
		try {
			d = FSDirectory.open(file);
			IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31, AnalyzerUtil.getIkAnalyzer());
			// Open mode CREATE overwrites any existing index; APPEND would add to it.
			conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
			indexWriter = new IndexWriter(d, conf);

			int remaining = keywordsDao.countEnable();
			int page = 0;
			while (remaining > 0) {
				List<Keywords> list = keywordsDao.listEnable(page * MAX_INDEX_COUNT, MAX_INDEX_COUNT);
				page++;
				for (Keywords k : list) {
					indexWriter.addDocument(getDocument(k, keywordsRecommendDao.list(k.getKeywordsId())));
				}
				remaining -= MAX_INDEX_COUNT;
				list.clear();
				keywordsDao.clear(); // release the DAO session cache between pages
				logger.debug("当前共有索引{}个", indexWriter.numDocs());
				Thread.sleep(100); // brief pause between pages
			}
			logger.debug("索引结束,共有索引{}个", indexWriter.numDocs());
			// Merge and flush the finished index.
			indexWriter.optimize();
			indexWriter.commit();
		} catch (CorruptIndexException e) {
			logger.error("索引添加异常", e);
		} catch (LockObtainFailedException e) {
			logger.error("索引添加异常", e);
		} catch (IOException e) {
			logger.error("索引不存在", e);
		} catch (Exception e) {
			logger.error("索引添加异常", e);
		} finally {
			closeQuietly(indexWriter, d);
		}
		logger.debug("替换原有索引开始.....");
		copyDirectory();
		logger.debug("替换原有索引结束.....");
	}

	/**
	 * Replaces the live index with the freshly built staging index: empties
	 * the live directory, then merges the staging index into it.
	 */
	private void copyDirectory() {
		// Wipe the current live index.
		File file = new File(PATH_INDEX);
		resetDirectory(file);
		IndexWriter indexWriter = null;
		Directory d = null;
		try {
			d = FSDirectory.open(file);
			IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31, AnalyzerUtil.getIkAnalyzer());
			// Open mode CREATE overwrites any existing index; APPEND would add to it.
			conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
			indexWriter = new IndexWriter(d, conf);
			// Pull in everything from the staging index.
			indexWriter.addIndexes(FSDirectory.open(new File(PATH_INDEX_NEW)));
			indexWriter.optimize();
			indexWriter.commit();
		} catch (CorruptIndexException e) {
			logger.error("索引添加异常", e);
		} catch (LockObtainFailedException e) {
			logger.error("索引添加异常", e);
		} catch (IOException e) {
			logger.error("索引不存在", e);
		} catch (Exception e) {
			logger.error("索引添加异常", e);
		} finally {
			closeQuietly(indexWriter, d);
		}
	}

	/**
	 * Finds up to 10 keywords related to the given word: matches the
	 * IK-analyzed keyword text, the pinyin, the first letters, or the split
	 * words (all as substring wildcards), excluding the word itself.
	 *
	 * @param word the search term
	 * @return matching keywords, or an empty list on error
	 */
	public List<Keywords> likeWord(String word) {
		List<Keywords> list = new ArrayList<Keywords>();
		IndexReader reader = null;
		try {
			reader = IndexReader.open(FSDirectory.open(new File(PATH_INDEX)));
			IndexSearcher searcher = new IndexSearcher(reader);
			searcher.setSimilarity(new IKSimilarity());
			BooleanQuery bq = new BooleanQuery();
			bq.add(IKQueryParser.parse(FCKEYWORD, word), Occur.SHOULD);
			bq.add(new TermQuery(new Term(KEYWORD, word)), Occur.MUST_NOT); // exclude the word itself
			bq.add(new WildcardQuery(new Term(PY, "*" + word + "*")), Occur.SHOULD);
			bq.add(new WildcardQuery(new Term(SZM, "*" + word + "*")), Occur.SHOULD);
			bq.add(new WildcardQuery(new Term(FC, "*" + word + "*")), Occur.SHOULD);
			TopDocs docs = searcher.search(bq, 10);
			// BUGFIX: iterate scoreDocs, not totalHits — totalHits may exceed the
			// requested top-10 and would overrun the scoreDocs array.
			for (ScoreDoc sd : docs.scoreDocs) {
				list.add(toKeywords(searcher.doc(sd.doc)));
			}
		} catch (Exception e) {
			logger.error("搜索异常", e);
			return new ArrayList<Keywords>();
		} finally {
			closeQuietly(reader);
		}
		return list;
	}

	/**
	 * Finds up to 10 keywords whose text, pinyin, or first letters start with
	 * the given word (prefix wildcard; pinyin/letters compared lower-case).
	 *
	 * @param word the prefix to complete
	 * @return matching keywords, or an empty list on error
	 */
	public List<Keywords> beginWord(String word) {
		List<Keywords> list = new ArrayList<Keywords>();
		IndexReader reader = null;
		try {
			reader = IndexReader.open(FSDirectory.open(new File(PATH_INDEX)));
			IndexSearcher searcher = new IndexSearcher(reader);
			BooleanQuery query = new BooleanQuery();
			query.add(new WildcardQuery(new Term(KEYWORD, word + "*")), Occur.SHOULD);
			query.add(new WildcardQuery(new Term(PY, word.toLowerCase() + "*")), Occur.SHOULD);
			query.add(new WildcardQuery(new Term(SZM, word.toLowerCase() + "*")), Occur.SHOULD);
			TopDocs docs = searcher.search(query, 10);
			// BUGFIX: iterate scoreDocs, not totalHits (see likeWord).
			for (ScoreDoc sd : docs.scoreDocs) {
				list.add(toKeywords(searcher.doc(sd.doc)));
			}
		} catch (Exception e) {
			logger.error("搜索异常", e);
			return new ArrayList<Keywords>();
		} finally {
			closeQuietly(reader);
		}
		return list;
	}

	/**
	 * Not implemented yet; always returns null.
	 *
	 * @param word the word to suggest for (ignored)
	 * @return null
	 */
	public Keywords suggestWord(String word) {
		return null;
	}

	/**
	 * Looks up the recommendations stored for an exact keyword: reads the
	 * comma-joined recommend ids from the index, then loads and initializes
	 * each entity through the DAO.
	 *
	 * @param word the exact keyword text
	 * @return the recommendations, or an empty list when none exist or on error
	 */
	public List<KeywordsRecommend> listRecomend(String word) {
		List<KeywordsRecommend> list = new ArrayList<KeywordsRecommend>();
		IndexReader reader = null;
		try {
			reader = IndexReader.open(FSDirectory.open(new File(PATH_INDEX)));
			IndexSearcher searcher = new IndexSearcher(reader);
			String ids = null;
			TopDocs docs = searcher.search(new TermQuery(new Term(KEYWORD, word)), 1);
			// BUGFIX: iterate scoreDocs, not totalHits — the search is capped at 1
			// result, so totalHits > 1 would overrun the array.
			for (ScoreDoc sd : docs.scoreDocs) {
				ids = searcher.doc(sd.doc).get(TJIDS);
			}
			if (StringUtils.isNotBlank(ids)) {
				for (String id : ids.split(",")) {
					KeywordsRecommend kr = keywordsRecommendDao.findById(id);
					keywordsRecommendDao.initialize(kr);
					list.add(kr);
				}
			}
		} catch (Exception e) {
			logger.error("搜索异常", e);
		} finally {
			// FIX: close moved into finally so the reader is released on all paths.
			closeQuietly(reader);
		}
		return list;
	}

	/**
	 * Builds the Lucene document for one keyword. Every field is stored so
	 * the entity can be reconstructed from a search hit; only FCKEYWORD is
	 * analyzed (by the IK analyzer) for fuzzy matching.
	 *
	 * @param k    the keyword entity
	 * @param list recommendations whose ids are joined with commas into TJIDS
	 * @return the document ready for indexing
	 */
	private Document getDocument(Keywords k, List<KeywordsRecommend> list) {
		Document doc = new Document();
		doc.add(new Field(ID, k.getKeywordsId(), Store.YES, Index.NOT_ANALYZED));
		doc.add(new Field(KEYWORD, k.getKeywords(), Store.YES, Index.NOT_ANALYZED));
		doc.add(new Field(FCKEYWORD, k.getKeywords(), Store.YES, Index.ANALYZED));
		doc.add(new Field(PY, k.getPinyin().toLowerCase(), Store.YES, Index.NOT_ANALYZED));
		// Split words are matched as substrings to find related keywords.
		doc.add(new Field(FC, StringUtils.trimToEmpty(k.getSplitWords()), Store.YES, Index.NOT_ANALYZED));
		doc.add(new Field(SZM, k.getFirstLetter().toLowerCase(), Store.YES, Index.NOT_ANALYZED));
		if (list.isEmpty()) {
			doc.add(new Field(TJIDS, "", Store.YES, Index.NOT_ANALYZED));
		} else {
			StringBuilder sb = new StringBuilder();
			for (KeywordsRecommend kr : list) {
				sb.append(kr.getKwrecommendId());
				sb.append(",");
			}
			// Drop the trailing comma.
			doc.add(new Field(TJIDS, sb.substring(0, sb.length() - 1), Store.YES, Index.NOT_ANALYZED));
		}
		return doc;
	}

	/**
	 * Rebuilds a Keywords entity (marked enabled) from a stored index document.
	 */
	private Keywords toKeywords(Document doc) {
		Keywords k = new Keywords();
		k.setKeywordsId(doc.get(ID));
		k.setKeywords(doc.get(KEYWORD));
		k.setFirstLetter(doc.get(SZM));
		k.setIsEnable(true);
		k.setPinyin(doc.get(PY));
		k.setSplitWords(doc.get(FC));
		return k;
	}

	/**
	 * Blocks until no other writer holds the lock on the given directory,
	 * polling once per second and logging while waiting.
	 */
	private void waitForUnlock(Directory d) throws IOException, InterruptedException {
		while (d != null && IndexWriter.isLocked(d)) {
			Thread.sleep(1000);
			logger.error("索引已经锁住，正在等待....");
		}
	}

	/**
	 * Empties the given directory (or deletes the plain file), creating the
	 * directory when it does not exist yet.
	 */
	private void resetDirectory(File file) {
		if (file.exists()) {
			if (file.isFile()) {
				file.delete();
			} else {
				File[] children = file.listFiles();
				if (children != null) { // listFiles can return null on I/O error
					for (File child : children) {
						child.delete();
					}
				}
			}
		} else {
			file.mkdirs();
		}
	}

	/**
	 * Closes the writer (when non-null) and force-unlocks the directory if a
	 * stale lock remains. Never throws; intended for finally blocks.
	 */
	private void closeQuietly(IndexWriter indexWriter, Directory d) {
		if (indexWriter == null) {
			return;
		}
		try {
			indexWriter.close();
		} catch (CorruptIndexException e) {
			logger.error("索引关闭异常", e);
		} catch (IOException e) {
			logger.error("索引关闭异常", e);
		} finally {
			try {
				if (d != null && IndexWriter.isLocked(d)) {
					IndexWriter.unlock(d);
				}
			} catch (IOException e) {
				logger.error("解锁异常", e);
			}
		}
	}

	/**
	 * Closes the reader (when non-null), logging instead of throwing.
	 * Closing the reader also releases the searcher built on top of it.
	 */
	private void closeQuietly(IndexReader reader) {
		if (reader != null) {
			try {
				reader.close();
			} catch (IOException e) {
				logger.error("Reader 关闭异常", e);
			}
		}
	}

}
