package com.mijie.homi.search.service.index;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.lucene.BigramAnalyzer;

import com.mijie.core.ApplicationProperties;
import com.mijie.homi.common.domain.SearchResult;
import com.mijie.homi.search.domain.topic.Topic;
import com.mijie.homi.search.domain.topic.TopicOptions;
import com.mijie.homi.search.service.topic.ClassifyService;
import com.mijie.homi.search.service.topic.TopicService;
import com.mijie.util.DateUtil;

@Service
public class TopicIndexService extends AbstractIndexService {

	@Autowired
	private TopicService topicService;
	@Autowired
	private ClassifyService classifyService;

	private static final Logger logger = Logger.getLogger(TopicIndexService.class);

	/** Analyzer used both at index time and at query time for topic text. */
	private final Analyzer bigramAnalyzer = new BigramAnalyzer();

	/** Root directory for topic index generations; a timestamp suffix is appended per rebuild. */
	private static final String topicIndexDir = ApplicationProperties.get("resource.path")+ApplicationProperties.get("topic.index.home");

	/** Guards the RAM-index bookkeeping set shared between RAMIndex() and index(). */
	private final Object newTopicLock = new Object();

	@Override
	protected void reIndexBefore() {
		// Purge soft-deleted topics before a full rebuild so they never enter the new index.
		topicService.clearTrash();
	}

	/**
	 * Incrementally indexes a single topic into the in-memory (RAM) index,
	 * so newly verified topics are searchable before the next full rebuild.
	 * Silently returns when the id is non-positive, already indexed, or no
	 * longer resolvable via {@link TopicService#getTopicById(int)}.
	 *
	 * @param topicId id of the topic to index
	 * @throws Exception propagated from token extraction or document indexing
	 */
	public void RAMIndex(int topicId) throws Exception{
		synchronized(newTopicLock){
			//防止重复索引 (guard against duplicate indexing of the same topic)
			if(topicId<=0 || inRAMIndexSet(topicId)){
				logger.info("RAMIndex topic id:"+topicId+" less than 0 or equal 0 or already indexed.");
				return;
			}
			addToRAMIndexSet(topicId);
		}
		Topic topic = topicService.getTopicById(topicId);
		if(topic==null){
			// Bug fix: the original logged topic.getTitle() after this point and
			// threw a NullPointerException whenever the lookup returned null.
			logger.info("RAMIndex topic id:"+topicId+" not found, nothing to index.");
			return;
		}
		// Concatenate pre-tokenized title plus all option texts into one searchable field.
		StringBuilder token = new StringBuilder();
		token.append(getToken(topic.getTitle()));
		List<TopicOptions> options = topicService.listOptions(topic.getId());
		for(TopicOptions option : options){
			token.append(" ").append(getToken(option.getOptions()));
		}
		logger.info("visible:"+topic.getVisible()+",verifyAt->"+topic.getVerifyAt());
		Document doc = makeDocumentForIndex(topic,token.toString());
		logger.info("RAMIndex index topic id:"+topicId+",title:"+topic.getTitle());
		indexDocToRAM(doc);
	}

	/**
	 * Full rebuild of the on-disk topic index. Pages through all topics,
	 * feeds each one to the classifier's training set, and writes a fresh
	 * index generation into a timestamp-named directory which is then
	 * published via {@code indexDone}.
	 *
	 * @throws Exception on database or index I/O failure
	 */
	@Override
	protected void index() throws Exception{
		long s = System.currentTimeMillis();
		Date indexStartAt = new Date();
		IndexWriterConfig config = new IndexWriterConfig(version, getAnalyzer());
		config.setOpenMode(OpenMode.CREATE); // always rebuild from scratch
		String indexDirpath = getIndexDir()+System.currentTimeMillis();
		File indexDir = new File(indexDirpath);
		Directory topicDir = new NIOFSDirectory(indexDir);
		IndexWriter indexWriter = new IndexWriter(topicDir,config);
		logger.info("=========================index topic begin!!===============================");
		logger.info("topic index file -> "+indexDir.getAbsolutePath());
		int limit = 1000;
		int offset = 0;
		StringBuilder token = new StringBuilder();
		classifyService.beginTraining();
		try{
			while(true){
				List<Topic> list = topicService.listTopic(offset, limit);
				if(list.isEmpty()) break;
				offset+=limit;
				for(Topic topic : list){
					classifyService.addToTraining(topic);

					token.setLength(0);
					token.append(getToken(topic.getTitle()));
					List<TopicOptions> options = topicService.listOptions(topic.getId());
					for(TopicOptions option : options){
						token.append(" ").append(getToken(option.getOptions()));
					}
					// Topics verified within ~1 minute of this rebuild may also have been
					// picked up by RAMIndex(); register them so only one copy is indexed.
					int minuteInterval = Math.abs(DateUtil.getMinuteInterval(indexStartAt,topic.getVerifyAt()));
					if(minuteInterval<=1){
						synchronized(newTopicLock){
							if(inRAMIndexSet(topic.getId())){
								logger.info("index topic alread exits,id:"+topic.getId()+",title:"+topic.getTitle());
								continue;
							}
							addToRAMIndexSet(topic.getId());
						}
					}
					indexWriter.addDocument(makeDocumentForIndex(topic,token.toString()));
				}
			}
			indexWriter.commit();
		}finally{
			// Bug fix: the original never closed the writer on failure, leaking
			// the Lucene write lock and blocking subsequent rebuilds.
			indexWriter.close();
		}
		indexDone(indexDir);
		classifyService.training();
		long e = System.currentTimeMillis();
		logger.info("=========================index topic done!! keyword size:"+classifyService.getKeywordSize()+" spends "+(e-s)+" millis===============================");
	}

	/** Field type for searchable-but-not-stored tokenized text (docs-only postings, no norms). */
	static FieldType dontStoreButTokenized = new FieldType();
	static{
		dontStoreButTokenized.setIndexed(true);
		dontStoreButTokenized.setStored(false);
		dontStoreButTokenized.setOmitNorms(true);
		dontStoreButTokenized.setIndexOptions(IndexOptions.DOCS_ONLY);
		dontStoreButTokenized.setTokenized(true);
		dontStoreButTokenized.freeze();
	}

	/**
	 * Builds the Lucene document for one topic.
	 * Text fields (keyword/moodType/type/tag/token) are indexed but not stored;
	 * numeric fields carry the sort keys used by the list* query methods.
	 * Only "numOfParticipate" and "id" are stored for result rendering.
	 *
	 * @param topic source topic entity
	 * @param token pre-tokenized title+options text for the "token" field
	 * @return document ready to be added to an index
	 * @throws SQLException from classifier keyword lookup
	 * @throws IOException from classifier keyword lookup
	 */
	private Document makeDocumentForIndex(Topic topic,String token) throws SQLException, IOException{
		Set<String> keywords = classifyService.getKeywords(topic.getTitle());
		StringBuilder keyword = new StringBuilder();
		for(String kw : keywords){
			keyword.append(kw).append(" ");
		}
		Document doc = new Document();
		doc.add(new Field("keyword",keyword.toString(),dontStoreButTokenized));
		doc.add(new Field("moodType",topic.getMoodType(),dontStoreButTokenized));
		doc.add(new Field("type",topic.getType(),dontStoreButTokenized));
		doc.add(new Field("tag",topic.getTag(),dontStoreButTokenized));
		doc.add(new Field("token",token,dontStoreButTokenized));
		doc.add(new IntField("numOfParticipate",topic.getNumOfParticipate(),Store.YES));
		doc.add(new IntField("isTop",topic.getIsTop(),Store.NO));
		doc.add(new IntField("id",topic.getId(),Store.YES));
		doc.add(new LongField("lastAccessAt",topic.getLastAccessAt().getTime(),Store.NO));
		// Non-recommended topics get recommendAt=0 so they sort last on that key.
		doc.add(new LongField("recommendAt",topic.getIsTop()==1?topic.getRecommendAt().getTime():0,Store.NO));
		doc.add(new LongField("verifyAt",topic.getVerifyAt().getTime(),Store.NO));
		return doc;
	}

	/**
	 * Matches topics whose classifier keywords contain ALL terms of at least
	 * one keyword group (OR across groups, AND within a group), optionally
	 * restricted to a topic type.
	 *
	 * @param idxf index flavor/handle passed through to {@code search}
	 * @param keywordGroup groups of keywords; each group is a conjunction
	 * @param type optional topic type filter; ignored when null/empty
	 * @param offset result offset for paging
	 * @param limit page size
	 * @return search result, relevance-ordered (no explicit sort)
	 */
	public SearchResult smartMatch(String idxf,Collection<Set<String>> keywordGroup,String type,int offset,int limit) throws Exception{
		BooleanQuery bq = new BooleanQuery();
		StringBuilder sb = new StringBuilder();
		for(Set<String> group : keywordGroup){
			BooleanQuery subQuery = new BooleanQuery();
			for(String keyword : group){
				subQuery.add(new TermQuery(new Term("keyword",keyword)), Occur.MUST);
				sb.append(keyword).append(" ");
			}
			sb.append(" or ");
			bq.add(subQuery,Occur.SHOULD);
		}
		logger.info("@@@smartMatch query str->"+sb.toString());

		if(type!=null && !type.isEmpty()){
			BooleanQuery query = new BooleanQuery();
			query.add(new TermQuery(new Term("type",type)), Occur.MUST);
			query.add(bq, Occur.MUST);
			return search(idxf, offset, limit, query, null);
		}else{
			return search(idxf, offset, limit, bq, null);
		}
	}

	/**
	 * Free-text search: tokenizes the user keyword with the bigram analyzer
	 * and requires every resulting term to match either the "token" field or
	 * the "tag" field. Results are sorted by verifyAt, newest first.
	 *
	 * @param keyword user query; null yields a null result
	 */
	public SearchResult searchByKeyword(String idxf,String keyword,int offset,int limit) throws Exception{
		if(keyword!=null){
			BooleanQuery query = new BooleanQuery();
			BooleanQuery tagQuery = new BooleanQuery();
			BooleanQuery keywordQuery = new BooleanQuery();
			TokenStream ts = bigramAnalyzer.tokenStream("token", new StringReader(keyword));
			try{
				// addAttribute never returns null (getAttribute may); reset() is
				// mandatory before incrementToken() under the Lucene 4 TokenStream
				// contract — the original skipped it and risked IllegalStateException.
				CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
				ts.reset();
				while(ts.incrementToken()){
					keywordQuery.add(new TermQuery(new Term("token",term.toString())),Occur.MUST);
					tagQuery.add(new TermQuery(new Term("tag",term.toString())), Occur.MUST);
				}
				ts.end();
			}finally{
				ts.close(); // releases the analyzer's reusable stream
			}
			query.add(tagQuery, Occur.SHOULD);
			query.add(keywordQuery, Occur.SHOULD);
			Sort sort = new Sort(new SortField("verifyAt", SortField.Type.LONG, true));
			return search(idxf,offset,limit,query,sort);
		}
		return null;
	}

	/** Lists recommended (isTop==1) topics, newest recommendation first. The mood parameter is currently unused. */
	public SearchResult listTopicOfTop(String idxf,int mood,int offset,int limit) throws Exception{
		Query query = NumericRangeQuery.newIntRange("isTop", 1, 1, true, true);
		Sort sort = new Sort(new SortField("recommendAt", SortField.Type.LONG, true));
		return search(idxf,offset,limit,query,sort);
	}

	/** Lists topics whose moodType equals the given mood; returns null for non-positive moods. */
	public SearchResult listTopicOfMoodMatch(String idxf,int mood,int offset,int limit) throws Exception{
		if(mood>0){
			Query query =new TermQuery(new Term("moodType",String.valueOf(mood)));
			return search(idxf,offset,limit,query,null);
		}
		return null;
	}

	/** Lists all topics (isTop 0..1), newest verification first. The mood parameter is currently unused. */
	public SearchResult listTopicOfNew(String idxf,int mood,int offset,int limit) throws Exception{
		Query query = NumericRangeQuery.newIntRange("isTop", 0, 1, true, true);
		Sort sort = new Sort(new SortField("verifyAt", SortField.Type.LONG, true));
		return search(idxf,offset,limit,query,sort);
	}

	/** Lists all topics ordered by most recent access. The mood parameter is currently unused. */
	public SearchResult listTopicOfActive(String idxf,int mood,int offset,int limit) throws Exception{
		Query query = NumericRangeQuery.newIntRange("isTop", 0, 1, true, true);
		Sort sort = new Sort(new SortField("lastAccessAt", SortField.Type.LONG, true));
		return search(idxf,offset,limit,query,sort);
	}

	/**
	 * Lists topics verified within the last 7 days, ordered by participation
	 * count descending. The mood parameter is currently unused.
	 */
	public SearchResult listTopicOfHot(String idxf,int mood,int offset,int limit) throws Exception{
		Query stateCondition = NumericRangeQuery.newIntRange("isTop", 0, 1, true, true);
		Date startAt = DateUtil.str2date(DateUtil.getPreDay(7, false)+" 00:00:00", true);
		Query timeCondition = NumericRangeQuery.newLongRange("verifyAt", startAt.getTime(), Long.MAX_VALUE, true, true);
		BooleanQuery query = new BooleanQuery();
		query.add(stateCondition, Occur.MUST);
		query.add(timeCondition, Occur.MUST);
		Sort sort = new Sort(new SortField("numOfParticipate", SortField.Type.INT, true));
		return search(idxf,offset,limit,query,sort);
	}

	/**
	 * Converts one search hit into a JSON result item exposing the stored
	 * "id" and "numOfParticipate" fields. The score is not included.
	 */
	@Override
	protected Object makeResultItem(String idxf,float score,Document doc){
		JSONObject topic = new JSONObject();
		topic.accumulate("idxf", idxf);
		topic.accumulate("numOfParticipate", Integer.parseInt(doc.get("numOfParticipate")));
		topic.accumulate("id", Integer.parseInt(doc.get("id")));
		return topic;
	}

	@Override
	public Analyzer getAnalyzer() {
		return bigramAnalyzer;
	}

	@Override
	public String getIndexDir() {
		return topicIndexDir;
	}

	@Override
	protected void reIndexDone() {
		// Intentionally a no-op: topic reindexing needs no post-publish cleanup.
	}

}
