package org.wltea.analyzer.lucene;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.lang.StringUtils;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.util.AttributeSource.AttributeFactory;
import org.wltea.analyzer.cfg.DefaultConfig;
import org.wltea.analyzer.dic.Dictionary;

import com.hunteron.jdbc.bean.DicKeyword;
import com.hunteron.jdbc.bean.SameWord;
import com.hunteron.jdbc.dao.DicKeywordDao;
import com.hunteron.jdbc.dao.SameWordDao;
import com.hunteron.util.DateFormatUtil;
import com.hunteron.util.PropertieUtil;

public class IKTokenizerFactory extends TokenizerFactory implements
		ResourceLoaderAware {

	// One-time guard for starting the dictionary-maintenance scheduler.
	// FIX: the original synchronized on a reassigned static Boolean; autoboxed
	// Boolean.FALSE/TRUE are JVM-wide cached instances, and reassigning the
	// lock variable inside the synchronized block breaks mutual exclusion.
	// A compareAndSet on an AtomicBoolean is both correct and lock-free.
	private static final AtomicBoolean isThreadStart = new AtomicBoolean(false);
	// Interval between incremental dictionary updates, in seconds.
	private static int DIC_UPDATE_SECONDS = PropertieUtil.getIntProp("resource", "dicUpdateTime");
	// Interval between full dictionary reloads, in hours.
	private static int DIC_RELOAD_HOUR = PropertieUtil.getIntProp("resource", "dicReLoadTime");

	private boolean useSmart = false;

	String dicPath = null;

	// Watermarks for incremental updates: start 600000 ms (10 minutes) before
	// JVM startup, then advance to the last processed timestamp.
	// (The original comment claimed 20 seconds; the code uses 600000 ms.)
	private static long dicUpdateTime = System.currentTimeMillis() - 600000;
	private static long sameUpdateTime = System.currentTimeMillis() - 600000;

	/**
	 * Creates the factory and, exactly once per JVM, starts the background
	 * tasks that keep the IK dictionary in sync with the database.
	 *
	 * @param args factory arguments; recognizes "useSmart" (boolean, default
	 *             false) and "dicPath" (comma-separated dictionary resource
	 *             paths)
	 */
	public IKTokenizerFactory(Map<String, String> args) {
		super(args);
		assureMatchVersion();
		useSmart = getBoolean(args, "useSmart", false);
		dicPath = get(args, "dicPath");

		// compareAndSet guarantees the scheduler is created only once even
		// when several factory instances are constructed concurrently.
		if (isThreadStart.compareAndSet(false, true)) {
			// Only two periodic tasks are scheduled, so two threads suffice.
			ScheduledExecutorService executor = Executors.newScheduledThreadPool(2);
			executor.scheduleWithFixedDelay(new Runnable() {
				public void run() {
					try {
						updateDic();
					} catch (Throwable t) {
						// An uncaught exception would silently cancel all
						// future executions of this scheduled task.
						t.printStackTrace();
					}
				}
			}, DIC_UPDATE_SECONDS, DIC_UPDATE_SECONDS, TimeUnit.SECONDS);

			executor.scheduleWithFixedDelay(new Runnable() {
				public void run() {
					try {
						reLoadDic();
					} catch (Throwable t) {
						t.printStackTrace();
					}
				}
			}, 0, DIC_RELOAD_HOUR, TimeUnit.HOURS);
		}
	}

	/** @return whether the IK tokenizer runs in smart (coarse-grained) mode */
	public boolean useSmart() {
		return useSmart;
	}

	/**
	 * Creates a new {@link IKTokenizer} for an analysis run; called many
	 * times over the factory's lifetime.
	 */
	public Tokenizer create(AttributeFactory attributeFactory, Reader in) {
		return new IKTokenizer(in, this.useSmart());
	}

	/**
	 * Called once at startup: opens every dictionary file listed in
	 * {@code dicPath} and loads them into IK's main dictionary.
	 *
	 * @throws IOException if a dictionary resource cannot be opened
	 */
	public void inform(ResourceLoader loader) throws IOException {
		if (dicPath != null && !dicPath.trim().isEmpty()) {
			System.out.println("get dicPath: " + dicPath);

			System.out.println("<IKTokenizerFactory>begin split dicPath: ");
			List<String> dicPaths = Util.SplitFileNames(dicPath);
			System.out.println(dicPaths);

			List<InputStream> inputStreamList = new ArrayList<InputStream>();
			for (String path : dicPaths) {
				if (path != null && !path.isEmpty()) {
					InputStream is = loader.openResource(path);
					if (is != null) {
						inputStreamList.add(is);
					}
				}
			}

			if (!inputStreamList.isEmpty()) {
				Dictionary.addDic2MainDic(inputStreamList); // load dic to MainDic
			}
		}
	}

	/**
	 * Incremental update: pulls keyword and synonym rows changed since the
	 * last watermark and applies them to the live dictionary.
	 * Synchronized so it cannot interleave with {@link #reLoadDic()}, which
	 * writes the same static watermarks from a different pool thread.
	 */
	public synchronized void updateDic() {
		long minAddTime = dicUpdateTime;
		DicKeywordDao dicKeywordDao = new DicKeywordDao();
		dicKeywordDao.setUpdateTime(minAddTime);
		dicUpdateTime = dicKeywordDao.maxTime(minAddTime);

		// No newer rows: nudge the watermark forward one second so the same
		// window is not rescanned forever.
		if (dicUpdateTime == minAddTime) {
			dicUpdateTime = minAddTime + 1000;
		}

		System.out.println("update dic at : " + DateFormatUtil.formatLong(minAddTime) + ", next time at : " + DateFormatUtil.formatLong(dicUpdateTime));
		while (dicKeywordDao.hasNext()) {
			List<DicKeyword> list = dicKeywordDao.more();
			if (list != null) {
				for (DicKeyword k : list) {
					String w = k.getKeyword();
					// isDic == -1 marks an entry removed from the dictionary.
					if (k.getIsDic() == -1) {
						System.out.println("\tdelete word : " + w);
						Dictionary.getSingleton().disableWord(w);
					} else {
						System.out.println("\tadd word : " + w);
						Dictionary.getSingleton().addWord(w);
					}
				}
			}
		}

		minAddTime = sameUpdateTime;
		// Synonym entries loaded dynamically from the database.
		SameWordDao sameWordDao = new SameWordDao();
		sameWordDao.setUpdateTime(minAddTime);

		sameUpdateTime = sameWordDao.maxTime(minAddTime);
		if (sameUpdateTime == minAddTime) {
			sameUpdateTime = minAddTime + 1000;
		}

		while (sameWordDao.hasNext()) {
			List<SameWord> words = sameWordDao.more();
			if (words != null && words.size() > 0) {
				for (SameWord w : words) {
					loadWord(w);
				}
			}
		}
	}

	/**
	 * Full reload: rebuilds the dictionary from its configured sources, then
	 * replays the entire keyword/synonym tables and resets both watermarks to
	 * the tables' maximum timestamps.
	 */
	private synchronized void reLoadDic() {
		Dictionary.reload(DefaultConfig.getInstance());
		DicKeywordDao dicKeywordDao = new DicKeywordDao();
		dicUpdateTime = dicKeywordDao.maxTime(-1); // -1 = scan the whole table
		while (dicKeywordDao.hasNext()) {
			List<DicKeyword> list = dicKeywordDao.more();
			if (list != null) {
				for (DicKeyword k : list) {
					String w = k.getKeyword();
					if (k.getIsDic() == -1) {
						Dictionary.getSingleton().disableWord(w);
					} else {
						Dictionary.getSingleton().addWord(w);
					}
				}
			}
		}

		// Synonym entries loaded dynamically from the database.
		SameWordDao sameWordDao = new SameWordDao();
		sameUpdateTime = sameWordDao.maxTime(-1);
		while (sameWordDao.hasNext()) {
			List<SameWord> words = sameWordDao.more();
			if (words != null && words.size() > 0) {
				for (SameWord w : words) {
					loadWord(w);
				}
			}
		}
	}

	/**
	 * Registers every keyword of a synonym row that is non-blank and longer
	 * than one character.
	 */
	private void loadWord(SameWord w) {
		String[] kws = w.getKeyword();
		for (String kw : kws) {
			if (StringUtils.isNotBlank(kw) && kw.length() > 1) {
				Dictionary.getSingleton().addWord(kw);
			}
		}
	}
}

