/**
 * 2017年5月21日
 */
package cn.edu.bjtu.workbench.model;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.deeplearning4j.api.storage.StatsStorageRouter;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.iterator.CnnSentenceDataSetIterator;
import org.deeplearning4j.iterator.LabeledSentenceProvider;
import org.deeplearning4j.iterator.provider.CollectionLabeledSentenceProvider;
import org.deeplearning4j.models.embeddings.loader.WordVectorSerializer;
import org.deeplearning4j.models.word2vec.Word2Vec;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import org.deeplearning4j.ui.stats.StatsListener;
import org.deeplearning4j.util.SerializationUtils;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

import cn.edu.bjtu.workbench.api.CNNDataSetIteratorProviderHandler;
import cn.edu.bjtu.workbench.api.CNNNetworkDesignHandler;
import cn.edu.bjtu.workbench.api.ConfigChangeListener;
import cn.edu.bjtu.workbench.configuration.TextCategorizationCNNConfig;
import cn.edu.bjtu.workbench.core.LoggerSupport;
import cn.edu.bjtu.workbench.core.TextCategorizationManager;
import cn.edu.bjtu.workbench.core.TextCategorizationManager.ClassificationPair;
import cn.edu.bjtu.workbench.datasource.lsp.TrainCNNSentenceProvider;
import cn.edu.bjtu.workbench.tokenization.AnsjTokenzierFactory;

/**
 * CNN-based text-categorization model (singleton).
 *
 * <p>Long-running operations (building or updating the neural network) run
 * asynchronously on an internal daemon thread pool, so most public methods
 * return quickly. This class' log output is model-tuning related and should be
 * routed to a dedicated appender rather than mixed with general server logs.
 *
 * <p>Thread-safety: the user-feedback document buffers ({@code sentences},
 * {@code labels}) are guarded by {@code dataLock}; the word2vec and CNN models
 * are guarded by {@code modelRWLock} — write lock while loading/updating/saving
 * a model, read lock otherwise.
 *
 * @author Alex
 */
public class TextCategorizationCNNModel extends LoggerSupport implements ConfigChangeListener, Thread.UncaughtExceptionHandler {

	private final TextCategorizationCNNConfig config = TextCategorizationCNNConfig.get();
	// Eagerly-initialized singleton; the constructor runs during class initialization.
	private static final TextCategorizationCNNModel inst = new TextCategorizationCNNModel();

	// Pluggable collaborators, swapped atomically so readers never see a torn update.
	private final AtomicReference<TokenizerFactory> tokenizerFactoryAtomicRef =
			new AtomicReference<TokenizerFactory>(new AnsjTokenzierFactory(false));
	private final AtomicReference<CNNNetworkDesignHandler> designHandlerAtomicRef =
			new AtomicReference<CNNNetworkDesignHandler>(null);
	private final AtomicReference<CNNDataSetIteratorProviderHandler> dsItProviderHandlerAR =
			new AtomicReference<CNNDataSetIteratorProviderHandler>(null);

	// Optional UI stats sink; volatile because it is set and read from different threads.
	private volatile StatsStorageRouter stats = null;

	/**
	 * Installs a new dataset-iterator provider handler.
	 *
	 * @param handler the new handler
	 * @return the previously installed handler (possibly {@code null})
	 */
	public CNNDataSetIteratorProviderHandler configDataSetIteratorHandlerProvider(CNNDataSetIteratorProviderHandler handler) {
		return dsItProviderHandlerAR.getAndSet(handler);
	}

	/**
	 * Installs the stats router used to publish training statistics to the UI.
	 *
	 * @param ssr the router, or {@code null} to disable stats publishing
	 * @return the router that was passed in
	 */
	public StatsStorageRouter config(StatsStorageRouter ssr) {
		stats = ssr;
		return ssr;
	}

	/**
	 * Installs a new tokenizer factory.
	 *
	 * @param tf the new factory
	 * @return the previously installed factory
	 */
	public TokenizerFactory configNewTokenizerFactory(TokenizerFactory tf) {
		return tokenizerFactoryAtomicRef.getAndSet(tf);
	}

	/**
	 * Installs a new network-design handler.
	 *
	 * @param nh the new handler
	 * @return the previously installed handler (possibly {@code null})
	 */
	public CNNNetworkDesignHandler configNetworkDesignHandler(CNNNetworkDesignHandler nh) {
		return designHandlerAtomicRef.getAndSet(nh);
	}

	/** @return the currently installed network-design handler, or {@code null}. */
	public CNNNetworkDesignHandler getNetworkDesignHandler() {
		return designHandlerAtomicRef.get();
	}

	/** @return the CNN configuration singleton backing this model. */
	public TextCategorizationCNNConfig getCNNConfig() {
		return config;
	}

	// DATALOCK: guards the buffers of user-validated documents queued for model
	// updates. Acquire dataLock before touching sentences/labels/docsToUpdateModel.
	private final Lock dataLock = new ReentrantLock();
	private CollectionLabeledSentenceProvider docsToUpdateModel = null;
	private List<String> labels = null;
	private List<String> sentences = null;

	// MODELLOCK: write lock while loading/updating/saving the models, read lock otherwise.
	private final ReadWriteLock modelRWLock = new ReentrantReadWriteLock();
	private Word2Vec w2v = null;
	private Deep4jNetworkWrapper net = null;

	// Training epoch count; volatile because the config watcher thread updates it
	// while a pool thread reads it inside the training loop.
	private volatile int epoches = 10;

	// Daemon thread factory for the background task pool; uncaught exceptions are
	// routed back to this class so they reach the model log instead of stderr.
	private final ThreadFactory _tf = new ThreadFactory() {
		private final AtomicInteger idx = new AtomicInteger(0);

		@Override
		public Thread newThread(Runnable r) {
			Thread t = new Thread(r);
			t.setUncaughtExceptionHandler(TextCategorizationCNNModel.this);
			t.setDaemon(true);
			t.setName("Text-Cate-CNN-Model-BackThread-" + idx.getAndIncrement());
			return t;
		}
	};
	private final ExecutorService taskExecutors = Executors.newFixedThreadPool(3, _tf);

	/**
	 * Loads only the word2vec model, not the network: callers are far more likely
	 * to invoke {@link #restoreFromFile()} than {@code buildNetworkModel()}.
	 * Runs during class initialization (singleton field); a failure here surfaces
	 * as an {@code ExceptionInInitializerError} at first use of this class.
	 */
	private TextCategorizationCNNModel() {
		initUpdateDocs();
		initListeners();
		modelRWLock.writeLock().lock();
		try {
			logger.info("loading word2vec model ..... . ");
			w2v = WordVectorSerializer.readWord2VecModel(new File(config.getW2VModelPath()));
			if (w2v == null) {
				throw new IllegalArgumentException("word2vec read failed! check path in your cnnconfig.properties 'WORD2VEC_MODEL_PATH'");
			}
			logger.info("loading word2vec model  ... done  ..... . ");
		} catch (Exception e) {
			logException(e);
			throw new RuntimeException(e.getMessage(), e);
		} finally {
			modelRWLock.writeLock().unlock();
		}
	}

	/** (Re)creates the feedback-document buffers, pre-sized to one update batch. */
	private void initUpdateDocs() {
		dataLock.lock();
		try {
			sentences = new ArrayList<String>(config.getBathNumberToUpdate());
			labels = new ArrayList<String>(config.getBathNumberToUpdate());
			docsToUpdateModel = null;
		} finally {
			dataLock.unlock();
		}
	}

	/**
	 * Reloads both the word2vec model and the CNN from disk. Slow; holds the
	 * model write lock for the duration. Exceptions are rethrown so the caller
	 * can map them to different results.
	 */
	private void initW2vAndNet() {
		modelRWLock.writeLock().lock();
		try {
			w2v = WordVectorSerializer.readWord2VecModel(new File(config.getW2VModelPath()));
			if (w2v == null) {
				throw new IllegalArgumentException("word2vec read failed! check path in your cnnconfig.properties 'WORD2VEC_MODEL_PATH'");
			}
			try {
				TextCategorizationManager.restore();
				net = newNetworkByTypeNotInit();
				net.load();
			} catch (Exception e) {
				logger.warn("failed to read in cnn network  becasue of {}", e.getMessage());
				// Propagate so the caller can react to the failure.
				throw new RuntimeException(e.getMessage(), e);
			}
			logger.info("Success load neuron network,type is {} ,at {}", net.getActualType(), new Date());
		} finally {
			modelRWLock.writeLock().unlock();
		}
	}

	/**
	 * Queues a well-classified document as training feedback. Synchronous, but
	 * once a full batch has accumulated the actual model update is submitted to
	 * the background pool.
	 *
	 * @param sentence document text
	 * @param label    its (user-confirmed) label
	 * @return {@code true} if the document was buffered
	 */
	public boolean addDocumentForUpdate(String sentence, String label) {
		boolean added;
		boolean batchReady;
		dataLock.lock();
		try {
			logger.info("Add a document for model update,content : {} ,label: {} ", sentence, label);
			// ArrayList.add always returns true, so the two buffers stay in sync.
			added = sentences.add(sentence) && labels.add(label);
			// Decide under the lock (the old code read size() after unlock — racy),
			// and use >= so a batch skipped past the threshold still triggers.
			batchReady = sentences.size() >= config.getBathNumberToUpdate();
		} finally {
			dataLock.unlock();
		}
		if (batchReady) {
			taskExecutors.execute(this::updateModel);
		}
		return added;
	}

	/**
	 * @throws IllegalArgumentException if no network has been built or restored
	 *         yet (type kept for backward compatibility with existing callers).
	 */
	private void checkNetNull() {
		modelRWLock.readLock().lock();
		try {
			if (net == null) {
				throw new IllegalArgumentException("net work null ,please build network first");
			}
		} finally {
			modelRWLock.readLock().unlock();
		}
	}

	/** Restores the word2vec model and CNN from their configured files. */
	public void restoreFromFile() {
		initW2vAndNet();
	}

	/**
	 * Synchronously classifies a single document, returning per-class
	 * probabilities; pair the output with the label list kept by
	 * {@link TrainCNNSentenceProvider}.
	 *
	 * @param doc the document text
	 * @return the network output for the document, or {@code null} if the
	 *         iterator produced no dataset (e.g. every token was out-of-vocabulary)
	 */
	public INDArray[]/* once the result dimensionality is pinned down this could become a double[] or the category itself */ predictDocument(String doc) {
		checkNetNull();
		List<String> docs = new ArrayList<String>(1);
		docs.add(doc);
		List<String> labels = new ArrayList<>(1);
		// FIXME: placeholder label — the provider requires one; must not stay "1".
		labels.add("1");
		LabeledSentenceProvider lsp = new CollectionLabeledSentenceProvider(docs, labels);
		modelRWLock.readLock().lock();
		try {
			INDArray[] res = null;
			// Single document, so output() per dataset is fine; batches larger than
			// minibatchSize would need a result-merging rewrite.
			DataSetIterator dsi = getCNNDataSet(this.tokenizerFactoryAtomicRef.get(), lsp, 1, config.getSentenceLength());
			while (dsi.hasNext()) {
				DataSet ds = dsi.next();
				res = net.output(ds.getFeatures());
			}
			return res;
		} finally {
			modelRWLock.readLock().unlock();
		}
	}

	/**
	 * Classifies a document and resolves the raw probabilities to labelled pairs.
	 *
	 * @param doc the document text
	 * @return label/probability pairs as produced by the categorization manager
	 */
	public ClassificationPair[] predictDocumentLabelString(String doc) {
		INDArray[] res = predictDocument(doc);
		INDArray labelVal = res[0];
		return TextCategorizationManager.get().getDesc(labelVal);
	}

	/** @return the word2vec model (unlocked read — treat as read-only). */
	public Word2Vec getW2v() {
		return this.w2v;
	}

	/** @return the network wrapper (unlocked read — may be {@code null}). */
	public Deep4jNetworkWrapper getNet() {
		return this.net;
	}

	/** Creates (and installs) an uninitialized network wrapper. Caller holds the write lock. */
	private Deep4jNetworkWrapper newNetworkByTypeNotInit() {
		return (net = new Deep4jNetworkWrapper(this));
	}

	/**
	 * Builds and trains the CNN from scratch. Always asynchronous; the whole
	 * build/train cycle runs on the background pool under the model write lock.
	 */
	public void buildNetworkModel() {
		taskExecutors.execute(() -> {
			modelRWLock.writeLock().lock();
			try {
				CNNDataSetIteratorProviderHandler handler = dsItProviderHandlerAR.get();
				if (handler == null) {
					// compareAndSet so a concurrently installed handler is not clobbered.
					dsItProviderHandlerAR.compareAndSet(null, new DefaultCNNDataSetIteratorProviderHandler());
					handler = dsItProviderHandlerAR.get();
				}
				DataSetIterator train = handler.handleTrain(config, w2v, tokenizerFactoryAtomicRef.get(), config.getBathNumberToUpdate(), config.getSentenceLength());
				DataSetIterator test = handler.handleTest(config, w2v, tokenizerFactoryAtomicRef.get(), config.getBathNumberToUpdate(), config.getSentenceLength());
				TextCategorizationManager.get().setLabels(train.getLabels());

				logger.info("Train Set labels :{} ", train.getLabels());
				net = newNetworkByTypeNotInit();
				net.init();
				if (stats != null) {
					net.setListeners(new StatsListener(stats, 1));
				}
				logger.info("Number of parameters by layer:");
				for (Layer l : net.getLayers()) {
					logger.info("\n\t{}\t{}", l.conf().getLayer().getLayerName(), l.numParams());
				}
				logger.info("train cnn network .... ");
				// Direct mutation of the TextCategorizationCNNConfig singleton does not
				// fire listeners, so refresh the epoch count right before training.
				this.epoches = config.getCNNEpoch();
				logger.info("Total Epoches:{}", this.epoches);
				for (int i = 0; i < epoches; i++) {
					net.fit(train);
					Evaluation e = net.evaluate(test);
					logger.info("Epoch {} done , total epoches :{} ...", i, this.epoches);
					logger.info("\n{}", e.stats());
					logger.info("\n{}", e.confusionToString());
					train.reset();
					test.reset();
				}
			} catch (Exception e) {
				logger.info(e.getMessage(), e);
			} finally {
				modelRWLock.writeLock().unlock();
			}
		});
	}

	/**
	 * Fits the network on one accumulated batch of feedback documents
	 * (see {@link #addDocumentForUpdate(String, String)}), then clears the buffers.
	 * Lock order is dataLock, then modelRWLock.writeLock — consistent everywhere.
	 */
	private void updateModel() {
		checkNetNull();
		dataLock.lock();
		try {
			modelRWLock.writeLock().lock();
			try {
				// Re-check under the lock: a concurrent update may already have
				// consumed the batch between scheduling and execution.
				if (sentences.size() < config.getBathNumberToUpdate()) {
					logger.info("更新文档数量小于设定值,updateModel返回");
					return;
				}
				CnnSentenceDataSetIterator iter = updateModel0();
				logger.info("begin upadate model at {} ,with documents number {}", new Date(), sentences.size());
				net.fit(iter);
				logger.info("end upadate model at {} ", new Date());
				sentences.clear();
				labels.clear();
			} catch (Exception e) {
				logger.info("Exception occured when try to updating model : {}", e.getMessage(), e);
			} finally {
				modelRWLock.writeLock().unlock();
			}
		} finally {
			dataLock.unlock();
		}
	}

	/**
	 * Synchronously saves the labels, the word2vec model and the CNN.
	 *
	 * @throws RuntimeException wrapping any persistence failure, for the controller layer
	 */
	public void saveModel() {
		modelRWLock.writeLock().lock();
		try {
			TextCategorizationManager.get().saveLabels();
			if (w2v == null || net == null) {
				logger.error("W2V OR NET IS NULL ,SAVE FAILURE ,AND RETURN ");
				return;
			}
			logger.info("At {} ,save model invoked", new Date());
			// FIXME(review): the word2vec model and the CNN are both written to
			// getNetworkSaveFile(); the second write very likely overwrites the
			// first. The w2v snapshot should probably go to a separate path
			// (e.g. the configured WORD2VEC_MODEL_PATH) — confirm intended behavior.
			WordVectorSerializer.writeWord2VecModel(w2v, new File(config.getNetworkSaveFile()));
			net.saveModel(config.getNetworkSaveFile());
			logger.info("At {} ,save model to {} invoked done", new Date(), config.getNetworkSaveFile());
		} catch (Exception e) {
			logException(e);
			// Propagate to the controller layer.
			throw new RuntimeException(e.getMessage(), e);
		} finally {
			modelRWLock.writeLock().unlock();
		}
	}

	/**
	 * Saves only the CNN (no word2vec); exists so the Swing client can pick a
	 * destination. Web applications should call {@link #saveModel()} instead,
	 * which persists labels, CNN and word2vec together.
	 *
	 * @param f destination file
	 * @deprecated use {@link #saveModel()}
	 */
	@Deprecated
	public void saveCNNModel(File f) {
		modelRWLock.writeLock().lock();
		try {
			logger.info("At {} ,saving CNN network ,not include word2vec ....", new Date());
			SerializationUtils.saveObject(net, f);
			logger.info("At {} ,save CNN network done", new Date());
		} finally {
			modelRWLock.writeLock().unlock();
		}
	}

	/** @return the singleton instance. */
	public static TextCategorizationCNNModel get() {
		return inst;
	}

	/** Wraps the buffered feedback documents in a dataset iterator. Caller holds dataLock. */
	private CnnSentenceDataSetIterator updateModel0() {
		docsToUpdateModel = new CollectionLabeledSentenceProvider(this.sentences, this.labels);
		return getCNNDataSet(tokenizerFactoryAtomicRef.get(), docsToUpdateModel, config.getBathNumberToUpdate(), config.getSentenceLength());
	}

	/** Builds a CNN sentence dataset iterator over the given provider. */
	private CnnSentenceDataSetIterator getCNNDataSet(TokenizerFactory tf,
			LabeledSentenceProvider sp, int batch, int senLen) {
		return new CnnSentenceDataSetIterator.Builder()
				.tokenizerFactory(tf)
				.sentenceProvider(sp)
				.wordVectors(w2v)
				.minibatchSize(batch)
				.maxSentenceLength(senLen)
				.useNormalizedWordVectors(false)
				.build();
	}

	/** Registers this instance as a config-change listener. */
	private void initListeners() {
		config.addListener(this);
	}

	/**
	 * Config-change callback, invoked by the daemon watcher thread inside
	 * TextCategorizationCNNConfig (polls roughly every 10 seconds). Because
	 * training runs on taskExecutors threads, {@code epoches} is volatile so the
	 * new value is visible to the running training loop.
	 */
	@Override
	public void invoke(TextCategorizationCNNConfig o) {
		// Guard against a not-yet-built network (the old code NPE'd here).
		Deep4jNetworkWrapper current = this.net;
		if (current != null && o.getDeep4jModelType() != current.getActualType()) {
			// Model type changed — should the network be rebuilt here?
			// initW2vAndNet();
		}
		logger.warn("changing epoch from {} to {}!!", this.epoches, config.getCNNEpoch());
		this.epoches = o.getCNNEpoch();
	}

	/**
	 * Uncaught-exception handler for the background pool threads. Passing the
	 * throwable as the trailing argument (with no matching placeholder) makes
	 * SLF4J print the full stack trace, which the old two-placeholder call lost.
	 */
	@Override
	public void uncaughtException(Thread t, Throwable e) {
		logger.error("In Thread {} exception", t, e);
	}

}
