/**
 * DefaultCNNDataSetIteratorProviderHandler.java created by zhangzhidong
 * at 2:50:31 PM, May 26, 2017
 */
package cn.edu.bjtu.model;

import java.io.File;
import java.io.IOException;

import org.datavec.api.split.FileSplit;
import org.deeplearning4j.iterator.CnnSentenceDataSetIterator;
import org.deeplearning4j.models.embeddings.wordvectors.WordVectors;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

import cn.edu.bjtu.configuration.TextCategorizationCNNConfig;
import cn.edu.bjtu.core.CNNDataSetIteratorProviderHandler;
import cn.edu.bjtu.core.Deep4jModelType;
import cn.edu.bjtu.datasource.dsiter.LeNetCNNSentenceDataSetIterator;
import cn.edu.bjtu.datasource.lsp.TrainCNNSentenceProvider;

/**
 * Default handler that builds sentence-based CNN {@code DataSetIterator}s
 * for training and testing.
 *
 * @author zhangzhidong<br>
 * comment generated at May 26, 2017 2:50:31 PM<br>
 */
public class DefaultCNNDataSetIteratorProviderHandler implements CNNDataSetIteratorProviderHandler {

	/**
	 * Builds a {@link DataSetIterator} over the training data set configured in
	 * {@code config} (see {@code TextCategorizationCNNConfig#getDataSetDirOrFile()}).
	 *
	 * @param config configuration holding the data location and model type
	 * @param wv     word vectors used to embed tokens
	 * @param tf     tokenizer factory used to split sentences into tokens
	 * @param batch  minibatch size
	 * @param senLen maximum sentence length (longer sentences are truncated by the iterator)
	 * @return a sentence-based CNN data set iterator over the training data
	 * @throws IOException          if the data location cannot be read
	 * @throws InterruptedException if sentence-provider initialization is interrupted
	 */
	@Override
	public DataSetIterator handleTrain(TextCategorizationCNNConfig config, WordVectors wv, TokenizerFactory tf, int batch, int senLen) throws IOException, InterruptedException {
		return createIterator(config, config.getDataSetDirOrFile(), wv, tf, batch, senLen);
	}

	/**
	 * Builds a {@link DataSetIterator} over the test data set configured in
	 * {@code config} (see {@code TextCategorizationCNNConfig#getTestDataSetDirOrFile()}).
	 *
	 * @param config configuration holding the data location and model type
	 * @param wv     word vectors used to embed tokens
	 * @param tf     tokenizer factory used to split sentences into tokens
	 * @param batch  minibatch size
	 * @param senLen maximum sentence length (longer sentences are truncated by the iterator)
	 * @return a sentence-based CNN data set iterator over the test data
	 * @throws IOException          if the data location cannot be read
	 * @throws InterruptedException if sentence-provider initialization is interrupted
	 */
	@Override
	public DataSetIterator handleTest(TextCategorizationCNNConfig config, WordVectors wv, TokenizerFactory tf, int batch, int senLen) throws IOException, InterruptedException {
		return createIterator(config, config.getTestDataSetDirOrFile(), wv, tf, batch, senLen);
	}

	/**
	 * Shared builder for train/test iterators: initializes a sentence provider over
	 * {@code dataDirOrFile} and constructs the iterator variant matching the configured
	 * model type ({@code LeNetCNNSentenceDataSetIterator} for
	 * {@code Deep4jModelType.MultiLayerNetwork}, {@code CnnSentenceDataSetIterator} otherwise).
	 */
	private DataSetIterator createIterator(TextCategorizationCNNConfig config, String dataDirOrFile,
			WordVectors wv, TokenizerFactory tf, int batch, int senLen) throws IOException, InterruptedException {
		TrainCNNSentenceProvider sp = new TrainCNNSentenceProvider();
		sp.initialize(new FileSplit(new File(dataDirOrFile)));
		if (config.getDeep4jModelType() == Deep4jModelType.MultiLayerNetwork) {
			return new LeNetCNNSentenceDataSetIterator.Builder()
					.tokenizerFactory(tf)
					.sentenceProvider(sp)
					.wordVectors(wv)
					.minibatchSize(batch)
					.maxSentenceLength(senLen)
					// raw (unnormalized) word vectors are fed to the network
					.useNormalizedWordVectors(false)
					.build();
		}
		return new CnnSentenceDataSetIterator.Builder()
				.tokenizerFactory(tf)
				.sentenceProvider(sp)
				.wordVectors(wv)
				.minibatchSize(batch)
				.maxSentenceLength(senLen)
				// raw (unnormalized) word vectors are fed to the network
				.useNormalizedWordVectors(false)
				.build();
	}

}
