package main;

import java.io.File;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import mulan.classifier.MultiLabelLearner;
import utils.TagClustering;
import utils.Utils;
import weka.core.SelectedTag;
import weka.core.SerializationHelper;
import weka.experiment.Task;
import weka.experiment.TaskStatusInfo;
import classifiers.ConcurrentBinaryRelevance.ConcurrentBinaryRelevanceBuilder;
import classifiers.LibSVM;
import classifiers.helper.FeatureExtractionMethod;
import dictionary.Dictionary;
import dictionary.StemmerDictionary;
import dictionary.StemmerWithCache;
import dictionary.stemming.SnowballEnglishStemmer;
import evaluation.MulanEvaluator;
import evaluation.PerformanceEvaluation;
import features.FeatureExtractor;
import features.LabelExtractor;
import features.NominalFeatureExtractor;
import features.WordCountFeatureExtractor;
import features.word_counter.BodyAndTitleWordCounter;
import features.word_counter.WordCounter;

public class ExtractAllFeatureTypes {

	/**
	 * Batch driver: for every dataset file in the input directory and every
	 * available {@link FeatureExtractionMethod}, runs a feature extraction task
	 * on a thread pool and writes the resulting ARFF output to the output
	 * directory. Blocks until all tasks have finished, then exits.
	 *
	 * @param args unused
	 * @throws Exception if the dictionary cannot be loaded or the input
	 *                   directory cannot be listed
	 */
	public static void main(String[] args) throws Exception {
		final TagClustering tc = new TagClustering();
		final String out_directory_name = "D:\\Data\\StackOverflow data\\test\\arff";
		final String directory_name = "D:\\Data\\StackOverflow data\\test";

		// Dictionary backed by a caching Snowball stemmer, loaded from disk.
		final Dictionary dict = new StemmerDictionary(new StemmerWithCache(new SnowballEnglishStemmer()));
		dict.load("new_dictionary.txt");
		final WordCounter wc = new BodyAndTitleWordCounter(dict, 3);

		// One prototype extractor per extraction method; each task copies its
		// prototype (see ffe.copy() below) so extractors are not shared across threads.
		List<FeatureExtractor> fes = new LinkedList<FeatureExtractor>();
		for ( FeatureExtractionMethod method : FeatureExtractionMethod.values() )
			fes.add(FeatureExtractor.getFeatureExtractor(wc, method));

		List<String> filenames = listDataFiles(new File(directory_name));

		int desired_num_threads = Runtime.getRuntime().availableProcessors() + 4;
		// Upper bound on threads AND on queue capacity: large enough that every
		// (file, extractor) task fits in the queue, so execute() never rejects.
		int total_num_threads = Math.max(desired_num_threads, filenames.size() * fes.size());

		ThreadPoolExecutor executor = new ThreadPoolExecutor(desired_num_threads, total_num_threads,
				Long.MAX_VALUE, TimeUnit.MINUTES, new ArrayBlockingQueue<Runnable>(total_num_threads));
		for ( final String file_name : filenames ) {
			for ( final FeatureExtractor ffe : fes ) {
				executor.execute(new Runnable() {

					@Override
					public void run() {
						// Private copy per task: the prototype extractors are reused
						// across many tasks and must not be mutated concurrently.
						FeatureExtractor fe = ffe.copy();
						fe.setLabel_extractor(LabelExtractor.getLabelExtractor(tc, file_name));
						try {
							fe.extractFeatures(file_name, out_directory_name);
						} catch (Exception e) {
							// Best effort: log and continue; one failed dataset must not
							// abort the remaining extraction tasks.
							e.printStackTrace();
						}
					}
				});
			}
		}
		executor.shutdown();
		executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES);
		System.out.println("ALL DONE.. EXITING");
		System.exit(0);
	}

	/**
	 * Lists the absolute paths of all regular files directly inside the given
	 * directory (subdirectories are skipped, not recursed into).
	 *
	 * @param directory directory to scan
	 * @return absolute paths of the files found
	 * @throws IllegalArgumentException if {@code directory} does not exist or
	 *                                  cannot be listed ({@code listFiles()}
	 *                                  returned {@code null})
	 */
	private static List<String> listDataFiles(File directory) {
		File[] entries = directory.listFiles();
		if (entries == null) {
			throw new IllegalArgumentException("Not a readable directory: " + directory);
		}
		List<String> filenames = new ArrayList<String>();
		for ( File file : entries ) {
			if (file.isFile()) {
				filenames.add(file.getAbsolutePath());
			}
		}
		return filenames;
	}

}
