package classifiers;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.Scanner;
import java.util.TreeSet;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import mulan.data.InvalidDataFormatException;
import mulan.data.MultiLabelInstances;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.core.Instances;
import weka.core.SerializationHelper;
import classifiers.helper.ClassificationMethod;
import classifiers.helper.ConcurrentBinaryRelevanceTransformation;

/**
 * Trains a binary-relevance ensemble one per-label model at a time, persisting each
 * finished model to disk and recording its index in a progress file so that training
 * can resume after a crash or kill ("kill safe"). Models are later reassembled into a
 * {@link ConcurrentBinaryRelevance} without retraining.
 */
public class KillSafeBinaryRelevanceModel {
	
	/** Prototype classifier; a fresh copy is built for every label. */
	Classifier base;
	/** Shared transformation handed to the worker threads. */
	ConcurrentBinaryRelevanceTransformation cbrt;
	
	// One thread per core plus one, to keep the pool busy while a worker blocks on I/O.
	int desired_num_threads = Runtime.getRuntime().availableProcessors()+1;
	
	/**
	 * @param classifier base classifier to copy for each label's binary problem
	 * @throws IllegalStateException if the classifier cannot be copied — failing fast
	 *         here avoids a later NullPointerException on a half-constructed instance
	 */
	public KillSafeBinaryRelevanceModel(Classifier classifier) {
		try {
			this.base = AbstractClassifier.makeCopy(classifier);
		} catch (Exception e) {
			throw new IllegalStateException("could not copy base classifier", e);
		}
	}
	
	/**
	 * Trains one binary model per label that is not yet marked as done in the progress
	 * file, writing each finished model to {@code <relation>_<idx>.model} and appending
	 * the label index to the progress file. Blocks until all submitted tasks finish.
	 *
	 * @param train the multi-label training data
	 */
	public void trainModels(MultiLabelInstances train) {
		final int numLabels = train.getNumLabels();
		// Queue must hold every remaining task so execute() never rejects.
		final int queue_min_size = Math.max(desired_num_threads, numLabels);
		final String text_file_name = "progress_" + getFileName(train.getDataSet().relationName()) + ".txt";
		TreeSet<Integer> classifiers_to_train = loadPendingLabels(text_file_name, numLabels);
		cbrt = new ConcurrentBinaryRelevanceTransformation(train);
		ThreadPoolExecutor executor = new ThreadPoolExecutor(desired_num_threads, queue_min_size,
				Long.MAX_VALUE, TimeUnit.MINUTES, new ArrayBlockingQueue<Runnable>(queue_min_size));
		int[] label_indices = train.getLabelIndices();
		for (final int idx : classifiers_to_train) {
			try {
				final Classifier base_classifier = AbstractClassifier.makeCopy(base);
				final int label_idx = label_indices[idx];
				final ConcurrentBinaryRelevanceTransformation cbrt_final = cbrt;
				final String model_name = getFileName(train.getDataSet().relationName()) + "_" + idx + ".model";
				executor.execute(new Runnable() {

					@Override
					public void run() {
						try {
							Instances transformed_instances = cbrt_final.transformInstances(label_idx);
							base_classifier.buildClassifier(transformed_instances);
							SerializationHelper.write(model_name, base_classifier);
							// Record progress only AFTER the model is safely on disk,
							// so a kill between the two steps just retrains this label.
							try (PrintWriter out = new PrintWriter(
									new BufferedWriter(new FileWriter(text_file_name, true)))) {
								out.println(idx);
							}
						} catch (Exception e) {
							// Best effort: a failed label is simply retried on the next run.
							e.printStackTrace();
						}
					}

				});
			} catch (Exception e1) {
				e1.printStackTrace();
			}
		}
		executor.shutdown();
		try {
			executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore the interrupt flag for callers
			e.printStackTrace();
		}
	}
	
	/**
	 * Assembles a {@link ConcurrentBinaryRelevance} from the per-label models previously
	 * written to disk by {@link #trainModels(MultiLabelInstances)}; no training happens here.
	 * A warning is printed if the progress file indicates unfinished labels.
	 *
	 * @param train     the training data (used for metadata and file naming)
	 * @param threshold decision threshold passed to the built ensemble
	 * @return the assembled ensemble with deserialized per-label models
	 */
	public ConcurrentBinaryRelevance getConcurrentBinaryRelevance(MultiLabelInstances train, double threshold) {
		MultiLabelInstances fake_train = null;
		try {
			// A one-instance stand-in: build(fake_train) just initializes internal
			// structure cheaply; the real models are loaded from disk below.
			fake_train = new MultiLabelInstances(new Instances(train.getDataSet(), 0, 1), train.getLabelsMetaData());
		} catch (InvalidDataFormatException e1) {
			e1.printStackTrace();
		}
		final String text_file_name = "progress_" + getFileName(train.getDataSet().relationName()) + ".txt";
		final int numLabels = train.getNumLabels();
		TreeSet<Integer> classifiers_to_train = loadPendingLabels(text_file_name, numLabels);
		if (classifiers_to_train.size() > 0)
			System.err.println("not all models have been built. Are you sure you called trainModels first??");
		ConcurrentBinaryRelevance res = new ConcurrentBinaryRelevance.ConcurrentBinaryRelevanceBuilder(base).withThreshold(threshold).build();
		try {
			res.build(fake_train);
		} catch (Exception e1) {
			e1.printStackTrace();
		}
		res.prepareToBuild(train);
		for (int i = 0; i < numLabels; ++i) {
			final String model_name = getFileName(train.getDataSet().relationName()) + "_" + i + ".model";
			Classifier model = null;
			try {
				model = (Classifier) SerializationHelper.read(model_name);
			} catch (Exception e) {
				e.printStackTrace();
			}
			res.ensemble[i] = model;
		}
		return res;
	}
	
	/**
	 * Returns the label indices in [0, numLabels) NOT yet recorded as finished in the
	 * progress file; creates an empty progress file if none exists yet.
	 */
	private TreeSet<Integer> loadPendingLabels(String progressFileName, int numLabels) {
		TreeSet<Integer> pending = new TreeSet<Integer>();
		for (int i = 0; i < numLabels; ++i) {
			pending.add(i);
		}
		try {
			File progress_file = new File(progressFileName);
			if (!progress_file.exists()) progress_file.createNewFile();
			// try-with-resources: the original leaked this Scanner (and its file handle)
			try (Scanner jin = new Scanner(progress_file)) {
				while (jin.hasNextInt()) {
					pending.remove(jin.nextInt());
				}
			}
		} catch (Exception e2) {
			e2.printStackTrace();
		}
		return pending;
	}
	
	/** Builds a file-name stem unique to the dataset + base-classifier combination. */
	private String getFileName(String unique_description) {
		return unique_description + "_" + toString();
	}
	
	@Override
	public String toString() {
		return ConcurrentBinaryRelevance.getNameForCalssifier(base);
	}
	
}
