package dp.sgd;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.mahout.classifier.sgd.AdaptiveLogisticRegression;
import org.apache.mahout.classifier.sgd.CrossFoldLearner;
import org.apache.mahout.classifier.sgd.L1;
import org.apache.mahout.classifier.sgd.ModelSerializer;
import org.apache.mahout.ep.State;
import org.apache.mahout.math.Vector;
import org.apache.mahout.vectorizer.encoders.Dictionary;

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

import dp.utils.Utils;

/**
 * Trains a classification model from the given dataset files; inspired by
 * Mahout's TrainNewsGroups and TrainAdaptiveLogistic examples.
 * 
 * usage: SGDTrainer [path-to-training-dataset-dir] [path-to-store-model(optional)]
 * 
 */
public class SGDTrainer {

	private static final int LEARNING_ALGORITHM_INTERVAL = 800;
	private static final int LEARNING_ALGORITHM_AVERAGING_WINDOW = 500;

	private static final int PROBES_COUNT = 2;

	private static final String MODEL_FILE_NAME = "sgd-model";
	private static final String DEFAULT_MODEL_PATH = "c:/dp/tmp/";

	/**
	 * Trains an {@link AdaptiveLogisticRegression} classifier on the records
	 * found under the given dataset directory (one entry per category) and
	 * serializes the best learner to disk.
	 *
	 * @param args args[0] = path to the training-dataset directory (required);
	 *             args[1] = directory in which to store the model (optional,
	 *             defaults to {@link #DEFAULT_MODEL_PATH})
	 * @throws IOException if reading the dataset or writing the model fails
	 */
	public static void main(String[] args) throws IOException {

		if (args.length < 1) {
			System.err.println("Chyba parameter: <cesta-k-datasetu>"); // TODO: use a logger
			System.exit(1);
		}

		File base = new File(args[0]);

		String modelPath = DEFAULT_MODEL_PATH;
		if (args.length > 1) {
			modelPath = args[1];
		}

		String fullModelPath = modelPath + "/" + MODEL_FILE_NAME;

		// FIX: File.listFiles() returns null when the path does not exist or
		// is not a readable directory; the original code would have thrown a
		// NullPointerException on .length. Also call it only once instead of
		// twice, so the category count and the iteration cannot disagree.
		File[] categoryFiles = base.listFiles();
		if (categoryFiles == null) {
			System.err.println("Not a readable directory: " + base);
			System.exit(1);
			return; // unreachable, but makes the null check explicit to the compiler
		}

		// one dataset entry per category => number of target classes
		int categoriesCount = categoryFiles.length;
		Multiset<String> overallCounts = HashMultiset.create();

		Dictionary categoryGroups = new Dictionary();

		Helper helper = new Helper();

		helper.getEncoder().setProbes(PROBES_COUNT);

		// instantiate and configure the learning algorithm
		AdaptiveLogisticRegression learningAlgorithm = new AdaptiveLogisticRegression(
				categoriesCount, Helper.FEATURES, new L1());
		learningAlgorithm.setInterval(LEARNING_ALGORITHM_INTERVAL);
		learningAlgorithm
				.setAveragingWindow(LEARNING_ALGORITHM_AVERAGING_WINDOW);

		List<Record> records = new ArrayList<Record>();
		for (File file : categoryFiles) {
			categoryGroups.intern(file.getName());
			records = SGDAnalyzer.addRecordsToCollection(file, records);
		}

		// shuffle the list so the algorithm does not see the records grouped
		// by category
		Collections.shuffle(records);
		Utils.log("training records: " + records.size());

		SGDInfo info = new SGDInfo();

		int k = 0;

		for (Record record : records) {
			String category = record.getCategory();
			int actual = categoryGroups.intern(category);

			// build the feature vector for each record (here a vector of
			// Math.log1p(words.count(word)) values)
			Vector v = helper
					.encodeFeatureVector(record, actual, overallCounts);

			// train the model with the vector
			learningAlgorithm.train(actual, v);

			k++;

			/*
			 * Adaptive logistic regression runs several learners in parallel,
			 * each with a different learning configuration; a genetic
			 * algorithm then selects the best learner (the fitness function
			 * is AUC). learningAlgorithm.getBest() returns that learner
			 * wrapped together with up-to-date model-quality statistics; see
			 * Mahout in Action, p. 305.
			 */
			State<AdaptiveLogisticRegression.Wrapper, CrossFoldLearner> best = learningAlgorithm
					.getBest();
			SGDAnalyzer.analyzeState(k, best, info, fullModelPath);
		}

		learningAlgorithm.close();

		SGDAnalyzer.dissect(categoryGroups, learningAlgorithm, records,
				overallCounts);

		// FIX: getBest() returns null when no records were trained (e.g. an
		// empty dataset); the original code would have thrown a
		// NullPointerException during serialization.
		State<AdaptiveLogisticRegression.Wrapper, CrossFoldLearner> finalBest = learningAlgorithm
				.getBest();
		if (finalBest == null) {
			System.err.println("No model was trained (empty dataset?): " + base);
			System.exit(1);
			return;
		}

		ModelSerializer.writeBinary(fullModelPath + ".model",
				finalBest.getPayload().getLearner().getModels().get(0));

	}

}
