package dp.sgd;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.apache.mahout.classifier.sgd.AdaptiveLogisticRegression;
import org.apache.mahout.classifier.sgd.CrossFoldLearner;
import org.apache.mahout.classifier.sgd.ModelDissector;
import org.apache.mahout.classifier.sgd.ModelSerializer;
import org.apache.mahout.ep.State;
import org.apache.mahout.math.Vector;
import org.apache.mahout.vectorizer.encoders.Dictionary;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;

/**
 * Static helpers for analyzing an SGD (stochastic gradient descent) logistic-regression
 * model trained with Mahout: model dissection, periodic training-progress reporting,
 * and loading of training records from a file.
 *
 * <p>This is a stateless utility class; it cannot be instantiated.
 */
public final class SGDAnalyzer {

	/** Number of (shuffled) records fed to the model dissector; fewer are used if fewer exist. */
	private static final int RECORDS_FOR_DISSECTION = 500;
	/** Maximum number of feature weights printed in the dissection summary. */
	private static final int VALUES_PRINTED = 50;

	private SGDAnalyzer() {
		// Utility class — no instances.
	}

	/**
	 * Dissects the best learner found by {@code learningAlgorithm} and prints the
	 * {@value #VALUES_PRINTED} most influential features with their weights and
	 * most-impacted category to {@code System.out}.
	 *
	 * @param dictionary        maps category names to their interned integer ids
	 * @param learningAlgorithm the trained adaptive learner whose best model is dissected
	 * @param records           training records; a random sample of up to
	 *                          {@value #RECORDS_FOR_DISSECTION} of them is traced
	 * @param overallCounts     global feature counts used when encoding feature vectors
	 * @throws IOException if closing the learner fails
	 */
	public static void dissect(Dictionary dictionary, AdaptiveLogisticRegression learningAlgorithm,
			Iterable<Record> records, Multiset<String> overallCounts) throws IOException {
		CrossFoldLearner model = learningAlgorithm.getBest().getPayload().getLearner();
		model.close();

		Map<String, Set<Integer>> traceDictionary = Maps.newTreeMap();
		ModelDissector md = new ModelDissector();

		Helper helper = new Helper();
		helper.getEncoder().setTraceDictionary(traceDictionary);
		helper.getBias().setTraceDictionary(traceDictionary);

		// Sample at most RECORDS_FOR_DISSECTION records; the original called
		// subList(0, 500) unconditionally, which threw IndexOutOfBoundsException
		// for inputs with fewer than 500 records.
		List<Record> shuffled = permute(records, helper.getRandom());
		int sampleSize = Math.min(shuffled.size(), RECORDS_FOR_DISSECTION);
		for (Record record : shuffled.subList(0, sampleSize)) {
			String category = record.getCategory();
			int actual = dictionary.intern(category);

			// The trace dictionary is reused per record so the dissector sees only
			// the features touched while encoding this one vector.
			traceDictionary.clear();
			Vector v = helper.encodeFeatureVector(record, actual, overallCounts);
			md.update(v, traceDictionary, model);
		}

		List<String> categories = Lists.newArrayList(dictionary.values());
		List<ModelDissector.Weight> weights = md.summary(VALUES_PRINTED);
		System.out.println("============");
		System.out.println("Model Dissection");
		for (ModelDissector.Weight w : weights) {
			// NOTE(review): the "+ 1" matches Mahout's 20-newsgroups example —
			// presumably weight column i maps to category i+1 because category 0
			// is the reference class; confirm against the Dictionary intern order.
			System.out.printf("%s\t%.1f\t%s\n", w.getFeature(), w.getWeight(),
					categories.get(w.getMaxImpact() + 1));
		}

	}

	/**
	 * Returns a uniformly shuffled copy of {@code records} using an "inside-out"
	 * Fisher-Yates shuffle: each new element is placed at a random position and the
	 * displaced element (if any) is appended. The input iterable is not modified.
	 *
	 * @param records the records to shuffle
	 * @param rand    source of randomness
	 * @return a new list containing all records in random order
	 */
	public static List<Record> permute(Iterable<Record> records, Random rand) {
		List<Record> r = Lists.newArrayList();
		for (Record record : records) {
			int i = rand.nextInt(r.size() + 1);
			if (i == r.size()) {
				r.add(record);
			} else {
				r.add(r.get(i));
				r.set(i, record);
			}
		}
		return r;
	}

	/**
	 * Records the current accuracy/log-likelihood of the best learner into {@code info}
	 * and, on a logarithmically spaced schedule of training steps, serializes the
	 * current model to {@code modelPath + k + ".model"} and prints a progress line.
	 *
	 * @param k         index of the current training example
	 * @param best      the current best learner state (may be {@code null} early in training)
	 * @param info      mutable accumulator for progress statistics and the reporting step
	 * @param modelPath directory/prefix for serialized model snapshots
	 * @throws IOException if writing the model snapshot fails
	 */
	public static void analyzeState(int k, State<AdaptiveLogisticRegression.Wrapper, CrossFoldLearner> best,
			SGDInfo info, String modelPath) throws IOException {

		// Compute how often the real-time model analysis should be displayed:
		// bump cycles through the configured bump values while scale grows by a
		// power of ten each time the cycle wraps, giving log-spaced reports.
		int bump = info.getBumps()[(int) Math.floor(info.getStep()) % info.getBumps().length];
		int scale = (int) Math.pow(10, Math.floor(info.getStep() / info.getBumps().length));

		if (best != null) {
			CrossFoldLearner state = best.getPayload().getLearner();
			info.setAverageCorrect(state.percentCorrect());
			info.setAverageLL(state.logLikelihood());
		}

		if (k % (bump * scale) == 0) {
			if (best != null) {
				ModelSerializer.writeBinary(modelPath + k + ".model", best.getPayload().getLearner().getModels().get(0));
			}
			info.setStep(info.getStep() + 0.25);

			System.out.printf("k = %d\tavgLL = %.3f\tavgCorrect = %.2f\t\n", k, info.getAverageLL(), info.getAverageCorrect() * 100);
		}

	}

	/**
	 * Reads every line of {@code file} as one {@link Record} and appends it to
	 * {@code records}. I/O failures are reported on stderr and the records read so
	 * far are kept (best-effort, matching the original behavior).
	 *
	 * @param file    text file with one record per line, expected to be UTF-8
	 * @param records destination list, mutated in place
	 * @return the same {@code records} list, for call chaining
	 */
	public static List<Record> addRecordsToCollection(File file, List<Record> records) {
		// try-with-resources closes the stream even on failure; the original leaked
		// the stream on exception and closed only an intermediate wrapper.
		// UTF-8 is specified explicitly instead of the platform-default charset.
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				records.add(new Record(line));
			}
		} catch (IOException e) {
			// Narrowed from catch (Exception): programming errors now propagate
			// instead of being silently swallowed.
			e.printStackTrace();
		}

		return records;
	}

}
