package vsvm.classifier;

import java.awt.Component;
import java.io.File;
import java.io.IOException;

import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.event.InternalFrameAdapter;
import javax.swing.event.InternalFrameEvent;

import libsvm.svm;
import libsvm.svm_model;
import libsvm.svm_node;
import libsvm.svm_parameter;
import libsvm.svm_problem;
import vsvm.classifier.optimization.AbstractOptimizer;
import vsvm.classifier.optimization.GeneticOptimizer;
import vsvm.classifier.optimization.IOptimizable;
import vsvm.classifier.optimization.OptimizableParameter;
import vsvm.data.generators.ChessDataGenerator;
import vsvm.data.model.AbstractDataModel;
import vsvm.gui.Environment;
import vsvm.gui.forms.ChessTestFrame;
import vsvm.gui.forms.ConfigureSvmLibDialog;
import vsvm.gui.forms.OutputFrame;
import vsvm.gui.forms.TestClassifierForm;
import vsvm.math.statistics.ROCAnalysis;

/**
 * Classifier backed by the libsvm library. Wraps an {@link svm_model} and its
 * {@link svm_parameter} configuration, converts the application's
 * {@link AbstractDataModel} into libsvm's {@link svm_problem} format, and adds
 * GUI-driven extras: a visual "chess board" sanity test and genetic parameter
 * optimization (via {@link IOptimizable}).
 */
public class LibSvmClassifier extends AbstractClassifier implements IOptimizable {
	private static final long serialVersionUID = 100L;

	// Action identifiers returned by getActions() and dispatched in executeAction().
	private static final String AC_CHESS = "Chess test";
	private static final String AC_OPTIMIZE = "Optimize parameters";

	// Ids used to address individual SVM parameters from OptimizableParameter
	// instances in evaluate().
	private static final int OP_DEGREE = 1;
	private static final int OP_GAMMA = 2;
	private static final int OP_COEF0 = 3;
	private static final int OP_NU = 4;
	private static final int OP_C = 5;

	// Trained libsvm model; null until train(...) has been called.
	private svm_model model = null;
	// Training configuration; protected so subclasses can adjust it.
	protected svm_parameter param = null;

	/**
	 * Creates a classifier with default parameters: C-SVC with a polynomial
	 * kernel (degree 3), probability estimates enabled, no per-class weights.
	 */
	public LibSvmClassifier() {
		param = new svm_parameter();
		param.svm_type = svm_parameter.C_SVC;
		param.kernel_type = svm_parameter.POLY;
		param.degree = 3;
		param.gamma = 1;	// 1/k
		param.coef0 = 1;
		param.nu = 0.5;
		param.cache_size = 100;
		param.C = 1;
		param.eps = 1e-3;
		param.p = 0.1;
		param.shrinking = 1;
		param.probability = 1;
		param.nr_weight = 0;
		param.weight_label = new int[0];
		param.weight = new double[0];
	}

	/**
	 * Trains the SVM on the given data model with the current parameters and
	 * marks the classifier as trained.
	 *
	 * @param mod the labelled data to train on
	 */
	public void train(AbstractDataModel mod) {
		svm_problem p = convertDataModel(mod);
		model = svm.svm_train(p, param);
		status = STATUS_TRAINED;
	}

	/** @return the live parameter object (mutations affect this classifier). */
	protected svm_parameter getParameter() { return param; }

	/**
	 * Converts an application data model into libsvm's problem representation.
	 * The class attribute column is skipped, so each feature vector has
	 * (attributeCount - 1) nodes; node indices keep the original attribute index.
	 *
	 * @param dm the data model to convert
	 * @return an equivalent svm_problem
	 */
	protected svm_problem convertDataModel(AbstractDataModel dm) {
		svm_problem problem = new svm_problem();
		int n = dm.getAttributeCount();

		problem.l = dm.getVectorCount();
		problem.x = new svm_node[problem.l][];
		problem.y = new double[problem.l];

		for (int i = 0; i < problem.l; ++i) {
			// One slot per non-class attribute.
			problem.x[i] = new svm_node[n - 1];
			problem.y[i] = dm.getClass(i);

			double[] vec = dm.getVector(i);
			for (int j = 0, k = 0; j < n; ++j) {
				if (dm.getClassAttributeIndex() != j) {
					problem.x[i][k] = new svm_node();
					problem.x[i][k].index = j;
					problem.x[i][k].value = vec[j];
					++k;
				}
			}
		}
		return problem;
	}

	@Override
	public void classify(AbstractDataModel data) {
		// TODO not yet implemented
	}

	@Override
	public AbstractTestSet test(TestingParameters params) {
		// Test on a fresh copy so the test run does not disturb this
		// instance's trained model or parameters.
		return new MultiLibSvmTestSet(this.getCleanDuplicate(), params);
	}

	/**
	 * Visual sanity check: trains on a generated "chess board" problem and
	 * renders the decision surface on a 512x512 grid in a ChessTestFrame.
	 * Leaves this classifier's configuration unchanged.
	 */
	private void chessTest() {
		AbstractDataModel dm = ChessDataGenerator.generateProblem(512);
		svm_problem p = convertDataModel(dm);

		// Probability estimates are unnecessary for the visualisation; disable
		// them for this training run only and restore the previous setting
		// afterwards (the original code permanently clobbered the flag).
		int oldProbability = param.probability;
		svm_model m;
		try {
			param.probability = 0;
			m = svm.svm_train(p, param);
		} finally {
			param.probability = oldProbability;
		}

		int N = 512;
		boolean[][] mx = new boolean[N][];

		// Sample the unit square on an N x N grid and record the predicted class.
		for (int i = 0; i < N; ++i) {
			mx[i] = new boolean[N];
			for (int j = 0; j < N; ++j) {
				svm_node[] node = new svm_node[2];
				node[0] = new svm_node();
				node[1] = new svm_node();
				node[0].index = 0;
				node[1].index = 1;
				node[0].value = i / (double) N;
				node[1].value = j / (double) N;
				double c = svm.svm_predict(m, node);
				mx[i][j] = c > 0;
			}
		}

		ChessTestFrame f = new ChessTestFrame("Chess test", mx);
		f.setVisible(true);
		Environment.getEnvironment().getMainFrame().addWindow(f);
	}

	/**
	 * Returns an untrained classifier carrying a deep copy of this
	 * instance's parameters, so test runs can train independently.
	 */
	private LibSvmClassifier getCleanDuplicate() {
		LibSvmClassifier c = new LibSvmClassifier();
		c.param = new svm_parameter();
		c.param.svm_type = param.svm_type;
		c.param.kernel_type = param.kernel_type;
		c.param.degree = param.degree;
		c.param.gamma = param.gamma;
		c.param.coef0 = param.coef0;
		c.param.nu = param.nu;
		c.param.cache_size = param.cache_size;
		c.param.C = param.C;
		c.param.eps = param.eps;
		c.param.p = param.p;
		c.param.shrinking = param.shrinking;
		c.param.probability = param.probability;
		c.param.nr_weight = param.nr_weight;
		// Clone the weight arrays so they stay consistent with nr_weight
		// (the original always installed empty arrays, which disagreed with
		// a copied nr_weight > 0).
		c.param.weight_label = param.weight_label.clone();
		c.param.weight = param.weight.clone();

		return c;
	}

	/** Opens the modal parameter-configuration dialog for this classifier. */
	public void configure() {
		ConfigureSvmLibDialog d = new ConfigureSvmLibDialog(Environment.getEnvironment().getMainFrame(), this);
		d.setVisible(true);
	}

	/**
	 * Dispatches one of the actions advertised by {@link #getActions()}.
	 *
	 * @param action one of AC_CHESS or AC_OPTIMIZE; anything else is ignored
	 */
	public void executeAction(String action) {
		if (action.equals(AC_CHESS)) {
			chessTest();
		} else if (action.equals(AC_OPTIMIZE)) {
			optimizeParameters();
		}
	}

	/**
	 * Lists the actions currently available. Parameter optimization is only
	 * offered for the RBF-kernel C-SVC combination (the only setup the
	 * optimizer's gamma/C search is wired for).
	 */
	public String[] getActions() {
		if (param.kernel_type == svm_parameter.RBF && param.svm_type == svm_parameter.C_SVC)
			return new String[] {AC_CHESS, AC_OPTIMIZE};
		else
			return new String[] {AC_CHESS};
	}

	/** @return the live parameter object (mutations affect this classifier). */
	public svm_parameter getParams() {
		return param;
	}

	/** Replaces this classifier's parameter object. */
	public void setParams(svm_parameter p) {
		param = p;
	}

	/** @return a human-readable name for the configured kernel type. */
	private String getKernelString() {
		switch (param.kernel_type) {
		case svm_parameter.LINEAR:
			return "linear";
		case svm_parameter.POLY:
			return "poly";
		case svm_parameter.RBF:
			return "RBF";
		case svm_parameter.SIGMOID:
			return "sigmoid";
		}

		return "unknown";
	}

	/** @return a human-readable name for the configured SVM type. */
	private String getTypeString() {
		switch (param.svm_type) {
		case svm_parameter.C_SVC:
			return "C_SVC";
		case svm_parameter.NU_SVC:
			return "NU_SVC";
		}

		return "unknown";
	}

	/** Short multi-line summary shown in the GUI. */
	public String getQuickInfo() {
		return "LibSVM classifier" + Environment.newline +
		"SVM type: " + getTypeString() + Environment.newline +
		"Kernel type: " + getKernelString();
	}

	/**
	 * Background thread that runs genetic optimization of gamma and C,
	 * reporting progress into an OutputFrame. Closing the frame kills the
	 * thread via the deprecated Thread.stop() — the optimizer does not
	 * poll an interrupt flag, so stop() is the only way to cancel it here.
	 */
	private class OptimizeThread extends Thread {
		public void run() {
			OutputFrame fr = new OutputFrame("Genetic optimization of " + name);
			fr.addInternalFrameListener(new InternalFrameAdapter() {

				@SuppressWarnings("deprecation")
				@Override
				public void internalFrameClosed(InternalFrameEvent arg0) {
					super.internalFrameClosed(arg0);
					// Forcibly terminate the optimization when its output
					// window is closed; see class comment for why stop().
					OptimizeThread.this.stop();
				}

			});
			Environment.getEnvironment().getMainFrame().addWindow(fr);
			fr.setVisible(true);
			AbstractOptimizer ro = new GeneticOptimizer(fr);
			// Search exponents: gamma in [2^-15, 2^3], C in [2^-5, 2^15]
			// (the boolean flag marks the range as logarithmic).
			OptimizableParameter p[] = new OptimizableParameter[2];
			p[0] = new OptimizableParameter(OP_GAMMA, "Gamma", -15, 3, true);
			p[1] = new OptimizableParameter(OP_C, "C", -5, 15, true);
			ro.optimize(p, LibSvmClassifier.this);
		}
	}

	// Testing configuration captured by optimizeParameters() and reused by
	// evaluate() on every fitness evaluation.
	private TestingParameters optimizationParams;

	/**
	 * Interactive entry point for parameter optimization: asks the user for
	 * testing parameters, then launches the optimizer on a background thread.
	 */
	private void optimizeParameters() {
		// Planned flow (translated from the original Lithuanian notes):
		// 1) The user picks the desired optimizer and its settings.
		// 2) The parameters to optimize (and their intervals) are configured.
		// 3) Testing parameters are configured (data model and test type)
		//    <- possibly reuse the test form(?)
		// 4) An optimization object is created (?)
		// 5) An optimization window is created with that object and the
		//    optimization is executed; the window shows progress etc.
		//
		// Only step 2 depends on the classifier, so optimization could be
		// decoupled from the classifier. In that case the classifier would
		// only return the set of optimizable parameters (an
		// OptimizableParameter array) and implement the IOptimizable
		// interface — evaluating itself with the supplied parameters.
		// So in that scheme:
		//   step 1 yields an AbstractOptimizer,
		//   step 2 yields OptimizableParameter[] and IOptimizable (the classifier),
		//   step 3 yields TestingParameters,
		// and then the algorithm runs.

		TestClassifierForm f = new TestClassifierForm(Environment.getEnvironment().getMainFrame(), this);
		f.setNoTest();
		f.setVisible(true);
		optimizationParams = f.getParameters();
		if (optimizationParams == null) return;  // user cancelled
		new OptimizeThread().start();
	}

	/**
	 * Fitness function for the optimizer: applies the candidate parameter
	 * values to this classifier's configuration (mutating {@code param}),
	 * runs a test with the previously captured testing parameters, and
	 * scores the result by the ROC W statistic.
	 *
	 * @param params candidate values, addressed by the OP_* ids
	 * @return the W statistic of the resulting ROC analysis (higher is better)
	 */
	public double evaluate(OptimizableParameter[] params) {
		// Decode the candidate into the live parameter object.
		for (int i = 0; i < params.length; ++i) {
			switch (params[i].getId()) {
			case OP_C:
				param.C = params[i].getValue();
				break;
			case OP_COEF0:
				param.coef0 = params[i].getValue();
				break;
			case OP_DEGREE:
				param.degree = (int) params[i].getValue();
				break;
			case OP_GAMMA:
				param.gamma = params[i].getValue();
				break;
			case OP_NU:
				param.nu = params[i].getValue();
				break;
			}
		}

		AbstractTestSet ts = test(optimizationParams);
		ts.execute();
		ROCAnalysis ra = new ROCAnalysis(ts);

		return ra.getWStatistic();
	}

	/**
	 * Trains the SVM and prompts the user (via a save dialog anchored to
	 * {@code c}) for a file to persist the model to. The classifier is only
	 * marked trained if the model is saved successfully.
	 *
	 * @param mod the labelled data to train on
	 * @param c   parent component for the file chooser and error dialogs
	 */
	public void train(AbstractDataModel mod, Component c) {
		svm_problem problem = convertDataModel(mod);
		model = svm.svm_train(problem, param);
		final JFileChooser fc = new JFileChooser(".");
		int returnVal = fc.showSaveDialog(c);
		if (returnVal == JFileChooser.APPROVE_OPTION && model != null) {
			try {
				File f = fc.getSelectedFile();
				svm.svm_save_model(f.getPath(), model);
				status = STATUS_TRAINED;
			} catch (IOException e) {
				JOptionPane.showMessageDialog(c, "Model saving failed.", "Error", JOptionPane.ERROR_MESSAGE);
			}
		}
	}

}
