/*
 * File:   TestRprop.cpp
 * Author: vvs
 *
 * Created on Aug 8, 2012, 3:42:02 PM
 */

#include "TestRprop.h"
#include "../../Learning/MultilayerPerceptron.h"
#include "../../Learning/SingleSampleOptimizationSupervisor.h"
#include "../../Learning/ArrayOptimizationSupervisor.h"
#include "../../Learning/IRpropPlus.h"
#include "../common/TestUtils.h"

#include <memory>
#include "ReClaM/MSEFFNet.h"
#include "ReClaM/createConnectionMatrix.h"
#include "ReClaM/Rprop.h"
#include "ReClaM/Perceptron.h"
#include "ReClaM/Dataset.h"
#include <vector>
#include <cmath>

CPPUNIT_TEST_SUITE_REGISTRATION(TestRprop);

using namespace std;

TestRprop::TestRprop() {
	// No per-suite state: every test builds its own nets, queues and data.
}

TestRprop::~TestRprop() {
	// Nothing to release; tests own their resources locally.
}

void TestRprop::setUp() {
	// Intentionally empty — no shared fixture to prepare.
}

void TestRprop::tearDown() {
	// Intentionally empty — no shared fixture to clean up.
}

// Returns the first available OpenCL CPU device.
// Fixed: the previous code returned devices[1], which reads out of bounds on
// machines exposing a single CPU device and was inconsistent with
// getGPUDevice(), which returns devices[0].
cl_device_id getCPUDevice() {
	vector<cl_device_id> devices;
	enumOpenCLDevices(devices, CL_DEVICE_TYPE_CPU);
	return devices[0];
}

// Returns the first available OpenCL GPU device.
cl_device_id getGPUDevice() {
	vector<cl_device_id> ids;
	enumOpenCLDevices(ids, CL_DEVICE_TYPE_GPU);
	return ids.front();
}

// Three-way comparison of a float against a double with tolerance eps.
// Returns 0 when the two values agree within eps (relative tolerance for
// magnitudes below 1, absolute tolerance of eps above 1), otherwise -1 when
// f < d and +1 when f > d.
int compareFloatDouble(float f, double d, double eps) {
	const double magnitude = std::max(static_cast<double>(std::abs(f)), std::abs(d));
	const double tolerance = std::min(1.0, magnitude) * eps;
	if (std::abs(d - f) < tolerance) {
		return 0;
	}
	return (f < d) ? -1 : 1;
}

// Element-wise three-way comparison of a float array against a double array
// of the same length; returns the first nonzero element comparison, or 0
// when every pair agrees within eps.
int compareFloatAndDoubleArrays(float *f, double *d, double eps, int count) {
	for (int i = 0; i < count; ++i) {
		const int order = compareFloatDouble(f[i], d[i], eps);
		if (order != 0) {
			return order;
		}
	}
	return 0;
}

// Copies a float vector into a 1-D shark Array<double>, resizing the array
// first when its length differs from the vector's.
void copyVectorTo(vector<float> &v, Array<double> &array) {
	const int count = static_cast<int>(v.size());
	if (array.dim(0) != count) {
		array.resize(count, false);
	}
	for (int i = 0; i < count; ++i) {
		array(i) = v[i];
	}
}

// Fills the float vector and the shark array with numerically identical
// random values in [minValue, maxValue]; the array is resized to the
// vector's current length.
void fillWithRandomData(vector<float> &f, Array<double> &d, 
		float minValue = -1, float maxValue = 1) 
{
	const int n = static_cast<int>(f.size());
	d.resize(n, false);
	for (int i = 0; i < n; ++i) {
		const float value = getRandom(minValue, maxValue);
		f[i] = value;
		d(i) = value;   // stored as double, but value-identical to f[i]
	}
}

// Generates sampleCount random input/target pairs and stores each pair both
// in the supervisor (as floats) and in the shark arrays (as doubles), so the
// two representations hold numerically identical training data.
void fillWithRandomData(learn::ArrayOptimizationSupervisor &outProvider, 
		Array<double> &inputs, Array<double> &outputs, int sampleCount) 
{
	const int inputLength = outProvider.getInputLength();
	const int targetLength = outProvider.getTargetLength();

	inputs.resize(sampleCount, inputLength, false);
	outputs.resize(sampleCount, targetLength, false);

	// scratch buffers reused for every sample
	vector<float> sampleInputs(inputLength);
	vector<float> sampleTargets(targetLength);

	for (int sample = 0; sample < sampleCount; ++sample) {
		for (int k = 0; k < inputLength; ++k) {
			inputs(sample, k) = sampleInputs[k] = getRandom(-1, 1);
		}
		for (int k = 0; k < targetLength; ++k) {
			outputs(sample, k) = sampleTargets[k] = getRandom(-1, 1);
		}
		outProvider.addSample(&sampleInputs[0], &sampleTargets[0]);
	}
}

// Computes the MSE error gradient with both implementations (OpenCL net vs
// shark reference) on the same data and asserts that the gradients — and,
// for single-sample supervisors, the aggregate MSE — agree within epsilon.
void testErrorDerivative(learn::MultilayerPerceptron &net, learn::OptimizationSupervisor &supervisor,
	MSEFFNet &sharkNet, Array<double> &inputs, Array<double> &targets,
	double epsilon) 
{
	const int parameterCount = sharkNet.getParameterDimension();
	vector<float> gradient(parameterCount);
	Array<double> sharkGradient(parameterCount);

	const float mse = net.getErrorDerivative(net, supervisor, &gradient[0]);
	const double sharkMse = sharkNet.errorDerivative(sharkNet, inputs, targets, sharkGradient);

	// the aggregate error is only checked for a single sample — presumably
	// the two implementations aggregate multi-sample error differently
	if (supervisor.getSampleCount() == 1) {
		CPPUNIT_ASSERT(compareFloatDouble(mse, sharkMse, epsilon) == 0);
	}
	CPPUNIT_ASSERT(compareFloatAndDoubleArrays(&gradient[0], &sharkGradient(0), epsilon, parameterCount) == 0);
}

// Runs five iRprop+ optimization steps with both implementations on the same
// data; after every step all network parameters must match the shark
// reference within epsilon.
void testRpropOptimizing(
	learn::MultilayerPerceptron &net, learn::Optimizer &optimizer, learn::OptimizationSupervisor &learnSupervisor,
	MSEFFNet &sharkNet, IRpropPlus &sharkOptimizer, Array<double> &inputs, Array<double> &targets,
	double epsilon) 
{
	const int parameterCount = sharkNet.getParameterDimension();
	vector<float> learnedParams(parameterCount);

	const int stepCount = 5;
	for (int step = 0; step < stepCount; ++step) {
		sharkOptimizer.optimize(sharkNet, sharkNet, inputs, targets);
		optimizer.optimize(net, net, learnSupervisor);

		// check parameters to be identical after this step
		net.getParameters(&learnedParams[0], 0, parameterCount);
		for (int p = 0; p < parameterCount; ++p) {
			CPPUNIT_ASSERT(compareFloatDouble(learnedParams[p], sharkNet.getParameter(p), epsilon) == 0);
		}
	}
}

void TestRprop::testMultilayerPerceptronRun() {
	// Checks that a forward pass of the OpenCL-backed MultilayerPerceptron
	// produces the same outputs as the shark MSEFFNet reference when both
	// nets share identical (random) weights and inputs.
	// NOTE(review): the command queue is never released — acceptable in a
	// short-lived test process, but a clReleaseCommandQueue would be cleaner.
    cl_command_queue queue = createQueue(getCPUDevice(), CL_QUEUE_PROFILING_ENABLE | CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE);
    
	double eps = 1E-5;
	int inputCount = 5;
	int outputCount = 3;
	int hiddenCount = 5;
	
	// create nets — the shark net and the OpenCL net are built with the same
	// 5-5-3 topology (connection matrix with biases for the reference)
	Array<int> connections;
	createConnectionMatrix(connections, inputCount, hiddenCount, outputCount, true, false, false, true);
	MSEFFNet sharkNet = MSEBFFNet(inputCount, outputCount, connections);
	int numParams = sharkNet.getParameterDimension();
		
	learn::MultilayerPerceptron::Builder builder;
	auto_ptr<learn::MultilayerPerceptron> net = auto_ptr<learn::MultilayerPerceptron>(builder
			.setInputCount(inputCount)
			.addHiddenLayer(hiddenCount)
			.addHiddenLayer(outputCount)
			.create(queue));
		
	// both nets must expose the same number of trainable parameters,
	// otherwise the parameter copy below would be meaningless
	CPPUNIT_ASSERT(numParams == net->getParameterCount());
	
	// randomize parameters — the same values are pushed into both nets;
	// the small [-0.1, 0.1] range presumably keeps float/double divergence
	// within eps
	vector<float> params(numParams);
	for (int i = 0; i < numParams; i++) {
		sharkNet.setParameter(i, params[i] = getRandom(-0.1f, 0.1f));
	}
	net->setParameters(&params[0], 0, numParams);
		
	// run both nets on the same random input
	vector<float> inputs(inputCount);
	vector<float> outputs(outputCount);
	Array<double> sharkInput(inputCount);
	Array<double> sharkOutput(outputCount);
	
	fillRandom(&inputs[0], inputCount);
	copyVectorTo(inputs, sharkInput);
		
	net->run(&inputs[0], &outputs[0]);	
	sharkNet.model(sharkInput, sharkOutput);
	
	// compare run results
	CPPUNIT_ASSERT(compareFloatAndDoubleArrays(&outputs[0], &sharkOutput(0), eps, outputCount) == 0);
}

void TestRprop::testMultilayerPerceptronErrorDerivative() {
	// Compares the MSE error gradient of the OpenCL MultilayerPerceptron
	// against the shark MSEFFNet reference, first on a single random sample
	// and then on a batch of 50 samples. Both nets share identical weights.
	// NOTE(review): the command queue is never released.
    cl_command_queue queue = createQueue(getCPUDevice(), CL_QUEUE_PROFILING_ENABLE | CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE);
    
	double eps = 1E-5;
	int inputCount = 5;
	int outputCount = 3;
	int hiddenCount = 5;
	
	// create nets with identical 5-5-3 topology
	Array<int> connections;
	createConnectionMatrix(connections, inputCount, hiddenCount, outputCount, true, false, false, true);
	MSEFFNet sharkNet = MSEBFFNet(inputCount, outputCount, connections);
	int numParams = sharkNet.getParameterDimension();
		
	learn::MultilayerPerceptron::Builder builder;
	auto_ptr<learn::MultilayerPerceptron> net = auto_ptr<learn::MultilayerPerceptron>(builder
			.setInputCount(inputCount)
			.addHiddenLayer(hiddenCount)
			.addHiddenLayer(outputCount)
			.create(queue));
		
	// randomize parameters — the same values go into both nets
	vector<float> params(numParams);
	for (int i = 0; i < numParams; i++) {
		sharkNet.setParameter(i, params[i] = getRandom(-0.1f, 0.1f));
	}
	net->setParameters(&params[0], 0, numParams);
						
	// single sample: identical random data held as float (for the OpenCL
	// net) and double (for shark)
	Array<double> sharkSingleInputs = Array<double>(inputCount);
	Array<double> sharkSingleTargets = Array<double>(outputCount);
	vector<float> singleInputs(inputCount);
	vector<float> singleTargets(outputCount);
	
	fillWithRandomData(singleInputs, sharkSingleInputs);
	fillWithRandomData(singleTargets, sharkSingleTargets);
	
	learn::SingleSampleOptimizationSupervisor data(
		&singleInputs[0], inputCount,
		&singleTargets[0], outputCount);
	
	testErrorDerivative(*net, data,
			sharkNet, sharkSingleInputs, sharkSingleTargets,
			eps);
	
	// multiple samples: the overload fills the supervisor and the shark
	// arrays with the same 50 random samples
	int dataSize = 50;
	Array<double> sharkInputs = Array<double>(0, 0);
	Array<double> sharkTargets = Array<double>(0, 0);	
	learn::ArrayOptimizationSupervisor trainData(inputCount, outputCount);			
	
	fillWithRandomData(trainData, sharkInputs, sharkTargets, dataSize);	
	
	testErrorDerivative(*net, trainData,
			sharkNet, sharkInputs, sharkTargets,
			eps);
}

void TestRprop::testRpropOptimizer() {
	// End-to-end check of the OpenCL iRprop+ optimizer: both nets start from
	// identical weights and train on identical random data; after every
	// optimization step all parameters must agree with the shark reference
	// within eps (see testRpropOptimizing).
	// NOTE(review): the command queue is never released.
    cl_command_queue queue = createQueue(getCPUDevice(), CL_QUEUE_PROFILING_ENABLE | CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE);
	
	double eps = 1E-5;
	int inputCount = 5;
	int outputCount = 3;
	int hiddenCount = 5;
	
	// create nets with identical 5-5-3 topology
	Array<int> connections;
	createConnectionMatrix(connections, inputCount, hiddenCount, outputCount, true, false, false, true);
	MSEFFNet sharkNet = MSEBFFNet(inputCount, outputCount, connections);
	int numParams = sharkNet.getParameterDimension();
		
	learn::MultilayerPerceptron::Builder builder;
	auto_ptr<learn::MultilayerPerceptron> net = auto_ptr<learn::MultilayerPerceptron>(builder
			.setInputCount(inputCount)
			.addHiddenLayer(hiddenCount)
			.addHiddenLayer(outputCount)
			.create(queue));
		
	// randomize parameters — the same values go into both nets
	vector<float> params(numParams);
	for (int i = 0; i < numParams; i++) {
		sharkNet.setParameter(i, params[i] = getRandom(-0.1f, 0.1f));
	}
	net->setParameters(&params[0], 0, numParams);
	
	// test learning on 50 shared random samples
	int dataSize = 50;
	learn::ArrayOptimizationSupervisor trainData(inputCount, outputCount);
	Array<double> sharkInputs = Array<double>(0, 0);
	Array<double> sharkTargets = Array<double>(0, 0);		
	fillWithRandomData(trainData, sharkInputs, sharkTargets, dataSize);	
	
	learn::IRpropPlus myOptimizer = learn::IRpropPlus(queue, numParams);
	IRpropPlus sharkOptimizer;
	sharkOptimizer.init(sharkNet);
	
	testRpropOptimizing(*net, myOptimizer, trainData,
			sharkNet, sharkOptimizer, sharkInputs, sharkTargets,
			eps);
}

void TestRprop::benchmarkMyiRprop() {	
	// Times 50 iRprop+ iterations of the OpenCL implementation on a 50-50-4
	// network with 500 random samples. Uses a fixed seed so the data matches
	// benchmarkSharkiRprop() and runs are comparable.
	// NOTE(review): the command queue is never released.
	cl_command_queue queue = createQueue(getCPUDevice(), CL_QUEUE_PROFILING_ENABLE | CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE);
	
	int inputCount = 50;	
	int hiddenCount = 50;
	int outputCount = 4;
	
	int sampleCount = 500;
	int numIterations = 50;
	
	learn::MultilayerPerceptron::Builder builder;
	auto_ptr<learn::MultilayerPerceptron> net = auto_ptr<learn::MultilayerPerceptron>(builder
			.setInputCount(inputCount)
			.addHiddenLayer(hiddenCount)
			.addHiddenLayer(outputCount)
			.create(queue));
	
	// fixed seed for reproducible benchmark data
	srand(123456);
	
	// randomize weights	
	size_t parameterCount = net->getParameterCount();
	vector<float> parameters(parameterCount);
	fillRandom(&parameters[0], parameterCount);
	net->setParameters(&parameters[0], 0, parameters.size());
			
	// fill supervisor's data with 500 random samples
	learn::ArrayOptimizationSupervisor supervisor(inputCount, outputCount);	
	vector<float> inputs(inputCount);
	vector<float> outputs(outputCount);
	for (int i = 0; i < sampleCount; i++) {
		fillRandom(&inputs[0], inputCount);
		fillRandom(&outputs[0], outputCount);
		supervisor.addSample(&inputs[0], &outputs[0]);
	}
	
	// benchmark — Measure presumably reports elapsed time for the enclosed
	// scope (see common/TestUtils)
	learn::IRpropPlus optimizer(queue, parameterCount);	
	
	{
		Measure m("benchmark iRprop+");	
		for (int i = 0; i < numIterations; i++) {
			optimizer.optimize(*net, *net, supervisor);
		}
	}
}

// Times 50 iRprop+ iterations of the shark reference implementation on a
// 50-50-4 network with 500 random samples. Uses the same fixed seed as
// benchmarkMyiRprop() so the two benchmarks are comparable.
void TestRprop::benchmarkSharkiRprop() {
	const int inputCount = 50;
	const int hiddenCount = 50;
	const int outputCount = 4;

	const int sampleCount = 500;
	const int numIterations = 50;

	// build the reference network
	Array<int> connections;
	createConnectionMatrix(connections, inputCount, hiddenCount, outputCount, true, false, false, true);
	MSEFFNet sharkNet = MSEBFFNet(inputCount, outputCount, connections);
	const int numParams = sharkNet.getParameterDimension();

	// fixed seed for reproducible benchmark data
	srand(123456);

	// randomize weights
	for (int p = 0; p < numParams; ++p) {
		sharkNet.setParameter(p, getRandom());
	}

	// generate random training data
	Array<double> inputs(sampleCount, inputCount);
	Array<double> targets(sampleCount, outputCount);

	for (int sample = 0; sample < sampleCount; ++sample) {
		for (int k = 0; k < inputCount; ++k) {
			inputs(sample, k) = getRandom(-1, 1);
		}
		for (int k = 0; k < outputCount; ++k) {
			targets(sample, k) = getRandom(-1, 1);
		}
	}

	IRpropPlus sharkOptimizer;
	sharkOptimizer.init(sharkNet);

	{
		Measure m("benchmark shark iRprop+");
		for (int i = 0; i < numIterations; ++i) {
			sharkOptimizer.optimize(sharkNet, sharkNet, inputs, targets);
		}
	}
}
