#include <fann.h>

#include <assert.h>
#include <memory.h>
#include <stdio.h>	/* printf, fprintf, fopen, fscanf, fclose */
#include <stdlib.h>	/* calloc, free, exit */

#define MIMO_FANN

#undef ENABLE_RTRL			// To enable RTRL training (NOTE: DEBUGGING)

#undef PRINT_WEIGHTS_DURING_TRAINING

#define MAX_MSE		0.001	// MSE threshold to stop training
#define MAX_ITERS	100000	// Max iterations to stop training
#define LEARNING_RATE   0.2	// Learning rate during training

#define NUM_CHARS   95
#define NUM_INPUT   NUM_CHARS
#define NUM_OUTPUT  NUM_CHARS
#define NUM_HIDDEN  200

/* Run network so that we can test for correct
     MSE computation and run computation */
/* Run network so that we can test for correct
     MSE computation and run computation.
   Prints the output neurons and the MSE against an all-zero
   desired-output vector, both before and after a single forward run. */
void TestRun(struct fann *ann)
{
	struct fann_layer  *layer  = NULL;
	struct fann_neuron *neuron = NULL;
	unsigned int i = 0;
	fann_type *desired_output = NULL;

	assert(ann != NULL);
	assert(ann->first_layer != NULL);

	layer = ann->first_layer;
	neuron = layer->first_neuron;
	(void)neuron;	/* only read when PRINT_WEIGHTS_DURING_TRAINING is defined */

	// Create desired outputs of 0
	desired_output = calloc(ann->num_output, sizeof(fann_type));
	if (desired_output == NULL)
	{
		fprintf(stderr, "error: out of memory allocating desired outputs\n");
		exit(1);
	}

	// Note that network outputs are the
	//   last 'ann->num_output' neurons!
	printf("\nNETWORK OUTPUTS (pre-run, last neuron only):\n");
	for (i=0; i<ann->num_output; i++)
	{
		printf("%f ", ann->output[i]);
	}
	printf("\n");

	// Compute MSE initially
	fann_reset_MSE(ann);
	fann_compute_MSE_fully_recurrent(ann, desired_output);
	// Cast: num_MSE is a counter; %d with a non-int type is undefined
	printf("MSE: %f (counted %d)\n", fann_get_MSE(ann), (int) ann->training_params->num_MSE);

#ifdef PRINT_WEIGHTS_DURING_TRAINING
	// Display weights
	for (neuron=ann->first_layer->first_neuron; neuron != ann->first_layer->last_neuron; neuron++)
	{
		// Cast: pointer difference is ptrdiff_t, not int
		printf("\nNeuron %d:\n", (int) (neuron - ann->first_layer->first_neuron));

		for (i=0; i<neuron->num_weights; i++)
		{
			// Inputs come from net inputs and neurons
			//   +1 for bias input neuron
			if (i < ann->num_input + 1)
			{
				printf("Weight %u: %f; Input %u: %f\n",
					i, neuron->weights[i], i, layer->inputs[i]);
			}
			else
			{
				printf("Weight %u: %f; Neuron %u: %f\n",
					i, neuron->weights[i], i - ann->num_input - 1,
					layer->outputs[i - ann->num_input - 1]);
			}
		}
	}
#endif // PRINT_WEIGHTS_DURING_TRAINING

	// Run network
	layer->run(ann, layer);

	// Display afterward outputs
	printf("\nNETWORK OUTPUTS (post-run, last neuron only):\n");
	for (i=0; i<ann->num_output; i++)
	{
		printf("%f ", ann->output[i]);
	}
	printf("\n");

	// Display final MSE
	fann_reset_MSE(ann);
	fann_compute_MSE_fully_recurrent(ann, desired_output);
	printf("MSE: %f (counted %d)\n", fann_get_MSE(ann), (int) ann->training_params->num_MSE);

	free(desired_output);	// was leaked in the original
}

struct fann_train_data* read_train_data(const char* filename, unsigned int skip, unsigned int count) {
	unsigned int i      = 0;
	struct fann_train_data* data = NULL;

	FILE *fp_train = fopen(filename, "r");
	if(!fp_train) {
		fprintf(stderr, "error: cannot open training file %s\n", filename);
		exit(1);
	}
	unsigned int num_training_samples = 0, input_sz = 0, output_sz = 0;
	if(fscanf(fp_train, "%d %d %d", &num_training_samples, &input_sz, &output_sz) == EOF) {
		fprintf(stderr, "error: premature end of file\n");
		exit(1);
	}
	num_training_samples -= skip;
	if(num_training_samples > count) num_training_samples = count;
	data = fann_create_train(num_training_samples, input_sz, output_sz);
	int v[95];
	unsigned int line = 0;
	while(fscanf(fp_train, "%d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d %d", &v[0], &v[1], &v[2], &v[3], &v[4], &v[5], &v[6], &v[7], &v[8], &v[9], &v[10], &v[11], &v[12], &v[13], &v[14], &v[15], &v[16], &v[17], &v[18], &v[19], &v[20], &v[21], &v[22], &v[23], &v[24], &v[25], &v[26], &v[27], &v[28], &v[29], &v[30], &v[31], &v[32], &v[33], &v[34], &v[35], &v[36], &v[37], &v[38], &v[39], &v[40], &v[41], &v[42], &v[43], &v[44], &v[45], &v[46], &v[47], &v[48], &v[49], &v[50], &v[51], &v[52], &v[53], &v[54], &v[55], &v[56], &v[57], &v[58], &v[59], &v[60], &v[61], &v[62], &v[63], &v[64], &v[65], &v[66], &v[67], &v[68], &v[69], &v[70], &v[71], &v[72], &v[73], &v[74], &v[75], &v[76], &v[77], &v[78], &v[79], &v[80], &v[81], &v[82], &v[83], &v[84], &v[85], &v[86], &v[87], &v[88], &v[89], &v[90], &v[91], &v[92], &v[93], &v[94]) != EOF) {
		if(skip > 0) {skip--; continue;}
		if(!num_training_samples--) break;
		for(i = 0; i < 95; i++) {
			if(line & 1) data->output[line >> 1][i] = 2.0 * v[i] - 1.0;
			else data->input[line >> 1][i] = 2.0 * v[i] - 1.0;
		}
	}
	fclose(fp_train);
	return data;
}

/* Entry point: load the text-generation training set, build a fully
   recurrent network, run a sanity pass, optionally RTRL-train, then
   release all resources.  Returns 0 on success, -1 if a fixed-point
   test fails (RTRL/FIXEDFANN builds only). */
int main()
{
	struct fann_train_data* data = NULL;
	struct fann *ann    = NULL;
	int ret = 0;	// was referenced but never declared in the FIXEDFANN path
#ifdef ENABLE_RTRL
	// Only needed by the RTRL test loop; keeping them inside the #ifdef
	// avoids unused-variable warnings in the default build
	fann_type *calc_out = NULL;
	unsigned int i      = 0;
#endif

	/* read train file: */
	data = read_train_data("../../../recurrent_text_gen.dat", 0, 10000);

	/* Create the recurrent net and train it */
	ann = fann_create_fully_recurrent(NUM_HIDDEN, NUM_INPUT, NUM_OUTPUT);

	// Run the network for training
	TestRun(ann);

	// Run RTRL training if enabled
#ifdef ENABLE_RTRL
	printf("Training RTRL ...\n");
	fann_train_rtrl(ann, data, MAX_MSE, MAX_ITERS, LEARNING_RATE);

	// XOR Test after RTRL
	for(i = 0; i < fann_length_train_data(data); i++)
	{
		fann_reset_MSE(ann);
		calc_out = fann_test(ann, data->input[i], data->output[i]);
#ifdef FIXEDFANN
		printf("XOR test (%d, %d) -> %d, should be %d, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   (float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann));

		if((float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann) > 0.2)
		{
			printf("Test failed\n");
			ret = -1;
		}
#else
		printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   (float) fann_abs(calc_out[0] - data->output[i][0]));
#endif
	}	// BUG FIX: the for-loop brace previously sat inside the #else arm,
		//   so FIXEDFANN builds failed to compile
#endif // ENABLE_RTRL

	// Release resources (previously leaked: cleanup was commented out)
	fann_destroy_train(data);
	fann_destroy(ann);

	return ret;
}

/*
 * vim: ts=2 smarttab smartindent shiftwidth=2 nowrap noet
 */
