/* CS181 Neural Network code adapted for Plants final project */


#include "data-reader.h"
#include "neural-net.h"

#include <assert.h>
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <map>
#include <sstream>

using std::cout;
using std::endl;
using std::map;
using std::stringstream;

/**
 * Creates the 6x6 grid of input nodes and registers each one with the
 * network as an INPUT node.
 */
void InitInputs(NeuralNetwork* network) {
  const int kNumInputs = 6 * 6;
  for (int idx = 0; idx < kNumInputs; ++idx) {
    network->AddNode(new Node(), NeuralNetwork::INPUT);
  }
}

void InitMultiHiddenLayerNetwork(int n, NeuralNetwork* network) {
  InitInputs(network);
  for (int i = 0; i < n; ++i) {
    Node* node = new Node();
    for (int j = 0; j < (int) network->inputs_.size(); ++j) {
      node->AddInput(network->inputs_[j], NULL, network);
    }
    network->AddNode(node, NeuralNetwork::HIDDEN);
  }
  /*
  int size = 2 - n + 1;
  for (int i = 0; i < 3 - size; ++i) {
    for (int j = 0; j < 3 - size; ++j) {
      Node* node = new Node();
      for (int k = 0; k < size; ++k) {
        for (int l = 0; l < size; ++l) {
          node->AddInput(network->inputs_[(i + k) * 2 + j + l], NULL, network);
        }
      }
      network->AddNode(node, NeuralNetwork::HIDDEN);
    }
  }*/
  for (int i = 0; i < 2; ++i) {
    Node* node = new Node();
    for (size_t j = 0; j < network->hidden_nodes_.size(); ++j) {
      node->AddInput(network->hidden_nodes_[j], NULL, network);
    }
    network->AddNode(node, NeuralNetwork::OUTPUT);
  }
}

/**
 * Simple network without any hidden layers: each of the two output nodes
 * is connected directly to all 36 inputs.
 */
void InitSingleLayerNetwork(NeuralNetwork* network) {
  // 6 * 6 inputs_
  InitInputs(network);
  const int kNumOutputs = 2;
  for (int out = 0; out < kNumOutputs; ++out) {
    Node* output = new Node();
    for (size_t in = 0; in < network->inputs_.size(); ++in) {
      output->AddInput(network->inputs_[in], NULL, network);
    }
    network->AddNode(output, NeuralNetwork::OUTPUT);
  }
}

/**
 * Network with one hidden node per row and one per column of the 6x6
 * input grid; every hidden node then feeds both output nodes.
 */
void InitSimpleHiddenNetwork(NeuralNetwork* network) {
  InitInputs(network);
  const int kGridSize = 6;
  for (int line = 0; line < kGridSize; ++line) {
    Node* row_node = new Node();
    Node* col_node = new Node();
    for (int offset = 0; offset < kGridSize; ++offset) {
      // row_node scans across row `line`; col_node scans down column `line`.
      row_node->AddInput(network->inputs_[line * kGridSize + offset], NULL, network);
      col_node->AddInput(network->inputs_[offset * kGridSize + line], NULL, network);
    }
    network->AddNode(row_node, NeuralNetwork::HIDDEN);
    network->AddNode(col_node, NeuralNetwork::HIDDEN);
  }
  // Connect every hidden node to each of the two outputs.
  for (int out = 0; out < 2; ++out) {
    Node* output = new Node();
    for (size_t h = 0; h < network->hidden_nodes_.size(); ++h) {
      output->AddInput(network->hidden_nodes_[h], NULL, network);
    }
    network->AddNode(output, NeuralNetwork::OUTPUT);
  }
}

/**
 * Custom topology for plant recognition: one hidden node per row of the
 * 6x6 input grid (no column nodes), all fully connected to the two
 * output nodes.
 */
void InitPlantNetwork(NeuralNetwork* network) {
  InitInputs(network);
  const int kGridSize = 6;
  for (int row = 0; row < kGridSize; ++row) {
    Node* row_node = new Node();
    for (int col = 0; col < kGridSize; ++col) {
      row_node->AddInput(network->inputs_[row * kGridSize + col], NULL, network);
    }
    network->AddNode(row_node, NeuralNetwork::HIDDEN);
  }
  // Connect every hidden node to each of the two outputs.
  for (int out = 0; out < 2; ++out) {
    Node* output = new Node();
    for (size_t h = 0; h < network->hidden_nodes_.size(); ++h) {
      output->AddInput(network->hidden_nodes_[h], NULL, network);
    }
    network->AddNode(output, NeuralNetwork::OUTPUT);
  }
}


/////////////////////////////////////////////////////////

class PlantNetwork {
 public:
  PlantNetwork(const string& type) {
    if (type == "hidden") {
      printf("Using hidden network.\n");
      InitSimpleHiddenNetwork(&network_);
    } else if (type.find("multi-hidden-") == 0) {
      int nodes = atoi(type.substr(string("multi-hidden-").size()).c_str());
      InitMultiHiddenLayerNetwork(nodes, &network_);
      printf("Using single hidden layer with %d nodes\n", nodes);
    } else if (type.find("plant") == 0) {
			InitPlantNetwork(&network_);
    } else {
      printf("Using single layer network.\n");
      InitSingleLayerNetwork(&network_);
    }
  }

  Target EncodeLabel(int label) {
    Target target;
    for (int i = 0; i < 2; ++i) {
      if (label == i) {
        target.values.push_back(1);
      } else {
        target.values.push_back(0);
      }
    }
    
    return target;
  }

  int GetNetworkLabel() {
    // Indeterminate if there is more than one output that is a one.
    int label = 0;
    double best_score = 0.0;
    for (size_t i = 0; i < network_.outputs_.size(); ++i) {
      Node* node = network_.outputs_[i];
      if (i == 0 || node->transformed_value_ > best_score) {
        label = i;
        best_score = node->transformed_value_;
      }
    }
    
    //printf("LABEL: %d, score[0]=%f, score[1]=%f\n", label, network_.outputs_[0]->transformed_value_, network_.outputs_[1]->transformed_value_);
    return label;
  }

  Input Convert(Image* image) {
    Input input;
    for (size_t i = 0; i < image->pixels_.size(); ++i) {
      for (size_t j = 0; j < image->pixels_[i].size(); ++j) {
        //assert(image->pixels_[i][j] < 256);
        //input.values.push_back(image->pixels_[i][j] / 256.0);
        input.values.push_back(image->pixels_[i][j]);
      }
    }
    return input;
  }

  int Classify(Image* image) {
    Input input = Convert(image);
    network_.FeedForward(input);
    return GetNetworkLabel();
  }

  double Performance(const vector<Image*>& images) {
    int num_correct = 0;
    for (size_t i = 0; i < images.size(); ++i) {
      if (Classify(images[i]) == images[i]->label_) {
        num_correct++;
      }
    }
    return num_correct * 1.0 / images.size();
  }

  void Train(const vector<Image*>& images,
             const vector<Image*>& validation_images,
             double learning_rate, int epochs,
             int max_periods_without_improvement,
             double improvement_perc,
						 int printFlag) {
    vector<Input> inputs_;
    vector<Target> targets;
    for (size_t i = 0; i < images.size(); ++i) {
      inputs_.push_back(Convert(images[i]));
      targets.push_back(EncodeLabel(images[i]->label_));
    }
    int periods_without_improvement = 0;
    double best = 0.0;
    for (int i = 0; i < epochs; ++i) {
      network_.Train(inputs_, targets, learning_rate, 1, printFlag);
      double validation = Performance(validation_images);
      printf("Training Perf: %.3f Validation Perf: %.3f\n", Performance(images), validation);
      if (validation > best) {
        if (validation > (1 + improvement_perc) * best) {
          periods_without_improvement = 0;
        } else {
          periods_without_improvement++;
        }
        best = validation;
      } else {
        periods_without_improvement++;
      }
      //printf("best: %.3f periods_without_improvement: %d\n",
             //best, periods_without_improvement);
      if (max_periods_without_improvement != -1 &&
          periods_without_improvement > max_periods_without_improvement) {
        printf("Stopping because of lack of improvement.\n");
        break;
      }
      fflush(stdout);
    }
  }

  NeuralNetwork network_;
};

/**
 * Parses command-line arguments into a key/value map. Tokens starting with
 * '-' are keys; a following non-dash token becomes that key's value.
 * Valueless flags map to "".
 *
 * NOTE(review): a negative-number value (e.g. "-n -1") is indistinguishable
 * from a flag here and is treated as a new key — pre-existing limitation.
 */
void FillArgsMap(char** argv, int argc, map<string, string>* args_map) {
  string curkey;
  for (int i = 1; i < argc; ++i) {
    string arg = argv[i];
    if (arg.find("-") == 0) {
      // Starting a new key; the previous key (if pending) had no value.
      if (!curkey.empty()) {
        printf("Setting key: %s\n", curkey.c_str());
        (*args_map)[curkey] = "";
      }
      curkey = arg;
    } else {
      assert(!curkey.empty());
      printf("Setting key: %s, value: %s\n", curkey.c_str(), arg.c_str());
      (*args_map)[curkey] = arg;
      curkey = "";
    }
  }
  // Bug fix: a trailing valueless flag (e.g. "prog -v") used to be silently
  // dropped because the loop ended with curkey still pending.
  if (!curkey.empty()) {
    printf("Setting key: %s\n", curkey.c_str());
    (*args_map)[curkey] = "";
  }
}

int main(int argc, char** argv) {
  // Flags initialization.  If you want to add more flags, FillArgsMap will put
  // them into the map, but you need to handle the default initialization here.
  map<string, string> args_map;
  // Weights file to restore from (empty = start from scratch).
  args_map["-w"] = "";
  // Epochs
  args_map["-e"] = "10";
  // Learning rate
  args_map["-r"] = "0.0005";
  // Network type
  args_map["-t"] = "simple";
  // Random weights
  args_map["-random"] = "0";
  // Number of training examples (-1 = all)
  args_map["-n"] = "-1";
  // Early stopping: max epochs without improvement (-1 = never stop early).
  args_map["-max-periods"] = "-1";
  // Relative improvement threshold for early stopping.
  args_map["-perc"] = "0.01";
  args_map["-v"] = "0";
  // Verbose training printout flag.
  args_map["-p"] = "0";
  FillArgsMap(argv, argc, &args_map);

  string weights_file = args_map["-w"];
  double learning_rate = atof(args_map["-r"].c_str());
  int epochs = atoi(args_map["-e"].c_str());
  int random = atoi(args_map["-random"].c_str());
  int training = atoi(args_map["-n"].c_str());
  int max_periods = atoi(args_map["-max-periods"].c_str());
  double perc = atof(args_map["-perc"].c_str());
  int printFlag = atoi(args_map["-p"].c_str());
  printf("Running with parameters: (weights_file: %s, learning_rate: %f, epochs: %d, random: %d)\n",
         weights_file.c_str(), learning_rate, epochs, random);

  vector<Image*> training_images = DataReader::GetImages("plant-training.txt", training);
  vector<Image*> validation_images = DataReader::GetImages("plant-validation.txt", -1);
  vector<Image*> test_images = DataReader::GetImages("plant-test.txt", -1);

  PlantNetwork network(args_map["-t"]);
  if (!weights_file.empty()) {
    network.network_.InitFromWeights(DataReader::ReadWeights(weights_file));
  } else if (random) {
    srand(time(NULL));
    // Bug fix: vector::size() is size_t; passing it to %d is undefined
    // behavior on LP64 platforms — use %zu.
    printf("Setting random initial weights for %zu weights.\n",
           network.network_.weights_.size());
    // Small symmetric initial weights in [-0.01, 0.01).
    for (size_t i = 0; i < network.network_.weights_.size(); ++i) {
      network.network_.weights_[i]->value = drand48() * 0.02 - 0.01;
    }
  }
  printf("inputs: %zu, hidden: %zu, outputs: %zu\n",
         network.network_.inputs_.size(),
         network.network_.hidden_nodes_.size(),
         network.network_.outputs_.size());
  network.Train(training_images, validation_images, learning_rate, epochs,
                max_periods, perc, printFlag);
  printf("Test Performance: %f\n", network.Performance(test_images));

  stringstream ss;
  ss << "weights" << time(0);
  // Write out the weights so that we can restart.
  DataReader::DumpWeights(network.network_.GetWeights(), ss.str());
  ClearVector(training_images);
  // Bug fix: validation images were previously never freed (leak).
  ClearVector(validation_images);
  ClearVector(test_images);
  return 0;
}
