#include "neural-net.h"
#include <cassert>
#include <math.h>
#include <cstdio>
#include <cstdlib>
#include <map>

using std::map;

// Allocates a fresh zero-valued weight, registers it with the network
// (which owns it and frees it in the destructor), and returns it.
Weight* NeuralNetwork::GetNewWeight() {
  Weight* const fresh = new Weight();
  fresh->value = 0;
  weights_.push_back(fresh);
  return fresh;
}

void NeuralNetwork::InitFromWeights(const vector<double>& weights_) {
  assert(this->weights_.size() == weights_.size());
  for (size_t i = 0; i < weights_.size(); ++i) {
    this->weights_[i]->value = weights_[i];
  }
}

// Snapshots the current weight values, in registration order. The returned
// vector is suitable for later use with InitFromWeights().
vector<double> NeuralNetwork::GetWeights() {
  vector<double> values;
  values.reserve(weights_.size());
  for (size_t idx = 0; idx < weights_.size(); ++idx) {
    values.push_back(weights_[idx]->value);
  }
  return values;
}

// Derivative of the logistic sigmoid:
//   d/dx 1/(1 + exp(-x)) = exp(-x) / (1 + exp(-x))^2.
// The derivative is symmetric (sigma'(x) == sigma'(-x)), so we evaluate it
// with e = exp(-|x|), which always lies in (0, 1] and never overflows.
// This collapses the original two branches (the negative branch existed to
// avoid inf/inf) into one, and replaces pow(x, 2) -- a general
// transcendental call -- with a plain multiplication.
double NeuralNetwork::SigmoidPrime(double value) {
  const double e = exp(-fabs(value));  // in (0, 1]; no overflow possible
  const double denom = 1.0 + e;
  return e / (denom * denom);
}

// Logistic function: maps any real value into (0, 1). Saturates cleanly at
// the extremes (exp(-x) overflows to +inf for very negative x, giving 0).
double NeuralNetwork::Sigmoid(double value) {
  const double neg_exp = exp(-value);
  return 1.0 / (1 + neg_exp);
}

// Releases all heap-allocated nodes and weights owned by the network.
// ClearVector is declared in neural-net.h; presumably it deletes each
// element and empties the vector -- TODO confirm against the header.
NeuralNetwork::~NeuralNetwork() {
  ClearVector(inputs_);
  ClearVector(hidden_nodes_);
  ClearVector(outputs_);
  ClearVector(weights_);
}

void NeuralNetwork::MarkAsComplete() {
  // Before we mark ourselves as complete, we check that the inputs of all nodes occur before them
  // in the node arrays.
  set<Node*> nodes;
  for (size_t i = 0; i < inputs_.size(); ++i) {
    nodes.insert(inputs_[i]);
    // Inputs should not have any inputs of their own.
    if (inputs_[i]->inputs_.size() > 0) {
      printf("Found an input node that has inputs of its own!");
      assert(false);
    }
  }

  for (size_t i = 0; i < hidden_nodes_.size(); ++i) {
    Node* node = hidden_nodes_[i];
    for (size_t j = 0; j < node->inputs_.size(); ++j) {
      if (!Contains(nodes, node->inputs_[j])) {
        printf("Found node that occurs before one of its inputs!");
        assert(false);
      }
    }
    nodes.insert(node);
  }

  for (size_t i = 0; i < outputs_.size(); ++i) {
    Node* node = outputs_[i];
    for (size_t j = 0; j < node->inputs_.size(); ++j) {
      if (!Contains(nodes, node->inputs_[j])) {
        printf("Found node that occurs before one of its inputs!");
        assert(false);
      }
    }
    if (node->forward_neighbors_.size() > 0) {
      printf("Found output node that has forward neighbors!");
      assert(false);
    }
    nodes.insert(node);
  }
  complete_ = true;
}

// Aborts if the network has already been frozen. Mutating operations call
// this first so that structural changes after MarkAsComplete() are
// impossible.
void NeuralNetwork::CheckIncomplete() {
  if (!complete_) {
    return;
  }
  printf("Tried to modify the network when it has already been marked as complete.\n");
  assert(false);
}

// Ensures the network is frozen, validating and freezing it on first use.
// Idempotent: a no-op once complete_ is set.
void NeuralNetwork::CheckComplete() {
  if (!complete_) {
    MarkAsComplete();
  }
}

// Wires |input| into this node with the given |weight|, registering the edge
// on both endpoints. If |weight| is NULL, a fresh zero-valued weight is
// allocated from |network| (which owns all Weight objects).
void Node::AddInput(Node* input, Weight* weight, NeuralNetwork* network) {
  inputs_.push_back(input);
  if (weight == NULL) {
    weight = network->GetNewWeight();
  }
  // inputs_ and weights_ are parallel arrays: weights_[i] is the weight on
  // the edge from inputs_[i].
  weights_.push_back(weight);
  // Mirror the edge on the input side (forward_neighbors_/forward_weights_
  // are likewise parallel), so the edge can be walked from either endpoint.
  input->forward_neighbors_.push_back(this);
  input->forward_weights_.push_back(weight);
  // Only create a fixed_weight_ if we need to (if this is not an input node).
  // Lazily allocated here -- at most once -- because AddInput is only ever
  // called on non-input nodes; presumably this is the node's bias term
  // (TODO confirm against neural-net.h).
  if (fixed_weight_ == NULL) {
    fixed_weight_ = network->GetNewWeight();
  }
}

// Registers |node| with the network under the given role. The node's inputs
// must already be members of the network (topological insertion order), and
// INPUT nodes must be sources with no inputs of their own; any violation is
// reported and aborts. Only callable before the network is marked complete.
void NeuralNetwork::AddNode(Node* node, NodeType node_type) {
  CheckIncomplete();

  if (node_type == INPUT && node->inputs_.size() > 0) {
    printf("Input node cannot have inputs_.\n");
    assert(false);
  }

  // Every input edge must point at a node that was added earlier.
  for (size_t idx = 0; idx < node->inputs_.size(); ++idx) {
    if (Contains(node_set_, node->inputs_[idx])) {
      continue;
    }
    printf("Cannot add node whose inputs_ are not yet in the network.\n");
    assert(false);
  }

  node_set_.insert(node);
  if (node_type == INPUT) {
    inputs_.push_back(node);
  } else if (node_type == HIDDEN) {
    hidden_nodes_.push_back(node);
  } else if (node_type == OUTPUT) {
    outputs_.push_back(node);
  }
}
