/**
 * File: TemplatedVocabulary.h
 * Date: February 2011
 * Author: Dorian Galvez-Lopez
 * Description: templated vocabulary 
 * License: see the LICENSE.txt file
 */

#ifndef __D_T_TEMPLATED_VOCABULARY__
#define __D_T_TEMPLATED_VOCABULARY__

#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdlib>
#include <fstream>
#include <limits>
#include <numeric>
#include <string>
#include <vector>

#include <opencv2/core.hpp>

#include "DBoW2/FeatureVector.h"
#include "DBoW2/BowVector.h"
#include "DBoW2/ScoringObject.h"

#ifdef _MSC_VER
#define DLL_EXPORT __declspec(dllexport)
#else
#define DLL_EXPORT
#endif

namespace DBoW2 {

/**
 * Generic Vocabulary
 * @param TDescriptor class of descriptor
 * @param F class of descriptor functions
 */
template<class TDescriptor, class F>
class DLL_EXPORT TemplatedVocabulary {
public:
  /**
   * Initiates an empty vocabulary
   * @param k branching factor
   * @param L depth levels
   * @param weighting weighting type
   * @param scoring scoring type
   */
  TemplatedVocabulary(const int k = 10, const int L = 5,
                      const WeightingType weighting = TF_IDF, const ScoringType scoring = L1_NORM);

  /**
   * Creates the vocabulary by loading a file
   * @param filename
   */
  TemplatedVocabulary(const std::string& filename);

  /**
   * Creates the vocabulary by loading a file
   * @param filename
   */
  TemplatedVocabulary(const char* filename);

  /** 
   * Copy constructor (deep copy of the tree and the scoring object)
   * @param voc
   */
  TemplatedVocabulary(const TemplatedVocabulary<TDescriptor, F>& voc);

  /**
   * Destructor. Releases the owned scoring object.
   */
  virtual ~TemplatedVocabulary();

  /** 
   * Assigns the given vocabulary to this by copying its data and removing
   * all the data contained by this vocabulary before
   * @param voc
   * @return reference to this vocabulary
   */
  TemplatedVocabulary<TDescriptor, F>& operator=(const TemplatedVocabulary<TDescriptor, F>& voc);

  /** 
   * Creates the vocabulary by clustering the training features with the
   * already defined parameters
   * @param training_features one vector of descriptors per training image
   */
  virtual void create(const std::vector<std::vector<TDescriptor>>& training_features);

  /**
   * Creates a vocabulary from the training features, setting the branching
   * factor and the depth levels of the tree
   * @param training_features
   * @param k branching factor
   * @param L depth levels
   */
  virtual void create(const std::vector<std::vector<TDescriptor>>& training_features,
                      const int k, const int L);

  /**
   * Creates a vocabulary from the training features, setting the branching
   * factor and the depth levels of the tree, and the weighting and scoring
   * schemes
   */
  virtual void create(const std::vector<std::vector<TDescriptor>>& training_features,
                      const int k, const int L,
                      const WeightingType weighting, const ScoringType scoring);

  /**
   * Returns the number of words in the vocabulary
   * @return number of words
   */
  virtual inline unsigned int size() const;

  /**
   * Returns whether the vocabulary is empty (i.e. it has not been trained)
   * @return true iff the vocabulary is empty
   */
  virtual inline bool empty() const;

  /**
   * Transforms a set of descriptors into a bow vector
   * @param features
   * @param v (out) bow vector of weighted words
   */
  virtual void transform(const std::vector<TDescriptor>& features, BowVector& v) const;

  /**
   * Transform a set of descriptors into a bow vector and a feature vector
   * @param features
   * @param v (out) bow vector
   * @param fv (out) feature vector of nodes and feature indexes
   * @param levelsup levels to go up the vocabulary tree to get the node index
   */
  virtual void transform(const std::vector<TDescriptor>& features, BowVector& v, FeatureVector& fv,
                         const int levelsup) const;

  /**
   * Transforms a single feature into a word (without weight)
   * @param feature
   * @return word id
   */
  virtual WordId transform(const TDescriptor& feature) const;

  /**
   * Returns the score of two vectors
   * @param a vector
   * @param b vector
   * @return score between vectors
   * @note the vectors must be already sorted and normalized if necessary
   */
  inline double score(const BowVector& a, const BowVector& b) const;

  /**
   * Returns the id of the node that is "levelsup" levels from the word given
   * @param wid word id
   * @param levelsup 0..L
   * @return node id. if levelsup is 0, returns the node id associated to the word id
   */
  virtual NodeId getParentNode(const WordId wid, int levelsup) const;

  /**
   * Returns the ids of all the words that are under the given node id,
   * by traversing any of the branches that goes down from the node
   * @param nid starting node id
   * @param words ids of words
   */
  void getWordsFromNode(const NodeId nid, std::vector<WordId>& words) const;

  /**
   * Returns the branching factor of the tree (k)
   * @return k
   */
  inline int getBranchingFactor() const { return m_k; }

  /** 
   * Returns the depth levels of the tree (L)
   * @return L
   */
  inline int getDepthLevels() const { return m_L; }

  /**
   * Returns the real depth levels of the tree on average
   * @return average of depth levels of leaves
   */
  float getEffectiveLevels() const;

  /**
   * Returns the descriptor of a word
   * @param wid word id
   * @return descriptor
   */
  virtual inline TDescriptor getWord(WordId wid) const;

  /**
   * Returns the weight of a word
   * @param wid word id
   * @return weight
   */
  virtual inline WordValue getWordWeight(WordId wid) const;

  /** 
   * Returns the weighting method
   * @return weighting method
   */
  inline WeightingType getWeightingType() const { return m_weighting; }

  /** 
   * Returns the scoring method
   * @return scoring method
   */
  inline ScoringType getScoringType() const { return m_scoring; }

  /**
   * Changes the weighting method
   * @param type new weighting type
   */
  inline void setWeightingType(WeightingType type);

  /**
   * Changes the scoring method (recreates the scoring object accordingly)
   * @param type new scoring type
   */
  void setScoringType(ScoringType type);

  /**
   * Loads the vocabulary from a text file
   * @param filename
   */
  void loadFromTextFile(const std::string& filename);

  /**
   * Saves the vocabulary into a text file
   * @param filename
   */
  void saveToTextFile(const std::string& filename) const;

  /**
   * Loads the vocabulary from a binary file
   * @param filename
   */
  void loadFromBinaryFile(const std::string& filename);

  /**
   * Saves the vocabulary into a binary file
   * @param filename
   */
  void saveToBinaryFile(const std::string& filename) const;

  /**
   * Saves the vocabulary into a file
   * @param filename
   */
  void save(const std::string& filename) const;

  /**
   * Loads the vocabulary from a file
   * @param filename
   */
  void load(const std::string& filename);

  /** 
   * Saves the vocabulary to a file storage structure
   * @param fs file storage to write to
   * @param name name of the node where the tree is stored
   */
  virtual void save(cv::FileStorage& fs,
                    const std::string& name = "vocabulary") const;

  /**
   * Loads the vocabulary from a file storage node
   * @param fs first node
   * @param name name of the child node of fs where the tree is stored
   */
  virtual void load(const cv::FileStorage& fs,
                    const std::string& name = "vocabulary");

  /** 
   * Stops those words whose weight is below minWeight
   * @details Words are stopped by setting their weight to 0. They are not returned
   *          later when transforming image features into vectors.
   *          Note that when using IDF or TF_IDF, the weight is the idf part, which
   *          is equivalent to -log(f), where f is the frequency of the word
   *          (f = Ni/N, Ni: number of training images where the word is present, 
   *          N: number of training images).
   * @note The old weight is forgotten, and subsequent calls to this 
   *       function with a lower minWeight have no effect.
   * @return number of words stopped now
   */
  virtual int stopWords(const double minWeight);

protected:
  //! Pointer to descriptor (non-owning)
  typedef const TDescriptor* pDescriptor;

  //! Tree node
  struct Node {
    //! Node id (index into m_nodes)
    NodeId id;
    //! Weight if the node is a word
    WordValue weight;
    //! Children (empty iff the node is a leaf/word)
    std::vector<NodeId> children;
    //! Parent node (undefined in case of root)
    NodeId parent;
    //! Node descriptor (cluster center)
    TDescriptor descriptor;

    //! Word id if the node is a word
    WordId word_id;

    /**
     * Empty constructor
     */
    Node()
        : id(0), weight(0), parent(0), word_id(0) {}

    /**
     * Constructor
     * @param _id node id
     */
    Node(const NodeId _id)
        : id(_id), weight(0), parent(0), word_id(0) {}

    /**
     * Returns whether the node is a leaf node
     * @return true iff the node is a leaf
     */
    inline bool isLeaf() const { return children.empty(); }
  };

protected:
  /**
   * Creates an instance of the scoring object according to m_scoring
   */
  void createScoringObject();

  /** 
   * Returns a set of pointers to descriptors
   * @param training_features all the features
   * @param features (out) pointers to the training features
   */
  void getFeatures(const std::vector<std::vector<TDescriptor>>& training_features,
                   std::vector<pDescriptor>& features) const;

  /**
   * Returns the word id associated to a feature
   * @param feature
   * @param id (out) word id
   * @param weight (out) word weight
   * @param nid (out) if given, id of the node "levelsup" levels up
   * @param levelsup
   */
  virtual void transform(const TDescriptor& feature,
                         WordId& id, WordValue& weight, NodeId* nid = nullptr, int levelsup = 0) const;

  /**
   * Returns the word id associated to a feature
   * @param feature
   * @param id (out) word id
   */
  virtual void transform(const TDescriptor& feature, WordId& id) const;

  /**
   * Creates a level in the tree, under the parent, by running kmeans with
   * a descriptor set, and recursively creates the subsequent levels too
   * @param parent_id id of parent node
   * @param descriptors descriptors to run the kmeans on
   * @param current_level current level in the tree
   */
  void HKmeansStep(const NodeId parent_id, const std::vector<pDescriptor>& descriptors,
                   const int current_level);

  /**
   * Creates k clusters from the given descriptors with some seeding algorithm.
   * @note In this class, kmeans++ is used, but this function should be
   *   overridden by inherited classes.
   */
  virtual void initiateClusters(const std::vector<pDescriptor>& descriptors,
                                std::vector<TDescriptor>& clusters) const;

  /**
   * Creates k clusters from the given descriptor sets by running the
   * initial step of kmeans++
   * @param descriptors 
   * @param clusters resulting clusters
   */
  void initiateClustersKMpp(const std::vector<pDescriptor>& descriptors,
                            std::vector<TDescriptor>& clusters) const;

  /**
   * Create the words of the vocabulary once the tree has been built
   */
  void createWords();

  /**
   * Sets the weights of the nodes of tree according to the given features.
   * Before calling this function, the nodes and the words must be already
   * created (by calling HKmeansStep and createWords)
   * @param features
   */
  void setNodeWeights(const std::vector<std::vector<TDescriptor>>& features);

  /**
   * Returns a random number in the range [min..max]
   * @param min
   * @param max
   * @return random T number in [min..max]
   */
  template<class T>
  static T RandomValue(const T min, const T max) {
    return ((T)rand() / (T)RAND_MAX) * (max - min) + min;
  }

  /**
   * Returns a random int in the range [min..max]
   * @param min
   * @param max
   * @return random int in [min..max]
   */
  static int RandomInt(const int min, const int max) {
    const int d = max - min + 1;
    return int(((double)rand() / ((double)RAND_MAX + 1.0)) * d) + min;
  }

protected:
  //! Branching factor
  int m_k;

  //! Depth levels
  int m_L;

  //! Weighting method
  WeightingType m_weighting;

  //! Scoring method
  ScoringType m_scoring;

  //! Object for computing scores (owned; released in the destructor)
  GeneralScoring* m_scoring_object;

  //! Tree nodes
  std::vector<Node> m_nodes;

  //! Words of the vocabulary (non-owning pointers into m_nodes)
  //! this condition holds: m_words[wid]->word_id == wid
  std::vector<Node*> m_words;
};

// --------------------------------------------------------------------------

/**
 * Builds an empty vocabulary with the given parameters and instantiates
 * the scoring object that matches the scoring scheme.
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>::TemplatedVocabulary(const int k, const int L,
                                                         const WeightingType weighting, const ScoringType scoring)
    : m_k(k),
      m_L(L),
      m_weighting(weighting),
      m_scoring(scoring),
      m_scoring_object(nullptr) {
  // the scoring object depends on m_scoring, set just above
  createScoringObject();
}

// --------------------------------------------------------------------------

/**
 * Builds a vocabulary by reading it from a file (format deduced by load()).
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>::TemplatedVocabulary(const std::string& filename)
    : m_scoring_object(nullptr) {
  // load() fills all the parameters and recreates the scoring object
  load(filename);
}

// --------------------------------------------------------------------------

/**
 * Builds a vocabulary by reading it from a file given as a C string.
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>::TemplatedVocabulary(const char* filename)
    : m_scoring_object(nullptr) {
  // delegates to load(std::string) via implicit conversion
  load(filename);
}

// --------------------------------------------------------------------------

/**
 * Replaces the scoring object with a fresh one matching m_scoring.
 * If m_scoring holds an unknown value, m_scoring_object is left null,
 * exactly as the switch-based version did.
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::createScoringObject() {
  // dispose of the previous scorer, if any
  delete m_scoring_object;
  m_scoring_object = nullptr;

  if (m_scoring == L1_NORM)
    m_scoring_object = new L1Scoring;
  else if (m_scoring == L2_NORM)
    m_scoring_object = new L2Scoring;
  else if (m_scoring == CHI_SQUARE)
    m_scoring_object = new ChiSquareScoring;
  else if (m_scoring == KL)
    m_scoring_object = new KLScoring;
  else if (m_scoring == BHATTACHARYYA)
    m_scoring_object = new BhattacharyyaScoring;
  else if (m_scoring == DOT_PRODUCT)
    m_scoring_object = new DotProductScoring;
}

// --------------------------------------------------------------------------

/**
 * Changes the scoring scheme and rebuilds the matching scoring object.
 * @param type new scoring type
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::setScoringType(ScoringType type) {
  this->m_scoring = type;
  // keep the scorer in sync with the new scheme
  this->createScoringObject();
}

// --------------------------------------------------------------------------

/**
 * Changes the weighting scheme. Note: does NOT recompute node weights;
 * it only affects how subsequent transform() calls combine them.
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::setWeightingType(WeightingType type) {
  m_weighting = type;
}

// --------------------------------------------------------------------------

/**
 * Copy constructor: performs a deep copy by delegating to operator=,
 * which clones the tree and rebuilds words and scoring object.
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>::TemplatedVocabulary(
    const TemplatedVocabulary<TDescriptor, F>& voc)
    : m_scoring_object(nullptr) {
  *this = voc; // deep copy via assignment
}

// --------------------------------------------------------------------------

/**
 * Destructor. The scoring object is the only resource held through a raw
 * owning pointer; nodes and word pointers are released by their containers.
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>::~TemplatedVocabulary() {
  delete m_scoring_object;
}

// --------------------------------------------------------------------------

/**
 * Deep-copy assignment: copies the parameters and the tree from voc,
 * then rebuilds the scoring object and the word pointers (m_words must
 * point into THIS object's m_nodes, so they cannot be copied directly).
 * @param voc vocabulary to copy
 * @return reference to this vocabulary
 */
template<class TDescriptor, class F>
TemplatedVocabulary<TDescriptor, F>&
TemplatedVocabulary<TDescriptor, F>::operator=(const TemplatedVocabulary<TDescriptor, F>& voc) {
  // Self-assignment guard: without it, a == b assignment would needlessly
  // rebuild the scoring object and the word index.
  if (this == &voc)
    return *this;

  this->m_k = voc.m_k;
  this->m_L = voc.m_L;
  this->m_scoring = voc.m_scoring;
  this->m_weighting = voc.m_weighting;

  this->createScoringObject();

  this->m_nodes.clear();
  this->m_words.clear();

  this->m_nodes = voc.m_nodes;
  // regenerate m_words so its pointers reference this->m_nodes
  this->createWords();

  return *this;
}

// --------------------------------------------------------------------------

/**
 * Creates the vocabulary from the training features with the already
 * configured branching factor (m_k) and depth (m_L).
 * @param training_features one vector of descriptors per training image
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::create(
    const std::vector<std::vector<TDescriptor>>& training_features) {
  m_nodes.clear();
  m_words.clear();

  // expected_nodes = Sum_{i=0..L} k^i = (k^(L+1) - 1) / (k - 1) for k > 1.
  // The closed form divides by zero when m_k == 1; the sum then degenerates
  // to L + 1 nodes (a single chain), so handle that case explicitly.
  const int expected_nodes = (m_k > 1)
      ? (int)((pow((double)m_k, (double)m_L + 1) - 1) / (m_k - 1))
      : m_L + 1;

  m_nodes.reserve(expected_nodes); // avoid allocations when creating the tree

  // gather non-owning pointers to every training descriptor
  std::vector<pDescriptor> features;
  getFeatures(training_features, features);

  // create root
  m_nodes.push_back(Node(0)); // root

  // create the tree by recursive hierarchical k-means
  HKmeansStep(0, features, 1);

  // collect the leaves as words
  createWords();

  // and set the weight of each node of the tree
  setNodeWeights(training_features);
}

// --------------------------------------------------------------------------

/**
 * Creates the vocabulary after overriding the branching factor and depth.
 * @param training_features one vector of descriptors per training image
 * @param k branching factor
 * @param L depth levels
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::create(const std::vector<std::vector<TDescriptor>>& training_features,
                                                 const int k, const int L) {
  // store the new tree shape, then delegate to the parameterless overload
  m_k = k;
  m_L = L;
  create(training_features);
}

// --------------------------------------------------------------------------

/**
 * Creates the vocabulary after overriding every parameter: tree shape,
 * weighting scheme and scoring scheme.
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::create(const std::vector<std::vector<TDescriptor>>& training_features,
                                                 const int k, const int L,
                                                 const WeightingType weighting, const ScoringType scoring) {
  // adopt the requested configuration
  m_k = k;
  m_L = L;
  m_weighting = weighting;
  m_scoring = scoring;

  // the scorer must match the (possibly new) scoring scheme
  createScoringObject();

  // delegate to the parameterless overload to build the tree
  create(training_features);
}

// --------------------------------------------------------------------------

/**
 * Flattens the per-image training descriptors into a single vector of
 * non-owning pointers.
 * @param training_features all the features, grouped by image
 * @param features (out) pointers to every training descriptor
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::getFeatures(const std::vector<std::vector<TDescriptor>>& training_features,
                                                      std::vector<pDescriptor>& features) const {
  features.clear();

  for (const auto& image_descriptors : training_features) {
    // grow once per image to limit reallocations
    features.reserve(features.size() + image_descriptors.size());
    for (const auto& descriptor : image_descriptors)
      features.push_back(&descriptor);
  }
}

// --------------------------------------------------------------------------

// Builds one level of the tree under parent_id by clustering `descriptors`
// into (at most) m_k groups with iterative k-means, then recurses into each
// non-trivial group until m_L levels have been created.
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::HKmeansStep(const NodeId parent_id, const std::vector<pDescriptor>& descriptors,
                                                      const int current_level) {
  if (descriptors.empty())
    return;

  // features associated to each cluster
  std::vector<TDescriptor> clusters;
  std::vector<std::vector<unsigned int>> groups; // groups[i] = [j1, j2, ...]
  // j1, j2, ... indices of descriptors associated to cluster i

  clusters.reserve(m_k);
  groups.reserve(m_k);

  //const int msizes[] = { m_k, descriptors.size() };
  //cv::SparseMat assoc(2, msizes, CV_8U);
  //cv::SparseMat last_assoc(2, msizes, CV_8U);
  //! / assoc.row(cluster_idx).col(descriptor_idx) = 1 iif associated

  if ((int)descriptors.size() <= m_k) {
    // trivial case: one cluster per feature, no k-means needed
    groups.resize(descriptors.size());

    for (unsigned int i = 0; i < descriptors.size(); i++) {
      groups[i].push_back(i);
      clusters.push_back(*descriptors[i]);
    }
  }
  else {
    // select clusters and groups with kmeans

    bool first_time = true;
    bool goon = true;

    // to check if clusters move after iterations
    std::vector<int> last_association, current_association;

    // Lloyd iterations: seed, then alternate (re)computing centers and
    // re-assigning features until the assignment stops changing.
    while (goon) {
      // 1. Calculate clusters

      if (first_time) {
        // random sample (kmeans++ seeding by default, see initiateClusters)
        initiateClusters(descriptors, clusters);
      }
      else {
        // calculate cluster centres as the mean of their current members

        for (unsigned int c = 0; c < clusters.size(); ++c) {
          std::vector<pDescriptor> cluster_descriptors;
          cluster_descriptors.reserve(groups[c].size());

          /*
          for(unsigned int d = 0; d < descriptors.size(); ++d)
          {
            if( assoc.find<unsigned char>(c, d) )
            {
              cluster_descriptors.push_back(descriptors[d]);
            }
          }
          */

          std::vector<unsigned int>::const_iterator vit;
          for (vit = groups[c].begin(); vit != groups[c].end(); ++vit) {
            cluster_descriptors.push_back(descriptors[*vit]);
          }

          F::meanValue(cluster_descriptors, clusters[c]);
        }

      } // if(!first_time)

      // 2. Associate features with clusters

      // calculate distances to cluster centers
      groups.clear();
      groups.resize(clusters.size(), std::vector<unsigned int>());
      current_association.resize(descriptors.size());

      //assoc.clear();

      typename std::vector<pDescriptor>::const_iterator fit;
      //unsigned int d = 0;
      for (fit = descriptors.begin(); fit != descriptors.end(); ++fit) //, ++d)
      {
        // nearest-center search: seed with cluster 0, then scan the rest
        double best_dist = F::distance(*(*fit), clusters[0]);
        unsigned int icluster = 0;

        for (unsigned int c = 1; c < clusters.size(); ++c) {
          double dist = F::distance(*(*fit), clusters[c]);
          if (dist < best_dist) {
            best_dist = dist;
            icluster = c;
          }
        }

        //assoc.ref<unsigned char>(icluster, d) = 1;

        groups[icluster].push_back(fit - descriptors.begin());
        current_association[fit - descriptors.begin()] = icluster;
      }

      // kmeans++ ensures all the clusters has any feature associated with them

      // 3. check convergence: stop when no feature changed cluster
      if (first_time) {
        first_time = false;
      }
      else {
        //goon = !eqUChar(last_assoc, assoc);

        goon = false;
        for (unsigned int i = 0; i < current_association.size(); i++) {
          if (current_association[i] != last_association[i]) {
            goon = true;
            break;
          }
        }
      }

      if (goon) {
        // copy last feature-cluster association
        last_association = current_association;
        //last_assoc = assoc.clone();
      }

    } // while(goon)

  } // if must run kmeans

  // create one child node per resulting cluster
  for (unsigned int i = 0; i < clusters.size(); ++i) {
    NodeId id = m_nodes.size();
    m_nodes.push_back(Node(id));
    m_nodes.back().descriptor = clusters[i];
    m_nodes.back().parent = parent_id;
    m_nodes[parent_id].children.push_back(id);
  }

  // go on with the next level
  if (current_level < m_L) {
    // iterate again with the resulting clusters
    const std::vector<NodeId>& children_ids = m_nodes[parent_id].children;
    for (unsigned int i = 0; i < clusters.size(); ++i) {
      NodeId id = children_ids[i];

      std::vector<pDescriptor> child_features;
      child_features.reserve(groups[i].size());

      std::vector<unsigned int>::const_iterator vit;
      for (vit = groups[i].begin(); vit != groups[i].end(); ++vit) {
        child_features.push_back(descriptors[*vit]);
      }

      // single-descriptor groups become leaves; no point clustering them
      if (child_features.size() > 1) {
        HKmeansStep(id, child_features, current_level + 1);
      }
    }
  }
}

// --------------------------------------------------------------------------

/**
 * Seeds the initial cluster centers. The default implementation uses
 * kmeans++; subclasses may override with a different seeding scheme.
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::initiateClusters(const std::vector<pDescriptor>& descriptors,
                                                           std::vector<TDescriptor>& clusters) const {
  initiateClustersKMpp(descriptors, clusters);
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::initiateClustersKMpp(
    const std::vector<pDescriptor>& pfeatures,
    std::vector<TDescriptor>& clusters) const {
  // Implements kmeans++ seeding algorithm
  // Algorithm:
  // 1. Choose one center uniformly at random from among the data points.
  // 2. For each data point x, compute D(x), the distance between x and the nearest
  //    center that has already been chosen.
  // 3. Add one new data point as a center. Each point x is chosen with probability
  //    proportional to D(x)^2.
  // 4. Repeat Steps 2 and 3 until k centers have been chosen.
  // 5. Now that the initial centers have been chosen, proceed using standard k-means
  //    clustering.
  // NOTE: this implementation weights by D(x) rather than D(x)^2 when
  // sampling (min_dists are used directly in dist_sum below).

  clusters.resize(0);
  clusters.reserve(m_k);
  // min_dists[i] = distance from feature i to its nearest chosen center
  std::vector<double> min_dists(pfeatures.size(), std::numeric_limits<double>::max());

  // 1. pick the first center uniformly at random

  int ifeature = RandomInt(0, pfeatures.size() - 1);

  // create first cluster
  clusters.push_back(*pfeatures[ifeature]);

  // compute the initial distances
  typename std::vector<pDescriptor>::const_iterator fit;
  std::vector<double>::iterator dit;
  dit = min_dists.begin();
  for (fit = pfeatures.begin(); fit != pfeatures.end(); ++fit, ++dit) {
    *dit = F::distance(*(*fit), clusters.back());
  }

  while ((int)clusters.size() < m_k) {
    // 2. refresh min_dists against the most recently added center only
    //    (distances to older centers are already folded in)
    dit = min_dists.begin();
    for (fit = pfeatures.begin(); fit != pfeatures.end(); ++fit, ++dit) {
      if (*dit > 0) {
        double dist = F::distance(*(*fit), clusters.back());
        if (dist < *dit)
          *dit = dist;
      }
    }

    // 3. sample the next center with probability proportional to min_dists
    double dist_sum = std::accumulate(min_dists.begin(), min_dists.end(), 0.0);

    if (dist_sum > 0) {
      double cut_d;
      do {
        cut_d = RandomValue<double>(0, dist_sum);
      } while (cut_d == 0.0);

      // walk the cumulative distribution until the cut point is reached
      double d_up_now = 0;
      for (dit = min_dists.begin(); dit != min_dists.end(); ++dit) {
        d_up_now += *dit;
        if (d_up_now >= cut_d)
          break;
      }

      if (dit == min_dists.end())
        ifeature = pfeatures.size() - 1;
      else
        ifeature = dit - min_dists.begin();

      clusters.push_back(*pfeatures[ifeature]);

    } // if dist_sum > 0
    else
      break; // every feature coincides with a center; no more to choose

  } // while(used_clusters < m_k)
}

// --------------------------------------------------------------------------

/**
 * Rebuilds m_words by scanning the tree for leaf nodes, assigning each
 * leaf a consecutive word id. Must be called whenever m_nodes is rebuilt,
 * since m_words stores raw pointers into m_nodes.
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::createWords() {
  m_words.clear();

  if (m_nodes.empty())
    return;

  m_words.reserve((int)pow((double)m_k, (double)m_L));

  // node 0 is the root and can never be a word; scan the rest for leaves
  for (size_t i = 1; i < m_nodes.size(); ++i) {
    Node& node = m_nodes[i];
    if (node.isLeaf()) {
      node.word_id = m_words.size();
      m_words.push_back(&node);
    }
  }
}

// --------------------------------------------------------------------------

/**
 * Sets the weight of every word according to m_weighting. For TF/BINARY
 * the idf part is identically 1; for IDF/TF_IDF it is ln(N/Ni), where Ni
 * is the number of training images containing word i and N the number of
 * training images. The tf part, when applicable, is applied later in
 * ::transform.
 * @param training_features one vector of descriptors per training image
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::setNodeWeights(const std::vector<std::vector<TDescriptor>>& training_features) {
  const unsigned int NWords = m_words.size();
  const unsigned int NDocs = training_features.size();

  if (m_weighting == TF || m_weighting == BINARY) {
    // no idf part: every word weighs 1
    for (unsigned int i = 0; i < NWords; ++i)
      m_words[i]->weight = 1;
    return;
  }

  if (m_weighting != IDF && m_weighting != TF_IDF)
    return;

  // Ni[w] = number of training images in which word w appears
  std::vector<unsigned int> Ni(NWords, 0);
  std::vector<bool> counted(NWords, false);

  for (const auto& document : training_features) {
    // reset the per-document "already counted" marks
    std::fill(counted.begin(), counted.end(), false);

    for (const auto& feature : document) {
      WordId word_id;
      transform(feature, word_id);

      if (!counted[word_id]) {
        ++Ni[word_id];
        counted[word_id] = true;
      }
    }
  }

  // weight = ln(N / Ni); Ni == 0 cannot occur when seeding with kmeans++
  for (unsigned int i = 0; i < NWords; ++i) {
    if (Ni[i] > 0)
      m_words[i]->weight = log((double)NDocs / (double)Ni[i]);
  }
}

// --------------------------------------------------------------------------

/**
 * Number of words (leaves) in the vocabulary.
 */
template<class TDescriptor, class F>
inline unsigned int TemplatedVocabulary<TDescriptor, F>::size() const {
  return static_cast<unsigned int>(m_words.size());
}

// --------------------------------------------------------------------------

/**
 * True iff the vocabulary holds no words (i.e. it has not been trained
 * or loaded yet).
 */
template<class TDescriptor, class F>
inline bool TemplatedVocabulary<TDescriptor, F>::empty() const {
  return m_words.empty();
}

// --------------------------------------------------------------------------

/**
 * Average depth of the leaves, computed by walking each word up to the
 * root and counting edges.
 * @return mean leaf depth, or 0 for an empty vocabulary
 */
template<class TDescriptor, class F>
float TemplatedVocabulary<TDescriptor, F>::getEffectiveLevels() const {
  // Guard the empty vocabulary: the original divided by m_words.size()
  // unconditionally, yielding NaN (0/0) when no words exist.
  if (m_words.empty())
    return 0.0f;

  long sum = 0;
  for (const Node* p : m_words) {
    // climb to the root (id 0), counting one level per step
    for (; p->id != 0; ++sum)
      p = &m_nodes[p->parent];
  }

  return (float)((double)sum / (double)m_words.size());
}

// --------------------------------------------------------------------------

/**
 * Descriptor (cluster center) of the given word.
 * @param wid word id; must be < size()
 */
template<class TDescriptor, class F>
TDescriptor TemplatedVocabulary<TDescriptor, F>::getWord(WordId wid) const {
  const Node* word_node = m_words[wid]; // leaf node holding this word
  return word_node->descriptor;
}

// --------------------------------------------------------------------------

/**
 * Weight of the given word (0 if the word has been stopped).
 * @param wid word id; must be < size()
 */
template<class TDescriptor, class F>
WordValue TemplatedVocabulary<TDescriptor, F>::getWordWeight(WordId wid) const {
  const Node* word_node = m_words[wid];
  return word_node->weight;
}

// --------------------------------------------------------------------------

/**
 * Maps a single feature to its word id (weight discarded).
 * @param feature descriptor to look up
 * @return word id; 0 if the vocabulary is empty
 */
template<class TDescriptor, class F>
WordId TemplatedVocabulary<TDescriptor, F>::transform(const TDescriptor& feature) const {
  // an untrained vocabulary maps everything to word 0
  if (empty())
    return 0;

  WordId wid;
  transform(feature, wid);
  return wid;
}

// --------------------------------------------------------------------------

/**
 * Converts a set of descriptors into a bag-of-words vector, applying the
 * configured weighting scheme and, if the scorer requires it, normalizing
 * the result.
 * @param features descriptors of one image
 * @param v (out) bow vector of weighted words
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::transform(const std::vector<TDescriptor>& features, BowVector& v) const {
  v.clear();

  if (empty())
    return;

  // does the scoring scheme require the vector to be normalized?
  LNorm norm;
  const bool must = m_scoring_object->mustNormalize(norm);

  if (m_weighting == TF || m_weighting == TF_IDF) {
    for (const auto& feature : features) {
      WordId id;
      WordValue w; // idf value if TF_IDF, 1 if TF

      transform(feature, id, w);

      if (w > 0) // not stopped
        v.addWeight(id, w);
    }

    if (!v.empty() && !must) {
      // unnecessary when normalizing afterwards
      const double nd = v.size();
      for (auto& entry : v)
        entry.second /= nd;
    }
  }
  else { // IDF || BINARY
    for (const auto& feature : features) {
      WordId id;
      WordValue w; // idf if IDF, or 1 if BINARY

      transform(feature, id, w);

      if (w > 0) // not stopped
        v.addIfNotExist(id, w);
    }
  }

  if (must)
    v.normalize(norm);
}

// --------------------------------------------------------------------------

/**
 * Converts a set of descriptors into a bag-of-words vector and, in
 * parallel, a feature vector that groups feature indexes by the tree node
 * "levelsup" levels above each word.
 * @param features descriptors of one image
 * @param v (out) bow vector
 * @param fv (out) feature vector of nodes and feature indexes
 * @param levelsup levels to go up the tree to pick the grouping node
 */
template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::transform(const std::vector<TDescriptor>& features,
                                                    BowVector& v, FeatureVector& fv, const int levelsup) const {
  v.clear();
  fv.clear();

  if (empty()) // safe for subclasses
    return;

  // does the scoring scheme require the vector to be normalized?
  LNorm norm;
  const bool must = m_scoring_object->mustNormalize(norm);

  if (m_weighting == TF || m_weighting == TF_IDF) {
    unsigned int i_feature = 0;
    for (const auto& feature : features) {
      WordId id;
      NodeId nid;
      WordValue w; // idf value if TF_IDF, 1 if TF

      transform(feature, id, w, &nid, levelsup);

      if (w > 0) { // not stopped
        v.addWeight(id, w);
        fv.addFeature(nid, i_feature);
      }
      ++i_feature;
    }

    if (!v.empty() && !must) {
      // unnecessary when normalizing afterwards
      const double nd = v.size();
      for (auto& entry : v)
        entry.second /= nd;
    }
  }
  else { // IDF || BINARY
    unsigned int i_feature = 0;
    for (const auto& feature : features) {
      WordId id;
      NodeId nid;
      WordValue w; // idf if IDF, or 1 if BINARY

      transform(feature, id, w, &nid, levelsup);

      if (w > 0) { // not stopped
        v.addIfNotExist(id, w);
        fv.addFeature(nid, i_feature);
      }
      ++i_feature;
    }
  }

  if (must)
    v.normalize(norm);
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
inline double TemplatedVocabulary<TDescriptor, F>::score(const BowVector& v1, const BowVector& v2) const {
  // Delegate to the scoring policy (L1, L2, chi-square, ...) selected when
  // the vocabulary was created or loaded.
  const double s = m_scoring_object->score(v1, v2);
  return s;
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::transform(const TDescriptor& feature, WordId& id) const {
  // The word weight is produced as a by-product of the tree descent but is
  // not needed by this overload, so it is simply discarded.
  WordValue discarded_weight;
  transform(feature, id, discarded_weight);
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::transform(const TDescriptor& feature, WordId& word_id, WordValue& weight,
                                                    NodeId* nid, const int levelsup) const {
  // Propagate the feature down the tree: at each level pick the child whose
  // descriptor is closest to the feature, until a leaf (word) is reached.
  // If `nid` is given, it receives the id of the node at level
  // m_L - levelsup along the descent path.

  // level at which the node must be stored in nid, if given
  const int nid_level = m_L - levelsup;
  if (nid_level <= 0 && nid != nullptr)
    *nid = 0; // root

  NodeId final_id = 0; // root
  int current_level = 0;

  do {
    ++current_level;
    // Reference the child list in place: the previous code copied the whole
    // vector on every tree level, allocating in this hot path.
    const std::vector<NodeId>& children = m_nodes[final_id].children;
    final_id = children[0];

    double best_d = F::distance(feature, m_nodes[final_id].descriptor);

    for (std::size_t i = 1; i < children.size(); ++i) {
      const NodeId id = children[i];
      const double d = F::distance(feature, m_nodes[id].descriptor);
      if (d < best_d) {
        best_d = d;
        final_id = id;
      }
    }

    if (nid != nullptr && current_level == nid_level)
      *nid = final_id;

  } while (!m_nodes[final_id].isLeaf());

  // turn node id into word id
  word_id = m_nodes[final_id].word_id;
  weight = m_nodes[final_id].weight;
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
NodeId TemplatedVocabulary<TDescriptor, F>::getParentNode(const WordId wid, int levelsup) const {
  // Start at the leaf node backing this word and climb `levelsup` levels,
  // stopping early if the root (node id 0) is reached.
  NodeId current = m_words[wid]->id;
  for (; levelsup > 0 && current != 0; --levelsup)
    current = m_nodes[current].parent;
  return current;
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::getWordsFromNode(const NodeId nid, std::vector<WordId>& words) const {
  // Collects the ids of all the words (leaves) in the subtree rooted at nid.
  words.clear();

  if (m_nodes[nid].isLeaf()) {
    // A leaf is itself a word.
    words.push_back(m_nodes[nid].word_id);
    return;
  }

  words.reserve(m_k); // at least one full set of children

  // Iterative depth-first traversal with an explicit stack of inner nodes.
  std::vector<NodeId> pending;
  pending.push_back(nid);

  while (!pending.empty()) {
    const NodeId current = pending.back();
    pending.pop_back();

    for (const NodeId child_id : m_nodes[current].children) {
      const Node& child = m_nodes[child_id];

      if (child.isLeaf())
        words.push_back(child.word_id);
      else
        pending.push_back(child_id);
    } // for each child
  }   // while pending
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
int TemplatedVocabulary<TDescriptor, F>::stopWords(const double minWeight) {
  // Disables ("stops") every word whose weight falls below minWeight by
  // zeroing its weight. Returns the number of words stopped.
  int stopped = 0;
  for (Node* word : m_words) {
    if (word->weight < minWeight) {
      word->weight = 0;
      ++stopped;
    }
  }
  return stopped;
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::loadFromTextFile(const std::string& filename) {
  // Loads a vocabulary stored with saveToTextFile.
  // Format: a header line "k L scoring weighting", then one line per
  // non-root node: parent_id is_leaf descriptor(F::L tokens) weight.
  // Throws a std::string describing the problem on failure.
  std::ifstream ifs;
  ifs.open(filename.c_str());

  if (!ifs) {
    throw std::string("Could not open file: ") + filename;
  }

  m_words.clear();
  m_nodes.clear();

  // header
  std::string s;
  getline(ifs, s);
  std::stringstream ss;
  ss << s;
  ss >> m_k;
  ss >> m_L;
  int n1, n2;
  ss >> n1;
  ss >> n2;

  // m_k must be at least 2: a valid vocabulary tree branches, and the
  // expected-node formula below divides by (m_k - 1), which would be 0
  // (or negative) for k <= 1.
  if (m_k < 2 || 20 < m_k || m_L < 1 || 10 < m_L || n1 < 0 || 5 < n1 || n2 < 0 || 3 < n2) {
    throw std::string("Vocabulary loading failed");
  }

  m_scoring = static_cast<ScoringType>(n1);
  m_weighting = static_cast<WeightingType>(n2);
  createScoringObject();

  // Reserve for a full k-ary tree of depth m_L: (k^(L+1) - 1) / (k - 1)
  // nodes. NOTE: m_words stores raw pointers into m_nodes, so reserving
  // enough capacity up front is what keeps them valid while nodes are added.
  const auto expected_nodes = static_cast<int>((std::pow(static_cast<double>(m_k), static_cast<double>(m_L) + 1.0) - 1) / (m_k - 1));
  m_nodes.reserve(expected_nodes);

  m_words.reserve(std::pow(static_cast<double>(m_k), static_cast<double>(m_L) + 1.0));

  // root node
  m_nodes.resize(1);
  m_nodes.at(0).id = 0;

  // One node per remaining line. getline is used as the loop condition
  // instead of checking eof() up front: eof() is only set after a failed
  // read, so the old pattern could run one spurious iteration.
  std::string s_node;
  while (getline(ifs, s_node)) {
    if (s_node.empty()) {
      continue;
    }
    std::stringstream ss_node;
    ss_node << s_node;

    const int n_id = m_nodes.size();
    m_nodes.resize(m_nodes.size() + 1);
    m_nodes.at(n_id).id = n_id;

    int p_id;
    ss_node >> p_id;
    m_nodes.at(n_id).parent = p_id;
    m_nodes.at(p_id).children.push_back(n_id);

    int is_leaf;
    ss_node >> is_leaf;

    // the descriptor spans F::L whitespace-separated tokens
    std::stringstream ss_desc;
    for (int i = 0; i < F::L; ++i) {
      std::string s_desc;
      ss_node >> s_desc;
      ss_desc << s_desc << " ";
    }
    F::fromString(m_nodes.at(n_id).descriptor, ss_desc.str());

    ss_node >> m_nodes.at(n_id).weight;

    if (static_cast<bool>(is_leaf)) {
      // leaves are words: register in the word index
      const int w_id = m_words.size();
      m_words.resize(w_id + 1);

      m_nodes.at(n_id).word_id = w_id;
      m_words.at(w_id) = &m_nodes.at(n_id);
    }
    else {
      m_nodes.at(n_id).children.reserve(m_k);
    }
  }
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::saveToTextFile(const std::string& filename) const {
  // Saves the vocabulary in the plain-text format read by loadFromTextFile:
  // header "k L scoring weighting", then one line per non-root node:
  //   parent_id is_leaf descriptor weight
  // Throws a std::string on failure to open the file.
  std::ofstream ofs;
  ofs.open(filename.c_str(), std::ios_base::out);

  if (!ofs) {
    throw std::string("Could not open file: ") + filename;
  }

  // Header. The previous code streamed a stray extra " " between L and the
  // scoring type; the >>-based loader skips whitespace either way, but a
  // single separator is the intended format.
  ofs << m_k << " " << m_L << " " << m_scoring << " " << m_weighting << std::endl;

  // the root node (index 0) is implicit and not written
  for (size_t i = 1; i < m_nodes.size(); ++i) {
    const Node& node = m_nodes.at(i);

    ofs << node.parent << " ";
    ofs << (node.isLeaf() ? 1 : 0) << " ";
    ofs << F::toString(node.descriptor) << " " << static_cast<double>(node.weight) << std::endl;
  }

  ofs.close();
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::loadFromBinaryFile(const std::string& filename) {
  // Loads a vocabulary stored with saveToBinaryFile.
  // Per-node record layout (must match saveToBinaryFile):
  //   int parent | F::L descriptor bytes | float weight | bool is_leaf
  std::ifstream ifs;
  ifs.open(filename.c_str(), std::ios_base::in | std::ios::binary);

  if (!ifs) {
    throw std::string("Could not open file: ") + filename;
  }

  unsigned int n_nodes, node_size;
  ifs.read(reinterpret_cast<char*>(&n_nodes), sizeof(n_nodes));
  ifs.read(reinterpret_cast<char*>(&node_size), sizeof(node_size));
  ifs.read(reinterpret_cast<char*>(&m_k), sizeof(m_k));
  ifs.read(reinterpret_cast<char*>(&m_L), sizeof(m_L));
  ifs.read(reinterpret_cast<char*>(&m_scoring), sizeof(m_scoring));
  ifs.read(reinterpret_cast<char*>(&m_weighting), sizeof(m_weighting));
  createScoringObject();

  m_words.clear();
  m_words.reserve(std::pow(static_cast<double>(m_k), static_cast<double>(m_L) + 1.0));

  m_nodes.clear();
  m_nodes.resize(n_nodes);
  m_nodes.at(0).id = 0;

  // RAII buffer: the previous new[]/delete[] pair leaked if any .at()
  // below threw on a malformed file.
  std::vector<char> buf(node_size);

  // Read exactly n_nodes - 1 records; putting the read in the loop
  // condition stops cleanly on a truncated file instead of processing a
  // failed read (the old !eof() pattern).
  for (unsigned int n_id = 1; n_id < n_nodes && ifs.read(buf.data(), node_size); ++n_id) {
    Node& node = m_nodes.at(n_id);
    node.id = n_id;

    // memcpy instead of pointer casts: no alignment or strict-aliasing
    // assumptions on the raw buffer.
    int p_id;
    memcpy(&p_id, buf.data(), sizeof(p_id));
    node.parent = p_id;
    m_nodes.at(node.parent).children.push_back(n_id);

    node.descriptor = cv::Mat(1, F::L, CV_8U);
    memcpy(node.descriptor.data, buf.data() + sizeof(p_id), F::L);

    float w;
    memcpy(&w, buf.data() + sizeof(p_id) + F::L, sizeof(w));
    node.weight = w;

    // is_leaf flag is the last byte of the record (offset 8 + F::L,
    // same as the original code assumed)
    if (buf[sizeof(p_id) + F::L + sizeof(float)]) {
      // leaves are words: register in the word index
      const int w_id = m_words.size();
      m_words.resize(w_id + 1);
      node.word_id = w_id;
      m_words.at(w_id) = &node;
    }
    else {
      node.children.reserve(m_k);
    }
  }

  ifs.close();
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::saveToBinaryFile(const std::string& filename) const {
  // Binary layout: header (n_nodes, node_size, k, L, scoring, weighting)
  // followed by one fixed-size record per non-root node:
  //   int parent | F::L descriptor bytes | float weight | bool is_leaf
  std::ofstream ofs;
  ofs.open(filename.c_str(), std::ios_base::out | std::ios::binary);

  if (!ofs) {
    throw std::string("Could not open file: ") + filename;
  }

  const unsigned int n_nodes = m_nodes.size();
  const unsigned int node_size = sizeof(m_nodes.at(0).parent) + F::L * sizeof(char) + sizeof(float) + sizeof(bool);

  // Small helper so every raw write is spelled the same way.
  const auto write_raw = [&ofs](const void* p, std::streamsize n) {
    ofs.write(reinterpret_cast<const char*>(p), n);
  };

  write_raw(&n_nodes, sizeof(n_nodes));
  write_raw(&node_size, sizeof(node_size));
  write_raw(&m_k, sizeof(m_k));
  write_raw(&m_L, sizeof(m_L));
  write_raw(&m_scoring, sizeof(m_scoring));
  write_raw(&m_weighting, sizeof(m_weighting));

  // the root node (index 0) is not serialized
  for (size_t i = 1; i < n_nodes; ++i) {
    const Node& node = m_nodes.at(i);

    write_raw(&node.parent, sizeof(node.parent));
    write_raw(node.descriptor.data, F::L);

    const float weight = node.weight;
    write_raw(&weight, sizeof(weight));

    const bool is_leaf = node.isLeaf();
    write_raw(&is_leaf, sizeof(is_leaf));
  }

  ofs.close();
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::save(const std::string& filename) const {
  // Open a cv::FileStorage for writing and delegate to save(fs, name).
  cv::FileStorage fs(filename.c_str(), cv::FileStorage::WRITE);
  if (!fs.isOpened()) {
    throw std::string("Could not open file ") + filename;
  }
  save(fs);
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::load(const std::string& filename) {
  // Open a cv::FileStorage for reading and delegate to load(fs, name).
  cv::FileStorage fs(filename.c_str(), cv::FileStorage::READ);
  if (!fs.isOpened()) {
    throw std::string("Could not open file ") + filename;
  }
  this->load(fs);
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::save(cv::FileStorage& f,
                                               const std::string& name) const {
  // Format YAML:
  // vocabulary
  // {
  //   k:
  //   L:
  //   scoringType:
  //   weightingType:
  //   nodes
  //   [
  //     {
  //       nodeId:
  //       parentId:
  //       weight:
  //       descriptor:
  //     }
  //   ]
  //   words
  //   [
  //     {
  //       wordId:
  //       nodeId:
  //     }
  //   ]
  // }
  //
  // The root node (index 0) is not included in the node vector
  //

  f << name << "{";

  f << "k" << m_k;
  f << "L" << m_L;
  f << "scoringType" << m_scoring;
  f << "weightingType" << m_weighting;

  // tree: iterative depth-first traversal from the root
  f << "nodes"
    << "[";
  std::vector<NodeId> parents;
  parents.push_back(0); // root

  while (!parents.empty()) {
    const NodeId pid = parents.back();
    parents.pop_back();

    // Reference the child list in place: the previous code copied the
    // whole vector for every visited node.
    const std::vector<NodeId>& children = m_nodes[pid].children;

    for (const NodeId cid : children) {
      const Node& child = m_nodes[cid];

      // save node data
      f << "{:";
      f << "nodeId" << (int)child.id;
      f << "parentId" << (int)pid;
      f << "weight" << (double)child.weight;
      f << "descriptor" << F::toString(child.descriptor);
      f << "}";

      // inner nodes are traversed later
      if (!child.isLeaf()) {
        parents.push_back(cid);
      }
    }
  }

  f << "]"; // nodes

  // words: map each word id back to the node that backs it
  f << "words"
    << "[";

  for (size_t wid = 0; wid < m_words.size(); ++wid) {
    f << "{:";
    f << "wordId" << (int)wid;
    f << "nodeId" << (int)m_words[wid]->id;
    f << "}";
  }

  f << "]"; // words

  f << "}";
}

// --------------------------------------------------------------------------

template<class TDescriptor, class F>
void TemplatedVocabulary<TDescriptor, F>::load(const cv::FileStorage& fs,
                                               const std::string& name) {
  // Rebuilds the vocabulary from a cv::FileStorage written by save().
  m_words.clear();
  m_nodes.clear();

  cv::FileNode fvoc = fs[name];

  m_k = (int)fvoc["k"];
  m_L = (int)fvoc["L"];
  m_scoring = (ScoringType)((int)fvoc["scoringType"]);
  m_weighting = (WeightingType)((int)fvoc["weightingType"]);

  createScoringObject();

  // tree nodes; the root is not stored, hence the +1
  cv::FileNode fn = fvoc["nodes"];

  m_nodes.resize(fn.size() + 1);
  m_nodes[0].id = 0;

  for (unsigned int i = 0; i < fn.size(); ++i) {
    const cv::FileNode entry = fn[i];

    const NodeId nid = (int)entry["nodeId"];
    const NodeId pid = (int)entry["parentId"];
    const WordValue weight = (WordValue)entry["weight"];
    const std::string d = (std::string)entry["descriptor"];

    m_nodes[nid].id = nid;
    m_nodes[nid].parent = pid;
    m_nodes[nid].weight = weight;
    m_nodes[pid].children.push_back(nid);

    F::fromString(m_nodes[nid].descriptor, d);
  }

  // words: wire the word index back to the leaf nodes
  fn = fvoc["words"];

  m_words.resize(fn.size());

  for (unsigned int i = 0; i < fn.size(); ++i) {
    const cv::FileNode entry = fn[i];

    const NodeId wid = (int)entry["wordId"];
    const NodeId nid = (int)entry["nodeId"];

    m_nodes[nid].word_id = wid;
    m_words[wid] = &m_nodes[nid];
  }
}

// --------------------------------------------------------------------------

/**
 * Writes printable information of the vocabulary
 * @param os stream to write to
 * @param voc
 */
template<class TDescriptor, class F>
std::ostream& operator<<(std::ostream& os,
                         const TemplatedVocabulary<TDescriptor, F>& voc) {
  // Map the enums to their display names first, then emit everything in a
  // single streaming expression. Unknown enum values print as empty, which
  // matches the behavior of a fall-through switch with no default.
  const char* weighting_name = "";
  switch (voc.getWeightingType()) {
    case TF_IDF:
      weighting_name = "tf-idf";
      break;
    case TF:
      weighting_name = "tf";
      break;
    case IDF:
      weighting_name = "idf";
      break;
    case BINARY:
      weighting_name = "binary";
      break;
  }

  const char* scoring_name = "";
  switch (voc.getScoringType()) {
    case L1_NORM:
      scoring_name = "L1-norm";
      break;
    case L2_NORM:
      scoring_name = "L2-norm";
      break;
    case CHI_SQUARE:
      scoring_name = "Chi square distance";
      break;
    case KL:
      scoring_name = "KL-divergence";
      break;
    case BHATTACHARYYA:
      scoring_name = "Bhattacharyya coefficient";
      break;
    case DOT_PRODUCT:
      scoring_name = "Dot product";
      break;
  }

  os << "Vocabulary: k = " << voc.getBranchingFactor()
     << ", L = " << voc.getDepthLevels()
     << ", Weighting = " << weighting_name
     << ", Scoring = " << scoring_name
     << ", Number of words = " << voc.size();

  return os;
}

} // namespace DBoW2

#endif
