/*
   \file  kmeans_tests.cc
   \brief Unit tests for libgist's multithreaded k-means implementation.
*/

/*
   This file is part of libgist.

   libgist is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 2 of the License, or (at your
   option) any later version.

   libgist is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with libgist; if not, write to the Free Software Foundation,
   Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*/

/*
   REVISION HISTORY

   $HeadURL: http://libgist.googlecode.com/svn/branches/lazebnik_dev/test/kmeans_tests.cc $
   $Id: kmeans_tests.cc 99 2011-11-13 12:42:32Z libgist@gmail.com $
*/

//------------------------------ HEADERS --------------------------------

// libgist
#include "kmeans.hh"

// Boost.Test
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE kmeans_tests
#include <boost/test/unit_test.hpp>

// Boost.uBLAS
#include <boost/numeric/ublas/matrix.hpp>
#include <boost/numeric/ublas/vector.hpp>

// Standard C++
#include <algorithm>
#include <vector>
#include <utility>

// Standard C
#include <math.h>

// Unix
#include <unistd.h>

//------------------------------- TYPES ---------------------------------

namespace {

// libgist's k-means expects its input data to be supplied via a
// Boost.uBLAS matrix. The columns of this matrix are the individual data
// points to be clustered. The number of rows corresponds to the data
// dimensionality.
typedef boost::numeric::ublas::matrix<float> matrix ;

// We use this type to be able to get at the column vectors making up the
// input matrix.
typedef boost::numeric::ublas::vector<float> vector ;

// Clustering results are returned via the following type. The number of
// elements in the vector is equal to the number of columns in the input
// matrix. The i-th element of the vector indicates the cluster index for
// the i-th data point and will be a number in the half-open range [0,k)
// where k is the number of clusters (i.e., a valid index is at most k-1).
typedef std::vector<int> cluster_indices ;

// In addition to the cluster indices, libgist's k-means implementation
// also returns the cluster centroids via a matrix. The number of columns
// in this matrix equals the number of clusters (i.e., k) and the number
// of rows corresponds to the data dimensionality.
//
// Both these outputs, i.e., the cluster indices and centroids, are
// returned in an STL pair.
typedef std::pair<cluster_indices, matrix> kmeans_output ;

} // end of local anonymous namespace encapsulating above types

//----------------- K-MEANS CENTROIDS INITIALIZATION --------------------

// One of the crucial steps for k-means to produce good results is to
// supply a good initial set of cluster centers. By default, libgist's
// k-means implementation uses k random data points as the initial
// centers.
//
// However, libgist allows clients to specify their own centroids
// initialization policies. To gauge the correctness of libgist's k-means
// implementation, we need to initialize the centroids with some known
// data points rather than random ones.
//
// The following function object implements a centroids initialization
// policy that takes the first k data points as the initial cluster
// centers for k-means to work with.
namespace {

struct init_centroids_first_k {
   matrix operator()(int k, const matrix& data) ;
} ;

// Copy the first k columns of the input data matrix into a fresh
// centroids matrix; these columns become the initial cluster centers.
matrix init_centroids_first_k::operator()(int k, const matrix& data)
{
   using boost::numeric::ublas::column ;

   matrix centroids(data.size1(), k) ;
   for (int j = 0; j < k; ++j)
   {
      const vector point = column(data, j) ;
      std::copy(point.begin(), point.end(), column(centroids, j).begin()) ;
   }
   return centroids ;
}

} // end of local anonymous namespace encapsulating above policy

//------------------------ K-MEANS INPUT DATA ---------------------------

// We use 3D data to test libgist's k-means implementation. Each octant
// supplies one cluster. Thus, there are a total of eight clusters. Each
// cluster consists of 27 points arranged in a cube like so:
//
//     - three points each on the bottom and top edges of the front face
//       corresponding to the left and right corners of the front face
//       and their corresponding midpoints (6 points so far)
//
//     - the two midpoints of the left and right edges of the front face
//       (6 + 2 = 8 points so far)
//
//     - the centroid of the front face (8 + 1 = 9 points so far)
//
//     - the same nine points on the rear face of the cube
//       (9 + 9 = 18 points so far)
//
//     - the same arrangement of nine points in the imaginary plane
//       bisecting the front and rear faces for a total of 18 + 9 = 27
//       points in the cluster
//
// Thus, the centers of the cubes are the cluster centroids. After
// clustering, we expect to recover these points as the centroids and
// the exact cluster assignments thereby proving libgist's k-means
// implementation correct.
namespace {

// Some parameters
const int K =  8 ; // number of clusters
const int D =  3 ; // number of data dimensions
const int S = 27 ; // number of data points per cluster
const int N = K*S; // total number of data points

// The coordinates of the data points. They are chosen so that each
// cluster is sufficiently far away from every other cluster so that they
// can be unambiguously identified.
const float g_data_points[] = {
    // The first K points are the cluster centers. Each "Cluster #i"
    // block below lists the cube's other S-1 = 26 points; the center
    // itself appears only here and is not repeated in its block.
    10,  10,  10,
    50,  50, -50,
   -30,  30, -30,
   -50,  50,  50,
    50, -50,  50,
    30, -30, -30,
   -50, -50, -50,
   -10, -10,  10,

    // Cluster #1: remaining 26 points of the cube centered at ( 10,  10,  10)
     5,   5,  15,
    10,   5,  15,
    15,   5,  15,
     5,  10,  15,
    10,  10,  15,
    15,  10,  15,
     5,  15,  15,
    10,  15,  15,
    15,  15,  15,
     5,   5,  10,
    10,   5,  10,
    15,   5,  10,
     5,  10,  10,
    15,  10,  10,
     5,  15,  10,
    10,  15,  10,
    15,  15,  10,
     5,   5,   5,
    10,   5,   5,
    15,   5,   5,
     5,  10,   5,
    10,  10,   5,
    15,  10,   5,
     5,  15,   5,
    10,  15,   5,
    15,  15,   5,

    // Cluster #2: remaining 26 points of the cube centered at ( 50,  50, -50)
    45,  45, -45,
    50,  45, -45,
    55,  45, -45,
    45,  50, -45,
    50,  50, -45,
    55,  50, -45,
    45,  55, -45,
    50,  55, -45,
    55,  55, -45,
    45,  45, -50,
    50,  45, -50,
    55,  45, -50,
    45,  50, -50,
    55,  50, -50,
    45,  55, -50,
    50,  55, -50,
    55,  55, -50,
    45,  45, -55,
    50,  45, -55,
    55,  45, -55,
    45,  50, -55,
    50,  50, -55,
    55,  50, -55,
    45,  55, -55,
    50,  55, -55,
    55,  55, -55,

    // Cluster #3: remaining 26 points of the cube centered at (-30,  30, -30)
   -35,  25, -25,
   -30,  25, -25,
   -25,  25, -25,
   -35,  30, -25,
   -30,  30, -25,
   -25,  30, -25,
   -35,  35, -25,
   -30,  35, -25,
   -25,  35, -25,
   -35,  25, -30,
   -30,  25, -30,
   -25,  25, -30,
   -35,  30, -30,
   -25,  30, -30,
   -35,  35, -30,
   -30,  35, -30,
   -25,  35, -30,
   -35,  25, -35,
   -30,  25, -35,
   -25,  25, -35,
   -35,  30, -35,
   -30,  30, -35,
   -25,  30, -35,
   -35,  35, -35,
   -30,  35, -35,
   -25,  35, -35,

    // Cluster #4: remaining 26 points of the cube centered at (-50,  50,  50)
   -55,  45,  55,
   -50,  45,  55,
   -45,  45,  55,
   -55,  50,  55,
   -50,  50,  55,
   -45,  50,  55,
   -55,  55,  55,
   -50,  55,  55,
   -45,  55,  55,
   -55,  45,  50,
   -50,  45,  50,
   -45,  45,  50,
   -55,  50,  50,
   -45,  50,  50,
   -55,  55,  50,
   -50,  55,  50,
   -45,  55,  50,
   -55,  45,  45,
   -50,  45,  45,
   -45,  45,  45,
   -55,  50,  45,
   -50,  50,  45,
   -45,  50,  45,
   -55,  55,  45,
   -50,  55,  45,
   -45,  55,  45,

    // Cluster #5: remaining 26 points of the cube centered at ( 50, -50,  50)
    45, -55,  55,
    50, -55,  55,
    55, -55,  55,
    45, -50,  55,
    50, -50,  55,
    55, -50,  55,
    45, -45,  55,
    50, -45,  55,
    55, -45,  55,
    45, -55,  50,
    50, -55,  50,
    55, -55,  50,
    45, -50,  50,
    55, -50,  50,
    45, -45,  50,
    50, -45,  50,
    55, -45,  50,
    45, -55,  45,
    50, -55,  45,
    55, -55,  45,
    45, -50,  45,
    50, -50,  45,
    55, -50,  45,
    45, -45,  45,
    50, -45,  45,
    55, -45,  45,

    // Cluster #6: remaining 26 points of the cube centered at ( 30, -30, -30)
    25, -35, -25,
    30, -35, -25,
    35, -35, -25,
    25, -30, -25,
    30, -30, -25,
    35, -30, -25,
    25, -25, -25,
    30, -25, -25,
    35, -25, -25,
    25, -35, -30,
    30, -35, -30,
    35, -35, -30,
    25, -30, -30,
    35, -30, -30,
    25, -25, -30,
    30, -25, -30,
    35, -25, -30,
    25, -35, -35,
    30, -35, -35,
    35, -35, -35,
    25, -30, -35,
    30, -30, -35,
    35, -30, -35,
    25, -25, -35,
    30, -25, -35,
    35, -25, -35,

    // Cluster #7: remaining 26 points of the cube centered at (-50, -50, -50)
   -55, -55, -45,
   -50, -55, -45,
   -45, -55, -45,
   -55, -50, -45,
   -50, -50, -45,
   -45, -50, -45,
   -55, -45, -45,
   -50, -45, -45,
   -45, -45, -45,
   -55, -55, -50,
   -50, -55, -50,
   -45, -55, -50,
   -55, -50, -50,
   -45, -50, -50,
   -55, -45, -50,
   -50, -45, -50,
   -45, -45, -50,
   -55, -55, -55,
   -50, -55, -55,
   -45, -55, -55,
   -55, -50, -55,
   -50, -50, -55,
   -45, -50, -55,
   -55, -45, -55,
   -50, -45, -55,
   -45, -45, -55,

    // Cluster #8: remaining 26 points of the cube centered at (-10, -10,  10)
   -15, -15,  15,
   -10, -15,  15,
   - 5, -15,  15,
   -15, -10,  15,
   -10, -10,  15,
   - 5, -10,  15,
   -15, - 5,  15,
   -10, - 5,  15,
   - 5, - 5,  15,
   -15, -15,  10,
   -10, -15,  10,
   - 5, -15,  10,
   -15, -10,  10,
   - 5, -10,  10,
   -15, - 5,  10,
   -10, - 5,  10,
   - 5, - 5,  10,
   -15, -15,   5,
   -10, -15,   5,
   - 5, -15,   5,
   -15, -10,   5,
   -10, -10,   5,
   - 5, -10,   5,
   -15, - 5,   5,
   -10, - 5,   5,
   - 5, - 5,   5,
} ;

// Helper function to "convert" above array of data points into a
// Boost.uBLAS matrix that can be used as the input data for k-means
// clustering.
matrix test_data()
{
   // Each consecutive run of D floats in g_data_points is one data
   // point; it becomes one column of the D-by-N input matrix.
   matrix data(D, N) ;
   for (int col = 0; col < N; ++col)
   {
      const float* point = g_data_points + col * D ;
      for (int row = 0; row < D; ++row)
         data(row, col) = point[row] ;
   }
   return data ;
}

} // end of local anonymous namespace encapsulating above data and helper

//-------------------- CHECKING CLUSTERING RESULTS ----------------------

namespace {

// This function checks the cluster assignments made by k-means to ensure
// that each input data point is part of the expected 3D cluster.
bool check_cluster_assignments(const cluster_indices& clusters)
{
   // Per our centroids initialization policy and test data layout, the
   // first K data points are the actual cluster centers; each must have
   // been assigned the cluster index matching its own position.
   for (int center = 0; center < K; ++center)
      if (clusters[center] != center)
         return false ;

   // The rest of the input is laid out cluster by cluster: after the K
   // centers come K consecutive groups of S-1 points each, and every
   // point in group i must carry cluster index i.
   int point = K ; // first K entries already examined above
   for (int cluster = 0; cluster < K; ++cluster)
      for (int member = 1; member < S; ++member, ++point)
         if (clusters[point] != cluster)
            return false ;

   return true ; // great, cluster assignments are as expected
}

// This function confirms that the cluster centers computed by k-means
// are the same as the cube centers we provided as input.
bool check_cluster_centroids(const matrix& centroids)
{
   const float* p = g_data_points ;
   for (int i = 0; i < K; ++i) // for each cluster
   {
      vector v = boost::numeric::ublas::column(centroids, i) ;
      for (int j = 0; j < D; ++j, ++p) // for each data dimension
         if (fabsf(v[j] - *p) > 1e-4f) // computed centroid must be near
            return false ;             // actual centroid's coordinate
   }
   return true ; // great, cluster centroids are as expected
}

} // end of local anonymous namespace encapsulating above helpers

//----------------------- MISCELLANEOUS HELPERS -------------------------

namespace {

// Return the number of processors available on the system; always
// reports at least one.
int num_cpu()
{
#ifdef _SC_NPROCESSORS_ONLN
   // sysconf can fail (negative return); in that case fall through and
   // report a single processor.
   const long n = sysconf(_SC_NPROCESSORS_ONLN) ;
   if (n >= 1)
      return static_cast<int>(n) ;
#endif // sysconf not setup to determine num CPU's (or it failed)
   return 1 ;
}

// Shortcut for instantiating libgist's k-means object: float data
// elements, with the first-k centroids initialization policy defined
// earlier in this file.
typedef gist::kmeans<float, init_centroids_first_k> kmeans ;

} // end of local anonymous namespace encapsulating above helpers

//---------------- SINGLE-THREADED K-MEANS TEST SUITE -------------------

BOOST_AUTO_TEST_CASE(single_threaded)
{
   // Cluster the test data with a default-constructed (and, therefore,
   // single-threaded) k-means object.
   kmeans_output output = kmeans().cluster(K, test_data()) ;

   // Basic sanity check: k-means must hand back exactly one cluster
   // index per input data point.
   const int num_indices = output.first.size() ;
   BOOST_REQUIRE(num_indices == N) ;

   // Basic sanity checks: the centroids matrix must have one row per
   // data dimension and one column per cluster.
   const int num_rows = output.second.size1() ;
   const int num_cols = output.second.size2() ;
   BOOST_REQUIRE(num_rows == D && num_cols == K) ;

   // With the sizes confirmed, verify the actual cluster assignments
   // and centroids computed by k-means.
   BOOST_CHECK(check_cluster_assignments(output.first)) ;
   BOOST_CHECK(check_cluster_centroids(output.second))  ;
}

//----------------- MULTITHREADED K-MEANS TEST SUITE --------------------

BOOST_AUTO_TEST_CASE(multithreaded)
{
   // Cluster the test data using one worker thread per available CPU.
   kmeans_output output = kmeans(num_cpu()).cluster(K, test_data()) ;

   // Basic sanity check: k-means must hand back exactly one cluster
   // index per input data point.
   const int num_indices = output.first.size() ;
   BOOST_REQUIRE(num_indices == N) ;

   // Basic sanity checks: the centroids matrix must have one row per
   // data dimension and one column per cluster.
   const int num_rows = output.second.size1() ;
   const int num_cols = output.second.size2() ;
   BOOST_REQUIRE(num_rows == D && num_cols == K) ;

   // With the sizes confirmed, verify the actual cluster assignments
   // and centroids computed by k-means.
   BOOST_CHECK(check_cluster_assignments(output.first)) ;
   BOOST_CHECK(check_cluster_centroids(output.second))  ;
}

//-----------------------------------------------------------------------

/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */
