#vector_data = to_vector_data(scan_data_binned_frequent)

#n_comp = 20
#n_comp = 3
#pca = PCA(n_components=n_comp)
#X_r = pca.fit(vector_data).transform(vector_data)
#
## Percentage of variance explained by each component
#print('explained variance ratio: \n%s'
#      % str(pca.explained_variance_ratio_))

#GMM
# NOTE(review): this dead code predates scikit-learn 0.20 — mixture.GMM was
# removed (use mixture.GaussianMixture), and xrange/dict.has_key are Python 2
# only. It will need porting before it can be re-enabled.
#from sklearn import mixture
#g = mixture.GMM(n_components=3)
#g_res = g.fit(X_r)
#Z = g.predict(X_r)

#x = [item[0] for item in X_r]
#y = [item[1] for item in X_r]
##
#kmeans = KMeans(init='k-means++', n_clusters=3, n_init=10)
#kmeans.fit(X_r)
##
#Z = kmeans.predict(X_r)
#
#plt.figure()
#
#colour_list = OrderedDict()
#for idx in xrange(0, len(Z)):
#    colour_list[scan_data_binned_frequent.keys()[idx]] = my_colors[Z[idx]]
#        
#plt.scatter(x, y, facecolor=colour_list.values(), s = 30)
#plt.show()
#plt.savefig("pca.pdf")
#plt.close
#
#plt.figure()
#plt.bar(xrange(0, len(pca.explained_variance_ratio_)), pca.explained_variance_ratio_)        
#plt.show()
#plt.close()
    
    
#for router in scan_data_binned_frequent:
#    colour_list[router] = my_colors[1]
#
#for router in scan_data_binned:
#    if not colour_list.has_key(router):
#        colour_list[router] = my_colors[0]
        
#plot_figure_1(scan_data_binned_frequent, min(dates), max(dates), "data_bin_" + str(delta) + "min_least_frequent" + datetime.datetime.strftime(start_date, "%Y-%m-%d") + "PCA.pdf", (8,6), True, {}, colour_list)
 

###############################################################################
#PCA STEP BY STEP     http://sebastianraschka.com/Articles/2014_pca_step_by_step.html 
#n_router = len(vector_data)
### vector_data - [date1, date2, date3 ...]
##
#vector_data = np.array(vector_data)
#mean_arr = []
#for col_idx in xrange(0, len(vector_data[0])):
#    mean_arr.append(np.mean(vector_data[:, col_idx]))
#mean_arr = np.array(mean_arr)
#
#scatter_matrix = np.zeros((n_router,n_router))
#for i in range(vector_data.shape[1]):
#    scatter_matrix += (vector_data[:,i].reshape(n_router,1)\
#        - mean_arr).dot((vector_data[:,i].reshape(n_router,1) - mean_arr).T)
#print('Scatter Matrix:\n', scatter_matrix)
#
#cov_mat = np.cov([vector_data[col_idx, :] for col_idx in xrange(0, len(vector_data))])
###cov_mat = np.cov([vector_data[0,:],vector_data[1,:],vector_data[2,:]])
#print('Covariance Matrix:\n', cov_mat)
#
##vector_data = np.array(vector_data)
##cov_mat = np.cov([vector_data[col_idx, :] for col_idx in xrange(0, len(vector_data))])
#
##plt.figure()
##mult_product = np.zeros((len(vector_data), len(vector_data)))
##
##for i in xrange(0, len(vector_data)):
##    for j in xrange(0, len(vector_data)):
###        print sum(vector_data[i] * vector_data[j])
##        mult_product[i,j] = sum(vector_data[i] * vector_data[j]) / float(sum(vector_data[i]))
#        
## eigenvectors and eigenvalues from the scatter matrix
#eig_val_sc, eig_vec_sc = np.linalg.eig(scatter_matrix)
#
## eigenvectors and eigenvalues from the covariance matrix
#eig_val_cov, eig_vec_cov = np.linalg.eig(cov_mat)
#
#for i in range(len(eig_vec_cov)):
#    eigvec_sc = eig_vec_sc[:,i].reshape(1,n_router).T
#    eigvec_cov = eig_vec_cov[:,i].reshape(1,n_router).T
##    assert eigvec_sc.all() == eigvec_cov.all(), 'Eigenvectors are not identical'
##
##    print('Eigenvector {}: \n{}'.format(i+1, eigvec_sc))
##    print('Eigenvalue {} from scatter matrix: {}'.format(i+1, eig_val_sc[i]))
##    print('Eigenvalue {} from covariance matrix: {}'.format(i+1, eig_val_cov[i]))
##    print('Scaling factor: ', eig_val_sc[i]/eig_val_cov[i])
##print(40 * '-')
#    
##for i in range(len(eig_val_sc)):
##    eigv = eig_vec_sc[:,i].reshape(1,n_router).T
##    np.testing.assert_array_almost_equal(scatter_matrix.dot(eigv),\
##            eig_val_sc[i] * eigv, decimal=6,\
##            err_msg='', verbose=True)
##
##for ev in eig_vec_sc:
##    np.testing.assert_array_almost_equal(1.0, np.linalg.norm(ev))
#    #instead of 'assert' because of rounding errors
###    
###    
#### Make a list of (eigenvalue, eigenvector) tuples
#eig_pairs = [(np.abs(eig_val_sc[i]), eig_vec_sc[:,i]) for i in range(len(eig_val_sc))]
###
#### Sort the (eigenvalue, eigenvector) tuples from high to low
#eig_pairs.sort()
#eig_pairs.reverse()
###
#### Visually confirm that the list is correctly sorted by decreasing eigenvalues
#for i in eig_pairs:
#    print(i[0])
###    
#matrix_w = np.hstack((eig_pairs[0][1].reshape(n_router,1), eig_pairs[1][1].reshape(n_router,1)))
#print('Matrix W:\n', matrix_w)
###
#transformed = matrix_w.T.dot(vector_data)
#
#from matplotlib import pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
#from mpl_toolkits.mplot3d import proj3d

#GMM
#from sklearn import mixture
#g = mixture.GMM(n_components=2)
#g_res = g.fit(transformed)
#Z = g.predict(transformed)

#x = [item[0] for item in transformed]
#y = [item[1] for item in transformed]
#
#kmeans = KMeans(init='k-means++', n_clusters=3, n_init=10)
#kmeans.fit(transformed)
#
#Z = kmeans.predict(transformed)
#
#plt.figure()
###
#colour_list = OrderedDict()
#for idx in xrange(0, len(Z)):
#    colour_list[scan_data_binned_frequent.keys()[idx]] = my_colors[Z[idx]]
#
#fig = plt.figure()
#ax = fig.add_subplot(111, projection='3d')
#ax.scatter(transformed[0, :], transformed[1, :],\
#    transformed[2, :], c=colour_list.values(), s=50)
#       
#     
#plt.scatter(x, y, facecolor=colour_list.values(), s = 30)
#plt.show()
#plt.savefig("pca_1.pdf")
#plt.close
#
#plt.figure()
#plt.bar(xrange(0, len(pca.explained_variance_ratio_)), pca.explained_variance_ratio_)        
#plt.show()
#plt.close()
    
#for router in scan_data_binned_frequent:
#    colour_list[router] = my_colors[1]
#
#for router in scan_data_binned:
#    if not colour_list.has_key(router):
#        colour_list[router] = my_colors[0]
        
#plot_figure_1(scan_data_binned_frequent, min(dates), max(dates), "data_bin_" + str(delta) + "min_least_frequent" + datetime.datetime.strftime(start_date, "%Y-%m-%d") + "PCA.pdf", (8,6), True, {}, colour_list)
 
 
 
#plt.plot(transformed[0,:], transformed[1,:],\
#     'o', markersize=7, color='blue', alpha=0.5)
#plt.plot(transformed[0,20:40], transformed[1,20:40],
#     '^', markersize=7, color='red', alpha=0.5, label='class2')
#plt.xlim([-4,4])
#plt.ylim([-4,4])
#plt.xlabel('x_values')
#plt.ylabel('y_values')
#plt.legend()
#plt.title('Transformed samples with class labels')
##
#plt.show()

###############################################################################
#
#from sklearn.decomposition import PCA as sklearnPCA
#
#sklearn_pca = sklearnPCA(n_components=3)
#sklearn_transf = sklearn_pca.fit_transform(vector_data)
#
#from matplotlib import pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
#from mpl_toolkits.mplot3d import proj3d
#
##kmeans = KMeans(init='k-means++', n_clusters=3, n_init=10)
##kmeans.fit(sklearn_transf)
##
##Z = kmeans.predict(sklearn_transf)
#
#colour_list = OrderedDict()
#for idx in xrange(0, len(Z)):
#    colour_list[scan_data_binned_frequent.keys()[idx]] = my_colors[Z[idx] * 2 + 1]
#
#fig = plt.figure(figsize=(8,8))
#ax = fig.add_subplot(111, projection='3d')
#ax.scatter(sklearn_transf[:, 0], sklearn_transf[:, 1],\
#    sklearn_transf[:, 2], c=colour_list.values(), s=50)
#
#plt.show()

#plot_figure_1(scan_data_binned_frequent, min(dates), max(dates), "data_bin_" + str(delta) + "min_GMM" + datetime.datetime.strftime(start_date, "%Y-%m-%d") + "PCA.pdf", (8,6), True, {}, colour_list)