
# coding: utf-8

# In[ ]:

"""This lab delves into exploratory analysis fo neuroscience data, specifically using principal component analysis(PCA) and feature-based aggregation"""

"""Part 1: Work through the steps of PCA on a sample dataset"""

"""(1a) Interpreting PCA"""
"""
   PCA can be interpreted as identfying the "directions" along which the data vary the most. In the first step of PCA, we must first center our data.
"""
"""(1a) Interpreting PCA"""
import numpy as np

def create2DGaussian(mu, sigma, cov, n):
    """Randomly sample points from a two-dimensional Gaussian distribution.

    Note:
        Fix: the original was named `create2DGuassian(mn, ...)` while the only
        call site uses `create2DGaussian(mu=...)`, so it could never be called.
        The seed is fixed so the lab's sample data is reproducible.

    Args:
        mu (float): The mean used for both dimensions.
        sigma (float): The variance of each dimension (diagonal of the covariance matrix).
        cov (float): The covariance between the two dimensions.
        n (int): The number of points to sample.

    Returns:
        np.ndarray: An (n, 2) array of samples.
    """
    np.random.seed(142)
    return np.random.multivariate_normal(np.array([mu, mu]), np.array([[sigma, cov], [cov, sigma]]), n)

# Generate correlated 2-D sample data and distribute it as a Spark RDD.
# NOTE(review): `sc` is assumed to be a SparkContext supplied by the notebook
# environment -- confirm.
dataCorrelated = create2DGaussian(mu=50, sigma=1, cov=.9, n=100)

correlatedData = sc.parallelize(dataCorrelated)

# Mean point of the RDD: elementwise sum of all points divided by the count.
meanCorrelated = correlatedData.sum()/correlatedData.count()

# First PCA step: center the data by subtracting the mean from every point.
correlatedDataZeroMean = correlatedData.map(lambda lp: lp - meanCorrelated)

print meanCorrelated
print correlatedData.take(1)
print correlatedDataZeroMean.take(1)


# In[ ]:

"""(1b) Sample covariance matrix"""
# Compute the covariance matrix using outer products and correlatedDataZeroMean
correlatedCov = correlatedDataZeroMean.map(lambda x: np.outer(x, x)).sum()/correlatedDataZeroMean.count()
print correlatedCov


# In[ ]:

"""(1c) Covariance Function"""

def estimateCovariance(data):
    """Compute the sample covariance matrix for a given RDD.

    Note:
        This is the biased estimator: outer products are averaged over n (the
        number of observations), not n - 1.

    Args:
        data (RDD of np.ndarray): An RDD consisting of NumPy arrays.

    Returns:
        np.ndarray: A multi-dimensional array where the number of rows and columns both equal
            the length of the arrays in the input RDD.
    """
    # Fix: the original called the non-existent method `data.sun()` (typo for `sum`).
    meanData = data.sum()/data.count()
    # Center the data, then average the outer products of the centered points.
    dataZeroMean = data.map(lambda x: x - meanData)
    return dataZeroMean.map(lambda x: np.outer(x, x)).sum()/dataZeroMean.count()

# Sanity check: should match the covariance computed by hand in (1b).
correlatedCovAuto = estimateCovariance(correlatedData)
print correlatedCovAuto


# In[ ]:

"""(1d) Eigendecompostion"""
from numpy.linalg import eigh

# Calculate the eigenvalues and eigenvectors from correlatedCovAuto
eigVals, eigVecs = eigh(correlatedCovAuto)
print 'eigenvalues: {0}'.format(eigVals)
print '\neigenvectors\n{0}'.format(eigVecs)

#Use np.argsort to find the top eigenvector based on the largest eigenvalue
inds = np.argsort(eigVals)[::-1]
print inds
topcomponent = eigVecs[:, inds[0]]
print '\ntop principal component: {0}'.format(topComponent)


# In[ ]:

"""(1e) PCA scores"""
"""
   We just computed the top principal component for a 2-dimensional non-spherical dataset. Now let's use this principal component to derive a 
   one-dimensional representation for the original data. To compute these compact representations, which are sometimes called PCA "scores",
   calculate the dot product between each data point in the raw data and the top principal component.
"""
# USe the topComponent and the data from correlatedDataa to generate PCA scores
correlatedDataScores = correlatedData.map(lambda data: np.dot(topcomponent, data))
print 'one-dimensional data (first three):\n{0}'.format(np.asarray(correlatedDataScores.take(3)))


# In[ ]:

"""Part 2: Write a PCA function and evaluate PCA on sample datasets"""

"""(2a) PCA function"""

def pca(data, k=2):
    """Computes the top `k` principal components, corresponding scores, and all eigenvalues.

    Note:
       All eigenvalues are returned in sorted order (largest to smallest).  As with
       `eigh`, each principal component is stored as a column of the returned matrix.

    Args:
       data (RDD of np.ndarray): An RDD consisting of NumPy arrays.
       k (int): The number of principal components to return.

    Returns:
       tuple of (np.ndarray, RDD of np.ndarray, np.ndarray): A tuple of (eigenvectors,
       RDD of scores, eigenvalues).  Eigenvectors is a d-by-k matrix, scores is the
       projection of each input point onto those k components, and eigenvalues is an
       array of length d (the number of features) sorted in descending order.
    """
    covarianceMatrix = estimateCovariance(data)
    # eigh returns eigenvalues in ascending order; flip the index order to descending.
    eigenvalues, eigenvectors = eigh(covarianceMatrix)
    descendingOrder = np.argsort(eigenvalues)[::-1]
    sortedEigenvalues = eigenvalues[descendingOrder]
    # Keep the k eigenvector columns paired with the k largest eigenvalues.
    topComponents = eigenvectors[:, descendingOrder[:k]]
    # Score each point by projecting it onto the retained components.
    scores = data.map(lambda point: point.dot(topComponents))
    return (topComponents, scores, sortedEigenvalues)

# Run the full pipeline on the correlated data; with k=2 in two dimensions
# this is a lossless change of basis.
topComponentsCorrelated, correlatedDataScoresAuto, eigenvaluesCorrelated = pca(correlatedData, k=2)

print 'topComponentsCorrelated: \n{0}'.format(topComponentsCorrelated)
print ('\ncorrelatedDataScoresAuto (first three): \n{0}'
       .format('\n'.join(map(str, correlatedDataScoresAuto.take(3)))))
print '\neigenvaluesCorrelated: \n{0}'.format(eigenvaluesCorrelated)

# Create a higher dimensional test set: five 4-dimensional points.
pcaTestData = sc.parallelize([np.arange(x, x+4) for x in np.arange(0, 20, 4)])
componentsTest, testScores, eigenvaluesTest = pca(pcaTestData, 3)

print '\npcaTestData: \n{0}'.format(np.array(pcaTestData.collect()))
print '\ncomponentsTest: \n{0}'.format(componentsTest)
print ('\ntestScores (first three): \n{0}'
       .format('\n'.join(map(str, testScores.take(3)))))
print '\neigenvaluesTest: \n{0}'.format(eigenvaluesTest)


# In[ ]:

"""(2b) PCA on dataRandom"""

randomData = sc.parallelize(dataRandom)

topComponentsRandom, randomDataScoresAuto, eigenvaluesRandom = pca(randomData, k=2)

print 'topComponentsRandom: \n{0}'.format(topComponentsRandom)
print ('\nrandomDataScoresAuto (first three): \n{0}'
       .format('\n'.join(map(str, randomDataScoresAuto.take(3)))))
print '\neigenvaluesRandom: \n{0}'.format(eigenvaluesRandom)


# In[ ]:

"""Visualization : PCA projection"""
"""
   Plot the original data and the 1-dimensional reconstruction using the top principal component to see how the PCA solution looks. The original
   data is plotted as before; however, the 1-dimensional reconstruction (projection) is plotted in green on top of the original data and the 
   vectors (lines) representing the two principal components are shown as dotted lines.
"""
def projectPointsAndGetLines(data, components, xRange):
    """Project the data onto the first component and build line segments for the top two.

    Args:
        data (RDD of np.ndarray): Points to project (only the first two dimensions are used).
        components (np.ndarray): Matrix whose columns are principal components.
        xRange (float): Half-width (in x1) of the component lines to draw.

    Returns:
        tuple: ((x1, x2) coordinates of the projected points,
                (x-pair, y-pair) endpoints of the first component's line,
                (x-pair, y-pair) endpoints of the second component's line).
    """
    firstComponent = components[:, 0]
    # Slope of each of the top two components in the (x1, x2) plane.
    slopes = components[1, :2] / components[0, :2]
    slopeOne, slopeTwo = slopes[0], slopes[1]

    centre = data.mean()[:2]
    centred = data.map(lambda point: point - centre)
    # Orthogonal projection of each centred point onto the first component.
    onLine = centred.map(lambda point: (point.dot(firstComponent) /
                                        firstComponent.dot(firstComponent)) * firstComponent)
    shiftedBack = onLine.map(lambda point: point + centre)
    x1, x2 = zip(*shiftedBack.collect())

    # Endpoints of each component line, centred on the data mean.
    deltaOne = np.asarray([xRange, xRange * slopeOne])
    deltaTwo = np.asarray([xRange, xRange * slopeTwo])
    startOne, endOne = centre - deltaOne, centre + deltaOne
    startTwo, endTwo = centre - deltaTwo, centre + deltaTwo

    return ((x1, x2),
            ([startOne[0], endOne[0]], [startOne[1], endOne[1]]),
            ([startTwo[0], endTwo[0]], [startTwo[1], endTwo[1]]))

# Project the correlated data onto its top principal component for plotting.
((x1, x2), (line1X1, line1X2), (line2X1, line2X2)) =     projectPointsAndGetLines(correlatedData, topComponentsCorrelated, 5)

# generate layout and plot data
# NOTE(review): `preparePlot` and `plt` are assumed to come from the notebook
# environment (a plotting helper and matplotlib.pyplot) -- confirm.
fig, ax = preparePlot(np.arange(46, 55, 2), np.arange(46, 55, 2), figsize=(7, 7))
ax.set_xlabel(r'Simulated $x_1$ values'), ax.set_ylabel(r'Simulated $x_2$ values')
ax.set_xlim(45.5, 54.5), ax.set_ylim(45.5, 54.5)
# Dotted lines show the two principal component directions.
plt.plot(line1X1, line1X2, linewidth=3.0, c='#8cbfd0', linestyle='--')
plt.plot(line2X1, line2X2, linewidth=3.0, c='#d6ebf2', linestyle='--')
# Original points in light blue, their 1-D reconstructions in green on top.
plt.scatter(dataCorrelated[:,0], dataCorrelated[:,1], s=14**2, c='#d6ebf2',
            edgecolors='#8cbfd0', alpha=0.75)
plt.scatter(x1, x2, s=14**2, c='#62c162', alpha=.75)
pass


# In[ ]:

# Same projection plot as above, but for the uncorrelated dataset.
((x1, x2), (line1X1, line1X2), (line2X1, line2X2)) =     projectPointsAndGetLines(randomData, topComponentsRandom, 5)

# generate layout and plot data
fig, ax = preparePlot(np.arange(46, 55, 2), np.arange(46, 55, 2), figsize=(7, 7))
ax.set_xlabel(r'Simulated $x_1$ values'), ax.set_ylabel(r'Simulated $x_2$ values')
ax.set_xlim(45.5, 54.5), ax.set_ylim(45.5, 54.5)
# Dotted lines are the two principal component directions.
plt.plot(line1X1, line1X2, linewidth=3.0, c='#8cbfd0', linestyle='--')
plt.plot(line2X1, line2X2, linewidth=3.0, c='#d6ebf2', linestyle='--')
plt.scatter(dataRandom[:,0], dataRandom[:,1], s=14**2, c='#d6ebf2',
            edgecolors='#8cbfd0', alpha=0.75)
plt.scatter(x1, x2, s=14**2, c='#62c162', alpha=.75)
pass


# In[ ]:

"""Visualization : Three-dimensional data"""

from mpl_toolkits.mplot3d import Axes3D

m = 100
mu = np.array([50, 50, 50])
r1_2 = 0.9
r1_3 = 0.7
r2_3 = 0.1
sigma1 = 5
sigma2 = 20
sigma3 = 20
c = np.array([[sigma1 ** 2, r1_2 * sigma1 * sigma2, r1_3 * sigma1 * sigma3],
             [r1_2 * sigma1 * sigma2, sigma2 ** 2, r2_3 * sigma2 * sigma3],
             [r1_3 * sigma1 * sigma3, r2_3 * sigma2 * sigma3, sigma3 ** 2]])
np.random.seed(142)
dataThreeD = np.random.multivariate_normal(mu, c, m)

from matplotlib.colors import ListedColormap, Normalize
from matplotlib.cm import get_cmap
norm = Normalize()
cmap = get_cmap("Blues")
clrs = cmap(np.array(norm(dataThreeD[:,2])))[:,0:3]

fig = plt.figure(figsize=(11, 6))
ax = fig.add_subplot(121, projection='3d')
ax.azim=-100
ax.scatter(dataThreeD[:,0], dataThreeD[:,1], dataThreeD[:,2], c=clrs, s=14**2)

xx, yy = np.meshgrid(np.arange(-15, 10, 1), np.arange(-50, 30, 1))
normal = np.array([0.96981815, -0.188338, -0.15485978])
z = (-normal[0] * xx - normal[1] * yy) * 1. / normal[2]
xx = xx + 50
yy = yy + 50
z = z + 50

ax.set_zlim((-20, 120)), ax.set_ylim((-20, 100)), ax.set_xlim((30, 75))
ax.plot_surface(xx, yy, z, alpha=.10)

ax = fig.add_subplot(122, projection='3d')
ax.azim=10
ax.elev=20
#ax.dist=8
ax.scatter(dataThreeD[:,0], dataThreeD[:,1], dataThreeD[:,2], c=clrs, s=14**2)

ax.set_zlim((-20, 120)), ax.set_ylim((-20, 100)), ax.set_xlim((30, 75))
ax.plot_surface(xx, yy, z, alpha=.1)
plt.tight_layout()
pass


# In[ ]:

"""(2c) 3D to 2D"""

threeDData = sc.parallelize(dataThreeD)
componentsThreeD, threeDScores, eigenvaluesThreeD = pca(threeDDate, k=2)

print 'componentsThreeD: \n{0}'.format(componentsThreeD)
print ('\nthreeDScores (first three): \n{0}'
       .format('\n'.join(map(str, threeDScores.take(3)))))
print '\neigenvaluesThreeD: \n{0}'.format(eigenvaluesThreeD)


# In[ ]:

"""Visualization: 2D representation of 3D data"""
scoresThreeD = np.asarray(threeDScores.collect())

# generate layout and plot data
fig, ax = preparePlot(np.arange(20, 150, 20), np.arange(-40, 110, 20))
ax.set_xlabel(r'New $x_1$ values'), ax.set_ylabel(r'New $x_2$ values')
ax.set_xlim(5, 150), ax.set_ylim(-45, 50)
plt.scatter(scoresThreeD[:,0], scoresThreeD[:,1], s=14**2, c=clrs, edgecolors='#8cbfd0', alpha=0.75)
pass


# In[ ]:

"""(2d) Varianve explained"""
def varianceExplained(data, k=1):
    """Calculate the fraction of variance explained by the top `k` eigenvectors.

    Args:
        data (RDD of np.ndarray): An RDD that contains NumPy arrays which store the
            features for an observation.
        k (int): The number of principal components to consider.

    Returns:
        float: A number between 0 and 1 representing the percentage of variance explained
            by the top `k` eigenvectors.
    """
    # Fix: the original hard-coded pca(data, k=1).  It still produced correct
    # results only because pca returns *all* eigenvalues regardless of k;
    # passing k through matches the intent and avoids relying on that detail.
    components, scores, eigenvalues = pca(data, k=k)
    # Eigenvalues are sorted descending, so the first k are the top ones.
    return sum(eigenvalues[:k]) / sum(eigenvalues)

# Compare how much variance one or two components capture for each dataset.
varianceRandom1 = varianceExplained(randomData, 1)
varianceCorrelated1 = varianceExplained(correlatedData, 1)
varianceRandom2 = varianceExplained(randomData, 2)
varianceCorrelated2 = varianceExplained(correlatedData, 2)
varianceThreeD2 = varianceExplained(threeDData, 2)
print ('Percentage of variance explained by the first component of randomData: {0:.1f}%'
       .format(varianceRandom1 * 100))
print ('Percentage of variance explained by both components of randomData: {0:.1f}%'
       .format(varianceRandom2 * 100))
print ('\nPercentage of variance explained by the first component of correlatedData: {0:.1f}%'.
       format(varianceCorrelated1 * 100))
print ('Percentage of variance explained by both components of correlatedData: {0:.1f}%'
       .format(varianceCorrelated2 * 100))
print ('\nPercentage of variance explained by the first two components of threeDData: {0:.1f}%'
       .format(varianceThreeD2 * 100))


# In[ ]:

"""Part 3: Parse, inspect, and preprocess neuroscience data then perform PCA"""

"""(3a) Load neuroscience data"""

import os
baseDir = os.path.join('data')
# NOTE(review): '#','#' look like lab placeholders for the real directory and
# file names -- fill in before running.
inputPath = os.path.join('#','#')

inputFile = os.path.join(baseDir, inputPath)

# One text line per pixel; see parse() below for the expected layout.
lines = sc.textFile(inputFile)
print lines.first()[0:100]


# In[ ]:

"""(3b) Parse the data"""
# each line of data like this:0 0 103 103.7 103.2 102.7 103.8 102.8 103 103.3 103.8 103.2 102.1 103.5 103.2 102.7 103.1 102.2 102.
def parse(line):
    """Parse the raw data into a (tuple, np.ndarray) pair.
    
    Note:
        You should store the pixel coordinates as a tuple of two ints and the elements of the pixel intensity
        time series as an np.ndarray of floats.

    Args:
        line (str): A string representing an observation.  Elements are separated by spaces.  The
            first two elements represent the coordinates of the pixel, and the rest of the elements
            represent the pixel intensity over time.

    Returns:
        tuple of tuple, np.ndarray: A (coordinate, pixel intensity array) `tuple` where coordinate is
            a `tuple` containing two values and the pixel intensity is stored in an NumPy array
            which contains 240 values.
    
    """
    array = lines.map(parse)
    ndarray = np.array([float(array[i]) for i in range(2, len(array))])
    return ((int(array[0]), int(array[1])), ndarray)

# Parse every line and cache, since rawData is reused many times below.
rawData = lines.map(parse)
rawData.cache()
entry = rawData.first()
print 'Length of movie is {0} seconds'.format(len(entry[1]))
print 'Number of pixels in movie is {0:,}'.format(rawData.count())
print ('\nFirst entry of rawData (with only the first five values of the NumPy array):\n({0}, {1})'
       .format(entry[0], entry[1][:5]))


# In[ ]:

"""(3c) Min and Max flouresence"""

# Global min and max intensity across all pixels and all time points.
# (Python 2 tuple-unpacking lambdas, consistent with the rest of the file.)
mn = rawData.map(lambda (x, y): y).map(lambda y: min(y)).min()
mx = rawData.map(lambda (x, y): y).map(lambda y: max(y)).max()

print mn, mx


# In[ ]:

"""Visualization: Pixel intensity"""

example = rawData.filter(lambda (k, v): np.std(v) > 100).values().first()

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 300, 50), np.arange(300, 800, 100))
ax.set_xlabel(r'time'), ax.set_ylabel(r'flouresence')
ax.set_xlim(-20, 270), ax.set_ylim(270, 730)
plt.plot(range(len(example)), example, c='#8cbfd0', linewidth='3.0')
pass


# In[ ]:

"""(3d) Fractional signal change"""
def rescale(ts):
    """Take a np.ndarray and return the standardized array by subtracting and dividing by the mean.

    Note:
        You should first subtract the mean and then divide by the mean.

    Args:
        ts (np.ndarray): Time series data (`np.float`) representing pixel intensity.

    Returns:
        np.ndarray: The times series adjusted by subtracting the mean and dividing by the mean.
    """
    mean = sum(ts)/len(ts)
    return np.array([(ts[i]-mean)/mean for i in range(0, len(ts))])

scaledData = rawData.mapValues(lambda v: rescale(v))
mnScaled = scaledData.map(lambda (k, v): v).map(lambda v: min(v)).min()
mxScaled = scaledData.map(lambda (k, v): v).map(lambda v: max(x)).max()
print mnScaled, mxScaled


# In[ ]:

"""Visualization: Normalized data"""

example = scaledData.filter(lambda (k, v): np.std(v) > 0.1).values().first()

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 300, 50), np.arange(-.1, .6, .1))
ax.set_xlabel(r'time'), ax.set_ylabel(r'flouresence')
ax.set_xlim(-20, 260), ax.set_ylim(-.12, .52)
plt.plot(range(len(example)), example, c='#8cbfd0', linewidth='3.0')
pass


# In[ ]:

"""(3e) PCA on the scaled data"""
#Run pca using scaledData
componentsScaled, scaledScores, eigenvaluesScaled = pca(scaledData.map(lambda (x, y): y), k=3)


# In[ ]:

"""Visualization: Top two components as images"""
import matplotlib.cm as cm

scoresScaled = np.vstack(scaledScores.collect())
imageOneScaled = scoresScaled[:,0].reshape(230, 202).T

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 10, 1), np.arange(0, 10, 1), figsize=(9.0, 7.2), hideLabels=True)
ax.grid(False)
ax.set_title('Top Principal Component', color='#888888')
image = plt.imshow(imageOneScaled,interpolation='nearest', aspect='auto', cmap=cm.gray)
pass


# In[ ]:

# Same as above, but for the second principal component's scores.
imageTwoScaled = scoresScaled[:,1].reshape(230, 202).T

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 10, 1), np.arange(0, 10, 1), figsize=(9.0, 7.2), hideLabels=True)
ax.grid(False)
ax.set_title('Second Principal Component', color='#888888')
image = plt.imshow(imageTwoScaled,interpolation='nearest', aspect='auto', cmap=cm.gray)
pass


# In[ ]:

"""Visualization: Top two components as one image"""

def polarTransform(scale, img):
    """Convert a two-channel image from cartesian to polar coordinates and map to RGB colors.

    Args:
        scale (float): Multiplier applied to the magnitude channel and the final RGB values.
        img: Two stacked 2-D arrays (the two cartesian channels); anything np.asarray
            accepts with shape (2, height, width).

    Returns:
        np.ndarray: An (height, width, 3) RGB image with values clipped to [0, 1].
    """
    from matplotlib.colors import hsv_to_rgb

    channels = np.asarray(img)
    height, width = channels.shape[1], channels.shape[2]

    # Angle of each (x, y) pair becomes the hue; magnitude becomes the value.
    hue = ((np.arctan2(-channels[0], -channels[1]) + np.pi / 2) % (np.pi * 2)) / (2 * np.pi)
    magnitude = np.sqrt(channels[0] ** 2 + channels[1] ** 2)
    saturation = np.ones((height, width))

    rgb = hsv_to_rgb(np.dstack((hue, saturation, scale * magnitude)))
    return np.clip(rgb * scale, 0, 1)


# In[ ]:

# Build a legend image: a 300 x 300 grid spanning the observed score ranges,
# pushed through the same polar color transform used for the brain maps.
x1AbsMax = np.max(np.abs(imageOneScaled))
x2AbsMax = np.max(np.abs(imageTwoScaled))

numOfPixels = 300
x1Vals = np.arange(-x1AbsMax, x1AbsMax, (2 * x1AbsMax) / numOfPixels)
x2Vals = np.arange(x2AbsMax, -x2AbsMax, -(2 * x2AbsMax) / numOfPixels)
x2Vals.shape = (numOfPixels, 1)

x1Data = np.tile(x1Vals, (numOfPixels, 1))
x2Data = np.tile(x2Vals, (1, numOfPixels))

# Try changing the first parameter to lower values
polarMap = polarTransform(2.0, [x1Data, x2Data])

gridRange = np.arange(0, numOfPixels + 25, 25)
fig, ax = preparePlot(gridRange, gridRange, figsize=(9.0, 7.2), hideLabels=True)
image = plt.imshow(polarMap, interpolation='nearest', aspect='auto')
ax.set_xlabel('Principal component one'), ax.set_ylabel('Principal component two')
# Relabel the pixel-grid ticks with the underlying score values.
gridMarks = (2 * gridRange / float(numOfPixels) - 1.0)
x1Marks = x1AbsMax * gridMarks
x2Marks = -x2AbsMax * gridMarks
ax.get_xaxis().set_ticklabels(map(lambda x: '{0:.1f}'.format(x), x1Marks))
ax.get_yaxis().set_ticklabels(map(lambda x: '{0:.1f}'.format(x), x2Marks))
pass


# In[ ]:

# Use the same transformation on the image data to get a composite brain map.
# Try changing the first parameter to lower values
brainmap = polarTransform(2.0, [imageOneScaled, imageTwoScaled])

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 10, 1), np.arange(0, 10, 1), figsize=(9.0, 7.2), hideLabels=True)
ax.grid(False)
image = plt.imshow(brainmap,interpolation='nearest', aspect='auto')
pass


# In[ ]:

"""Part 4: Feature-based aggregation and PCA"""
"""(4a) Aggregate by time"""
"""
   we would like to incorporate knowledge of our experimental setup into our analysis. To do this, we'll first study the temporal aspects of 
   neural response, by aggregating our features by time. In other words, we want to see how different pixels (and the underlying neurons captured
   in these pixels) react in each of the 20 seconds after a new visual pattern is displayed, regardless of what the pattern is. Hence, instead 
   of working with the 240 features individually, we'll aggregate the original features into 20 new features, where the first new feature captures
   the pixel response one second after a visual pattern appears, the second new feature is the response after two seconds, and so on.
"""
# Create a multi-dimensional array to perform the aggregation.
# T is 20 x 240: np.tile(np.eye(20), 12) puts a 1 at every column j with
# j % 20 == i, so T.dot(y) sums the 12 responses at each within-pattern second.
T = np.tile(np.eye(20), 12)
timeData = scaledData.map(lambda (x, y): (x, T.dot(y)))

timeData.cache()
print timeData.count()
print timeData.first()


# In[ ]:

"""(4b) Obtain a compact representation"""

componentsTime, timeScores, eigenvaluesTime = pca(timeData.map(lambda (k, v): v), k=3)

print 'componentsTime: (first five) \n{0}'.format(componentsTime[:5,:])
print ('\ntimeScores (first three): \n{0}'
       .format('\n'.join(map(str, timeScores.take(3)))))
print '\neigenvaluesTime: (first five) \n{0}'.format(eigenvaluesTime[:5])


# In[ ]:

"""Visualization: Top two components by time"""
"""
   Let's view the scores from the first two PCs as a composite image. When we preprocess by aggregating by time and then perform PCA, we are 
   only looking at variability related to temporal dynamics. As a result, if neurons appear similar -- have similar colors -- in the resulting
   image, it means that their responses vary similarly over time, regardless of how they might be encoding direction. In the image below, we 
   can define the midline as the horizontal line across the middle of the brain. We see clear patterns of neural activity in different parts of
   the brain, and crucially note that the regions on either side of the midline are similar, which suggests that temporal dynamics do not 
   differ across the two sides of the brain.
"""
# Compose the first two time-PC score images into one color-coded brain map.
scoresTime = np.vstack(timeScores.collect())
imageOneTime = scoresTime[:,0].reshape(230, 202).T
imageTwoTime = scoresTime[:,1].reshape(230, 202).T
brainmap = polarTransform(3, [imageOneTime, imageTwoTime])

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 10, 1), np.arange(0, 10, 1), figsize=(9.0, 7.2), hideLabels=True)
ax.grid(False)
image = plt.imshow(brainmap,interpolation='nearest', aspect='auto')
pass


# In[ ]:

"""(4c) Aggregate by direction"""
"""
   Let's perform a sceond type of feature aggregation so that we can study the direction-specific aspects of neural response, by aggregating our
   features by direction
"""
# Create a multi-dimensional array to perform the aggregation.
# D is 12 x 240: np.kron(np.eye(12), np.ones(20)) puts 20 consecutive ones in
# each row, so D.dot(y) sums each direction's 20-second response block.
D = np.kron(np.eye(12), np.ones(20))

# Transform scaledData using D
directionData = scaledData.map(lambda (x, y): (x, D.dot(y)))

directionData.cache()
print directionData.count()
print directionData.first()


# In[ ]:

"""(4d) Compact representation of direction data"""

# PCA on the 12-dimensional direction-aggregated features, keeping 3 components.
componentsDirection, directionScores, eigenvaluesDirection = pca(directionData.map(lambda (k, v): v), k=3)

print 'componentsDirection: (first five) \n{0}'.format(componentsDirection[:5,:])
print ('\ndirectionScores (first three): \n{0}'
       .format('\n'.join(map(str, directionScores.take(3)))))
print '\neigenvaluesDirection: (first five) \n{0}'.format(eigenvaluesDirection[:5])


# In[ ]:

"""Visualization: Top two components by direction"""
"""
   let's view the scores from the first two PCs as a composite image. When we preprocess by averaging across time (group by direction), and 
   then perform PCA, we are only looking at variability related to stimulus direction. As a result, if neurons appear similar -- have similar
   colors -- in the image, it means that their responses vary similarly across directions, regardless of how they evolve over time. In the 
   image below, we see a different pattern of similarity across regions of the brain. Moreover, regions on either side of the midline are 
   colored differently, which suggests that we are looking at a property, direction selectivity, that has a different representation across 
   the two sides of the brain.
"""
# Compose the direction scores into a brain map, as done for time above.
scoresDirection = np.vstack(directionScores.collect())
imageOneDirection = scoresDirection[:,0].reshape(230, 202).T
imageTwoDirection = scoresDirection[:,1].reshape(230, 202).T
brainmap = polarTransform(2, [imageOneDirection, imageTwoDirection])
# with thunder: Colorize(cmap='polar', scale=2).transform([imageOneDirection, imageTwoDirection])

# generate layout and plot data
fig, ax = preparePlot(np.arange(0, 10, 1), np.arange(0, 10, 1), figsize=(9.0, 7.2), hideLabels=True)
ax.grid(False)
image = plt.imshow(brainmap, interpolation='nearest', aspect='auto')
pass

