###082313 - merged everything into one script; only works for now.
###Analyzes globally, constraining the total distance to the sum of the other two and floating the sigma values.
### added vectors


from golgy_functions import open_tif_dialog, gaussfit
import numpy, pylab, math, sys
import scipy.special as special
from scipy.ndimage import gaussian_filter
from scipy.signal import resample, fftconvolve
import scipy.optimize as optimize
import scipy.integrate as integrate
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.axes_rgb import make_rgb_axes, RGBAxes
from decon import richardson_lucy_deconvolution
import matplotlib.mlab as mlab


#input to this program for now is a hyperstack

######params##########
# Toggles: deconvolve the input first, show intermediate plots, confirm peaks by hand.
decon_on = False
display_intermediate = False
manual_selection = False


# Difference-of-Gaussians filter widths (pixels) used for band-pass filtering.
small_gauss_factor = 1.0 #1
large_gauss_factor = 10.0 #10
std_dev = 6.0 #6, peak threshold: mean + std_dev * std of the filtered image
tolerance = 8.8 #8, max per-axis offset (pixels) when matching peaks across channels
upsample_factor = (15.0,5.0,5.0) #(15.0,5.0,5.0), per-axis (z, y, x) Fourier upsampling factors
diameter = 3.0 #3, max fitted 2D-Gaussian width for a peak to be kept
distance_cutoff = 800.0 #800, in nm

# Maximum-likelihood fit settings and initial parameter guesses (nm).
n = 10000                      #number of iterations
tolerance_MAX = 0.001          #xtol passed to scipy.optimize.fmin
numbins = 18                   #histogram bins for the distance plots
mu_a = 230.0
sigma_a = 100.0
mu_b = 130.0
sigma_b =100.0
sigma_c = 135.0                #sigma for total

##open data
# The hyperstack interleaves the three channels along z: every third slice
# belongs to one channel (offset 0 = blue, 1 = red, 2 = green).
if decon_on == True:
    full_estimate, history = richardson_lucy_deconvolution()
    z_slice = full_estimate.shape[0] / 3    #slices per channel (Python 2 integer division)
    data_blue = full_estimate[0::3, :, :]
    data_red = full_estimate[1::3, :, :]
    data_green = full_estimate[2::3, :, :]
else:
    data = open_tif_dialog()
    z_slice = data.shape[0] / 3
    data_blue = data[0::3, :, :]
    data_red = data[1::3, :, :]
    data_green = data[2::3, :, :]


##open reference
# Load the interleaved reference (fiducial) stack the same way as the data.
# NOTE(review): if z_slice <= 3 the reference_* and new_shape names are never
# defined, so the code below raises NameError — presumably the 2D path is
# unfinished; confirm before relying on it.

if z_slice > 3:                                  #for 2D implementation, where there is no fiducial
    if decon_on == True:
        reference, history = richardson_lucy_deconvolution()
        fid_shape = reference.shape[0]/3
        reference_blue = reference[0::3, :, :]
        reference_red = reference[1::3, :, :]
        reference_green = reference[2::3, :, :]
        # Target shape: data's z extent with the reference's in-plane extent.
        new_shape = (data_green.shape[0], reference_green.shape[1], reference_green.shape[2])
    else:            
        reference = open_tif_dialog()
        fid_shape = reference.shape[0]/3
        reference_blue = reference[0::3, :, :]
        reference_red = reference[1::3, :, :]
        reference_green = reference[2::3, :, :]
        new_shape = (data_green.shape[0], reference_green.shape[1], reference_green.shape[2])

#condition reference
def condition_reference(reference_data):
    if new_shape[0] < reference_data.shape[0]:         #need to cut
        while new_shape[0] < reference_data.shape[0]:
            if numpy.amax(reference_data[0,:,:]) > numpy.amax(reference_data[-1,:,:]):
                reference_data = numpy.delete(reference_data, -1, axis=0)
                print 'conditioned reference', reference_data.shape
            if numpy.amax(reference_data[0,:,:]) < numpy.amax(reference_data[-1,:,:]):
                reference_data = numpy.delete(reference_data, 0, axis=0)
                print 'conditioned reference', reference_data.shape

    if new_shape[0] > reference_data.shape[0]:          #need to pad
        padding_slice = numpy.zeros((1, new_shape[1], new_shape[2]), dtype=numpy.float64)
        padding_slice = padding_slice+reference_data.min()
        while new_shape[0] > reference_data.shape[0]:
            reference_data = numpy.concatenate((reference_data, padding_slice), axis=0)
            print 'conditioned reference', reference_data.shape
    return reference_data

# Make every reference channel match the data's z extent.
reference_green = condition_reference(reference_green)
reference_red = condition_reference(reference_red)
reference_blue = condition_reference(reference_blue)



################FFT references##############
def gaussian_filter_fft(data_name, small_gauss_factor, large_gauss_factor):
    """Band-pass the stack with a difference of Gaussians, then return its FFT."""
    narrow = gaussian_filter(data_name, small_gauss_factor)
    wide = gaussian_filter(data_name, large_gauss_factor)
    return numpy.fft.fftn(narrow - wide)

# Full-frame FFTs of the filtered reference channels (the upsampled versions
# actually used for cross-correlation are computed further below).
green_reference_fft = gaussian_filter_fft(reference_green, small_gauss_factor, large_gauss_factor)
red_reference_fft = gaussian_filter_fft(reference_red, small_gauss_factor, large_gauss_factor)
blue_reference_fft = gaussian_filter_fft(reference_blue, small_gauss_factor, large_gauss_factor)
            
##############find peaks################
# Half of the reference's largest in-plane extent; maxima closer than this
# to a detected peak are suppressed as duplicates of the same spot.
psf_radius = max(reference_green.shape[1:])//2

def find_peaks(data_name, small_gauss_factor, large_gauss_factor, std_dev):
    """Return (row, col) peak locations in the z-summed, DoG-filtered image.

    A pixel counts as a peak when it exceeds mean + std_dev * std of the
    filtered image; each accepted peak zeroes a psf_radius neighbourhood
    around itself so nearby maxima of the same spot are suppressed.
    """
    projection = data_name.sum(axis=0)
    dog = (gaussian_filter(projection, small_gauss_factor) -
           gaussian_filter(projection, large_gauss_factor))
    threshold = std_dev * dog.std() + dog.mean()
    candidates = dog * (dog > threshold)
    locations = []
    while candidates.max() > 0:
        row, col = numpy.unravel_index(numpy.argmax(candidates), candidates.shape)
        locations.append((row, col))
        # Blank out the neighbourhood so one spot yields exactly one peak.
        candidates[max(row - psf_radius, 0):row + psf_radius,
                   max(col - psf_radius, 0):col + psf_radius] = 0
    return locations


# Per-channel peak detection on the z-summed images.
blue_peak_locations = find_peaks(data_blue, small_gauss_factor, large_gauss_factor, std_dev)
green_peak_locations = find_peaks(data_green, small_gauss_factor, large_gauss_factor, std_dev)
red_peak_locations = find_peaks(data_red, small_gauss_factor, large_gauss_factor, std_dev)

###########label sorting##############

def peak_label_sorting(tolerance, data1_peak_locations, data2_peak_locations):
    """Return peaks from data1 that have a data2 peak within `tolerance`.

    Matching tests the per-axis (Chebyshev) offset; each data1 peak is
    reported at most once, at its data1 coordinates.
    """
    matched = []
    for x1, y1 in data1_peak_locations:
        if any(abs(x1 - x2) <= tolerance and abs(y1 - y2) <= tolerance
               for x2, y2 in data2_peak_locations):
            matched.append((x1, y1))
    return matched


peaks_in_blue_red = peak_label_sorting(tolerance, red_peak_locations, blue_peak_locations)
peaks_in_rgb = peak_label_sorting(tolerance, red_peak_locations, peaks_in_blue_red)
print 'peaks in red', len(red_peak_locations)
print 'peaks in green', len(green_peak_locations)
print 'peaks in blue', len(blue_peak_locations)
print 'peaks in RGB', len(peaks_in_rgb)

def get_rgb(red, green, blue, L):
    """Clip each channel at zero (in place) and rescale it to [0, 1].

    Returns the three rescaled arrays.  ``L`` is kept for backward
    compatibility but unused — the original only computed an unused plot
    extent from it.
    """
    for channel in (red, green, blue):
        channel[channel < 0] = 0.
    red = red / red.max()
    green = green / green.max()
    blue = blue / blue.max()
    return red, green, blue
def make_cube(r, g, b):
    """Embed three 2-D channel images into (ny, nx, 3) RGB cubes.

    Returns the red-only, green-only and blue-only cubes plus their sum.
    """
    ny, nx = r.shape
    cube_r = numpy.zeros([ny, nx, 3], dtype="d")
    cube_g = numpy.zeros_like(cube_r)
    cube_b = numpy.zeros_like(cube_r)
    cube_r[:, :, 0] = r
    cube_g[:, :, 1] = g
    cube_b[:, :, 2] = b
    return cube_r, cube_g, cube_b, cube_r + cube_g + cube_b
def demo_rgb_scatter(data_red, data_green, data_blue, coord_red, coord_green, coord_blue):
    """Show max-projections of the three channels with peak markers overlaid.

    coord_* are (row, col) peak lists; each channel is drawn in its own
    sub-axes (peaks in yellow) plus a combined RGB view.  Leaves pylab in
    interactive mode.
    """
    data_red = data_red.max(axis=0)
    data_green = data_green.max(axis=0)
    data_blue = data_blue.max(axis=0)
    # Split the (row, col) pairs into plottable x/y lists per channel.
    y_r = [c[0] for c in coord_red]
    x_r = [c[1] for c in coord_red]
    y_g = [c[0] for c in coord_green]
    x_g = [c[1] for c in coord_green]
    y_bl = [c[0] for c in coord_blue]
    x_bl = [c[1] for c in coord_blue]
    fig = plt.figure(1)
    fig.clf()
    ax = fig.add_subplot(111)
    pylab.plot(x_r, y_r, 'o', markersize=2.0, color='r')
    pylab.plot(x_g, y_g, 'o', markersize=2.0, color='g')
    pylab.plot(x_bl, y_bl, 'o', markersize=2.0, color='b')
    ax_r, ax_g, ax_b = make_rgb_axes(ax, pad=0.02)
    L = data_red.shape[1]
    r, g, b = get_rgb(data_red, data_green, data_blue, L)
    im_r, im_g, im_b, im_rgb = make_cube(r, g, b)
    kwargs = dict(origin="lower", interpolation="nearest")
    ax_r.imshow(im_r, **kwargs)
    ax_r.plot(x_r, y_r, 'o', markersize=2.0, color='y')
    ax_g.imshow(im_g, **kwargs)
    ax_g.plot(x_g, y_g, 'o', markersize=2.0, color='y')
    ax_b.imshow(im_b, **kwargs)
    ax_b.plot(x_bl, y_bl, 'o', markersize=2.0, color='y')
    ax.imshow(im_rgb, **kwargs)
    pylab.ion()
def demo_rgb(data_red, data_green, data_blue, x, y):
    """Show one subimage's three channels plus the RGB composite, marking (x, y)."""
    L = data_red.shape[1]
    fig = plt.figure(1)
    fig.clf()
    ax = fig.add_subplot(111)
    pylab.plot(x, y, 'o', markersize=2.0, color='y')
    ax_r, ax_g, ax_b = make_rgb_axes(ax, pad=0.02)
    r, g, b = get_rgb(data_red, data_green, data_blue, L)
    im_r, im_g, im_b, im_rgb = make_cube(r, g, b)
    kwargs = dict(origin="lower", interpolation="nearest")
    for axis, image in ((ax_r, im_r), (ax_g, im_g), (ax_b, im_b), (ax, im_rgb)):
        axis.imshow(image, **kwargs)
 
    
if display_intermediate == True:
    print 'displaying peaks in RGB'
    # The same matched peak list is drawn on all three channels.
    demo_rgb_scatter(data_red, data_green, data_blue, peaks_in_rgb, peaks_in_rgb, peaks_in_rgb)
    raw_input()   #pause so the interactive figure stays on screen

#######upsampling and fft##############################

def resample_3(array, n, window='hamming'):
    """Fourier-resample a 3-D array axis by axis.

    n is a per-axis upsampling factor tuple; axis i is resampled to
    round(n[i] * original_size) samples.  The factors may be floats —
    scipy.signal.resample requires an integer sample count, so the product
    is rounded to the nearest integer (the original passed the raw float,
    which newer scipy versions reject).
    """
    shape = array.shape
    array = array.copy()
    for axis in range(3):
        num = int(round(n[axis] * shape[axis]))
        array = resample(array, num, t=None, axis=axis, window=window)
    return array


# Upsample each reference channel and cache its FFT; these are the templates
# cross-correlated against each peak's subimage below.
green_reference_upsampled = resample_3(reference_green, upsample_factor, window='hamming')
#print green_reference_upsampled
#print "shape" , green_reference.shape
green_reference_upsampled_fft = numpy.fft.fftn(green_reference_upsampled)

red_reference_upsampled = resample_3(reference_red, upsample_factor, window='hamming')
red_reference_upsampled_fft = numpy.fft.fftn(red_reference_upsampled)

blue_reference_upsampled = resample_3(reference_blue, upsample_factor, window='hamming')
blue_reference_upsampled_fft = numpy.fft.fftn(blue_reference_upsampled)

############upsample_and crosscorrelate_data_peaks###############


def up_and_cross_data(data_name, data_reference, data_reference_upsampled_fft, peak_coordinates, color):
    """Measure each peak's sub-pixel 3-D shift versus the reference.

    For every (row, col) peak a reference-sized subimage is cut out,
    Fourier-upsampled, and cross-correlated against the upsampled reference
    FFT; the correlation argmax gives the shift, which is unwrapped past the
    half-way point and converted to nm (350 nm/pixel in z, 70 nm/pixel in
    x/y).  Peaks whose subimage would run off the edge are skipped, so the
    returned list of [z, y, x] shifts can be shorter than peak_coordinates.
    """
    data_wrapped_shift_pixels = []
    d = data_reference.shape
    print color , "shape", d
    for p in peak_coordinates:
        # Reference-sized crop centred on the peak (all z slices kept).
        data_subimage = data_name[:,
                        p[0]-(d[1]//2):p[0]-(d[1]//2)+d[1],
                        p[1]-(d[2]//2):p[1]-(d[2]//2)+d[2]]     
        if data_subimage.shape != d:
            print "Skipping"
            continue
         
        data_subimage_upsampled = resample_3(data_subimage, upsample_factor, window='hamming')

        print "Cross-correlating..."
        # Cross-correlation via the Fourier cross-power product.
        data_cross_correlation = numpy.fft.ifftn(
            data_reference_upsampled_fft *
            numpy.conjugate(numpy.fft.fftn(data_subimage_upsampled)))
        print "Finding max..."
        data_shift = numpy.unravel_index(data_cross_correlation.argmax(), data_cross_correlation.shape)
        data_wrapped_shift = list(data_shift)
        pixel_size = 350.0 #z
        data_wrapped_shift_pixels_zxy = []
        for i in range(3):
            # Shifts beyond the half-way point wrap around to negative values.
            if data_shift[i] > data_cross_correlation.shape[i]//2:
                data_wrapped_shift[i] -= data_cross_correlation.shape[i]
            data_wrapped_shift_pixels_zxy.append(data_wrapped_shift[i]/float(upsample_factor[i])*pixel_size)
            pixel_size = 70.0 #xy; after the first (z) axis, switch to the in-plane pixel size
        data_wrapped_shift_pixels.append(data_wrapped_shift_pixels_zxy)
        print color, "wrapped shift:", data_wrapped_shift

    return data_wrapped_shift_pixels

############radious_selection###################
peaks_in_rgb_selection = []
d = reference_green.shape
for a in peaks_in_rgb:
    subimage_green = data_green[:,
                    a[0]-(d[1]//2):a[0]-(d[1]//2)+d[1],
                    a[1]-(d[2]//2):a[1]-(d[2]//2)+d[2]]   
    subimage_red = data_red[:,
                    a[0]-(d[1]//2):a[0]-(d[1]//2)+d[1],
                    a[1]-(d[2]//2):a[1]-(d[2]//2)+d[2]]   
    subimage_blue = data_blue[:,
                    a[0]-(d[1]//2):a[0]-(d[1]//2)+d[1],
                    a[1]-(d[2]//2):a[1]-(d[2]//2)+d[2]]
    try:
        height_g, amplitude_g, y_g, x_g, width_y_g, width_x_g, rota_g = gaussfit(subimage_green.max(axis=0),err=None,params=(),autoderiv=True,return_all=False,circle=False,
        fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
        limitedmax=[False,False,False,False,False,False,True],
        usemoment=numpy.array([],dtype='bool'),
        minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
        rotate=1,vheight=1,quiet=True,returnmp=False,
        returnfitimage=False)

        height_b, amplitude_b, y_b, x_b, width_y_b, width_x_b, rota_b = gaussfit(subimage_blue.max(axis=0),err=None,params=(),autoderiv=True,return_all=False,circle=False,
        fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
        limitedmax=[False,False,False,False,False,False,True],
        usemoment=numpy.array([],dtype='bool'),
        minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
        rotate=1,vheight=1,quiet=True,returnmp=False,
        returnfitimage=False)

        height_r, amplitude_r, y_r, x_r, width_y_r, width_x_r, rota_r = gaussfit(subimage_red.max(axis=0),err=None,params=(),autoderiv=True,return_all=False,circle=False,
        fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
        limitedmax=[False,False,False,False,False,False,True],
        usemoment=numpy.array([],dtype='bool'),
        minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
        rotate=1,vheight=1,quiet=True,returnmp=False,
        returnfitimage=False)
    except Exception:
        sys.exc_clear()

    if (subimage_green.shape != d or width_x_r > diameter or width_y_r > diameter or
        width_x_g > diameter or width_x_g > diameter or width_x_b > diameter or width_x_b > diameter):        
        print "Skipping"
        continue
    if manual_selection:
        pylab.close('all')
        pylab.ion()
        demo_rgb(subimage_red.max(axis=0), subimage_green.max(axis=0), subimage_blue.max(axis=0), a[0], a[1])
        pylab.show()
        raw_input()
        pylab.close('all')
        if (raw_input("keep?")=="n"):
            print 'discarding'
            continue
        print 'kept'
        peaks_in_rgb_selection.append((a))
    else:
        peaks_in_rgb_selection.append((a))

print 'peaks in rgb', len(peaks_in_rgb)
print 'selection of rgb', len(peaks_in_rgb_selection)
peaks_in_rgb = peaks_in_rgb_selection

# Per-channel sub-pixel shifts (nm) at every accepted peak; lists stay in
# lockstep because all three channels share the same peak coordinates.
green_wrapped_shift_pixels = up_and_cross_data(data_green, reference_green, green_reference_upsampled_fft, peaks_in_rgb, 'green')
red_wrapped_shift_pixels = up_and_cross_data(data_red, reference_red, red_reference_upsampled_fft, peaks_in_rgb, 'red')
blue_wrapped_shift_pixels = up_and_cross_data(data_blue, reference_blue, blue_reference_upsampled_fft, peaks_in_rgb, 'blue')



#############calculate_distances#############################
def distance3D_calc(xa, xb, ya, yb, za, zb):
    """Euclidean distance between points a and b in 3-D."""
    dx, dy, dz = xa - xb, ya - yb, za - zb
    return math.sqrt(dx * dx + dy * dy + dz * dz)

def distance2D_calc(xa, xb, ya, yb):
    """Euclidean distance between points a and b in the plane."""
    dx, dy = xa - xb, ya - yb
    return math.sqrt(dx * dx + dy * dy)
results = []

# Pairwise distance lists (nm); exact zeros are recorded as 0.005 so the
# likelihood density (which has a singularity at r = 0) never sees zero.
# numpy.append on a list returns a new ndarray, so these become arrays.
RtGlist = []
GtBlist = []
RtBlist = []

for i in range(len(red_wrapped_shift_pixels)):
    # Shifts are stored as [z, y, x]; unpack per channel.
    x1, y1, z1 = red_wrapped_shift_pixels[i][1], red_wrapped_shift_pixels[i][2], red_wrapped_shift_pixels[i][0]
    x2, y2, z2 = green_wrapped_shift_pixels[i][1], green_wrapped_shift_pixels[i][2], green_wrapped_shift_pixels[i][0] 
    x3, y3, z3 = blue_wrapped_shift_pixels[i][1], blue_wrapped_shift_pixels[i][2], blue_wrapped_shift_pixels[i][0]
    RtG = distance3D_calc(x1, x2, y1, y2, z1, z2)
    GtB = distance3D_calc(x2, x3, y2, y3, z2, z3)
    RtB = distance3D_calc(x1, x3, y1, y3, z1, z3)
    RtG2 = distance2D_calc(x1, x2, y1, y2)
    GtB2 = distance2D_calc(x2, x3, y2, y3)
    RtB2 = distance2D_calc(x1, x3, y1, y3)
    # Keep the peak only if all three 3-D separations pass the cutoff.
    if RtG < distance_cutoff and GtB < distance_cutoff and RtB < distance_cutoff:
        results.append((x1, y1, z1, x2, y2, z2, x3, y3, z3, RtG2, GtB2, RtB2,
                    RtG, GtB, RtB) )
        if RtG == 0:
            RtGlist = numpy.append(RtGlist, 0.005)
        else:
            RtGlist = numpy.append(RtGlist, RtG)
        if GtB == 0:
            GtBlist = numpy.append(GtBlist, 0.005)
        else:
            GtBlist = numpy.append(GtBlist, GtB)
        if RtB == 0:
            RtBlist = numpy.append(RtBlist, 0.005)
        else:
            RtBlist = numpy.append(RtBlist, RtB)


print 'correlated peaks', len(red_wrapped_shift_pixels)
print 'peaks after cut off', len(GtBlist)

# Dump every retained peak's coordinates and distances as a TSV table.
# `with` guarantees the file is closed even if a write fails.
with open('AllDistances.txt', 'w') as file_out:
    file_out.write("x_red\ty_red\tz_red\tx_green\ty_green\tz_green\tx_blue\ty_blue\tz_blue\t2DredToGreen\t2DgreenToblue\t2DredToblue\tredTOgreen3D\tgreenTOblue3D\tredTOblue3D\n")
    for item in results:
        for r in item:
            file_out.write("%0.2f\t" % r)
        file_out.write("\n")



######################MAX_Likelyhood####################

###############fit paramsf by through likelihood function

# Concatenate the three distance sets end to end; the likelihood below
# scores the flattened array in thirds: [red-green, red-blue, green-blue].
# Green-blue is treated as the "total" distance (sum of the other two).
ToCis = RtGlist
Total = GtBlist
ToTrans = RtBlist

first_two= numpy.vstack((ToCis, ToTrans))
data = numpy.vstack((first_two, Total))
print 'data', data
data = data.flatten()

print 'data_flatten', data


#input data is one column txt file with distances, the last batch being the largest distances that corresponds to the sum of the other two
def getData(fileName):
    """Read one float per line from fileName and return them as a list.

    Uses a context manager so the file handle is always closed (the
    original leaked it).
    """
    with open(fileName, 'r') as dataFile:
        return [float(line) for line in dataFile]



def P3D(params, data):
    """Evaluate the 3-D distance probability density for each measurement.

    `data` holds three equal-length batches of distances laid end to end
    (set a, set b, and the total set c whose mean is constrained to
    mu_a + mu_b); each batch is scored with its own mu/sigma.  Exact zeros
    are clamped to 0.005 in place to avoid the r = 0 singularity.
    Returns one probability per measurement.
    """
    mu_a, sigma_a, mu_b, sigma_b, sigma_c = params[0], params[1], params[2], params[3], params[4]
    mu_c = mu_a+mu_b

    def density(r, mu, sigma):
        # p3D(r): Rician-like radial density for a 3-D distance measurement.
        return (math.exp(-(r**2 + mu**2) / (2 * sigma**2)) * r *
                math.sinh(mu * r / sigma**2) * math.sqrt(2 / math.pi) / (sigma * mu))

    # // keeps this integer under both Python 2 and 3.
    third = len(data) // 3
    output = []
    for x in range(len(data)):
        if data[x] == 0:
            data[x] = 0.005
        # The original used strict < / > comparisons, silently dropping the
        # measurements sitting exactly at the batch boundaries; the elif
        # chain with inclusive bounds scores every measurement exactly once.
        if x < third:
            output.append(density(data[x], mu_a, sigma_a))
        elif x < 2 * third:
            output.append(density(data[x], mu_b, sigma_b))
        else:
            output.append(density(data[x], mu_c, sigma_c))
    return output

def P3Dpl(mu, sigma, data):
    """Return the p3D(r) density evaluated at every distance in `data`."""
    two_sigma_sq = 2 * sigma**2
    norm = math.sqrt(2 / math.pi) / (sigma * mu)
    return [math.exp(-(r**2 + mu**2) / two_sigma_sq) * r *
            math.sinh(mu * r / sigma**2) * norm
            for r in data]

def prod(muSigma, *data):
    """Negative log-likelihood of `data` under P3D with parameters muSigma.

    Zero probabilities are floored at 0.0005 to keep the log finite.
    NOTE(review): the accumulator starts at 1 rather than 0, so the returned
    value carries a constant -1 offset; this does not move the minimiser.
    """
    total = 1
    for p in P3D(muSigma, data):
        if p == 0:
            p = 0.0005  # guard against log(0)
        total += math.log(p)
    return -total


def intP3D(mu, sigma, edges):
    """Integrate the p3D density over consecutive histogram bins.

    Returns a list of (value, abserr) tuples from scipy.integrate.quad.
    NOTE(review): the loop runs len(edges) - 2 times, so the final bin is
    excluded — this matches the truncated plot_edges list built in plot_data.
    """
    density = lambda r: (math.exp(-(r**2 + mu**2) / (2 * sigma**2)) * r *
                         math.sinh(mu * r / sigma**2) *
                         math.sqrt(2 / math.pi) / (sigma * mu))
    return [integrate.quad(density, edges[i], edges[i + 1])
            for i in range(len(edges) - 2)]
    

params = [mu_a, sigma_a, mu_b, sigma_b, sigma_c]   #initial guess for the fit
#residuals = lambda muSigma, x, y: (prod(muSigma, x) - y)
#residuals = lambda muSigma, x: (prod(muSigma, x))
# Minimise the negative log-likelihood with Nelder-Mead simplex; passing
# `args = data` spreads the flattened measurement array into prod's *data.
func = lambda params, *data: prod(params, *data)
paramsf = optimize.fmin(func, params, args = data, maxiter=None, xtol = tolerance_MAX, disp = False)

print 'paramsf', paramsf


###plots
def plot_data(mu, sigma, data, title):  
    """Histogram `data`, overlay the fitted p3D density, and report 68% limits.

    mu/sigma are the fitted parameters for this distance set.  The 68%
    confidence offsets are found by scanning the log-likelihood around mu
    for the points 0.5 below its maximum.  Draws the histogram, saves
    "<title>.png", blocks on raw_input(), and returns the (lower, upper)
    confidence offsets around mu.
    """
    pylab.close('all')
    #################make bins:
    num_pts = len(data)
    binfreq, edges = numpy.histogram(data, numbins)   #binfreq is number of data points in each bin, edges are the bin separation values
    deltaX = edges[1] - edges[0]                   #xbin in matlab is center x of each bin not the edge; matlab lists start at 1 not 0
    ex_pts = math.floor(edges[0] / deltaX)
    pts = []
    j = (edges[0] + deltaX / 2) - ex_pts * deltaX           #J:D:K  is the same as [J, J+D, ..., J+m*D] where m = fix((K-J)/D)
    for i in range(len(binfreq)):
        pts.append(j + i*deltaX)                            #make list of x position of center of each bin

    ##edgesFit = [0]
    ##for i in range(len(edges)):
    ##    edgesFit.append(edges[i])
    ##edgesFit.append(edges[-1]+deltaX)

    # Expected per-bin probabilities from the fitted density (last bin excluded).
    q = intP3D(mu,sigma,edges)

    # graph_data holds one normalised frequency entry per measurement; only
    # its max is used below, for the plot's y range.
    graph_data = []
    for j in range(len(pts)):
            i=0
            pts_add = binfreq[j]
            while i < binfreq[j]:
                    i=i+1
                    graph_data.append(pts_add/float(num_pts))

    #################separate simulated probablity
    sim_int_prob = []
    for i in range(len(q)):
        sim_int_prob.append(q[i][0])
    plot_edges = []
    for i in (range(len(edges)-2)):
        plot_edges.append(edges[i+1]-deltaX/2)

    
    #####plot bins:
    # freq = tallest bin count (computed but unused below).
    freq = 0
    for b in binfreq:
        if b > freq: freq = b


    """
        
    pylab.figure(0)
    pylab.bar(left = edges[:-1], height = binfreq*1.0/binfreq.sum(), width=deltaX)
    pylab.xlabel('Distance')
    pylab.ylabel('Frequency')
    pylab.title('Histogram of data/Line of calculated')
    pylab.axis([0, int(pts[-1:] + pts[0]), 0, (max(graph_data)+0.1)])
    pylab.plot(plot_edges, sim_int_prob, 'r--', linewidth=1)
    """
    ##Determine the confidence limits
    # Scan a +/- 4 sigma/sqrt(n) window around mu in steps of 0.001*sigma.
    sigma_range = 4 * sigma / math.sqrt(num_pts)
    resolution = 0.001 * sigma
    x = []
    d = resolution
    k = mu + sigma_range

    #J:D:K  is the same as [J, J+D, ..., J+m*D] where m = fix((K-J)/D)
    if sigma < mu:
        j = mu - sigma_range
        m = int(math.floor((k-j)/d))
        for i in range(m+1):
            x.append(j + i*d)
    else:
        # When sigma >= mu the lower edge would go negative; start just above 0.
        j = resolution
        m = int(math.floor((k-j)/d))
        for i in range(m+1):
            x.append(d + i*d)

    #Calculate the log likelihood for a range of distances
    L = []
    for i in range(len(x)):
        t = []
        for j in P3Dpl(x[i], sigma, data):
            t.append(math.log(j))
        L.append(sum(t))

    #Plot the log likelihood


    #pylab.figure(1)
    #pt1 = pylab.plot(x, L, 'r-')
    #pylab.setp(pt1, color = 'g', linewidth = 2.0)

    #Determine the distances one sigma from the center distance 68% conf limits    
    # The two scanned mu values whose log-likelihood is closest to
    # (max - 0.5) bracket the 68% confidence interval.
    maxL = L[0]
    for i in L:
        if i > maxL: maxL = i
    Ls = []
    for i in L:
        Ls.append(abs(i - ( maxL - 0.5)))
    LsSorted = sorted(Ls)
    f1 = Ls.index(LsSorted[0])
    f2 = Ls.index(LsSorted[1])
    conflim = []
    if f1 < f2:
        conflim.append(mu - x[f1])
        conflim.append(x[f2] - mu)
    else:
        conflim.append(mu - x[f2])
        conflim.append(x[f1] - mu)

    # conflim_out: the larger offset (computed but not returned or used).
    if conflim[1] > conflim[0]:
        conflim_out = conflim[1]
    else:
        conflim_out = conflim[0]

    print "Results of the maximum likelihood fit to p3D(r) from Eq.(4):"
    print "     n = %d" % num_pts
    print "     Distance %.6f (+ %.6f) (- %.6f)" % (mu, conflim[1], conflim[0])
    print "     Sigma %.6f" % sigma

    pylab.figure(0)
    pylab.bar(left = edges[:-1], height = binfreq*1.0/binfreq.sum(), width=deltaX)
    pylab.xlabel('Distance')
    pylab.ylabel('Frequency')
    pylab.title('Histogram of data/Line of calculated ' + title)
    pylab.axis([0, 800, 0, (max(graph_data)+0.1)]) #int(pts[-1:] + pts[0])
    pylab.text(400, (max(graph_data)+0.1-0.025), "Distance = %.6f (+ %.6f) (- %.6f)" % (mu, conflim[1], conflim[0]), horizontalalignment='center',verticalalignment='center')
    pylab.text(400, (max(graph_data)+0.1-0.05), "Sigma =  %.6f and n = %.6f " % (sigma, num_pts), horizontalalignment='center',verticalalignment='center')
    pylab.plot(plot_edges, sim_int_prob, 'r--', linewidth=1)
    pylab.ion()
    pylab.savefig(title + ".png")
    raw_input()
    pylab.clf()
    pylab.close('all')

    pylab.ion()
    return conflim[0], conflim[1]
    
# Re-split the flattened measurement vector into its three thirds.
# NOTE(review): dataA/B/C are never used afterwards, and the strict </>
# comparisons drop the elements sitting exactly on the third boundaries
# (the same issue exists in P3D) — confirm whether this block can go.
dataA=[]
dataB = []
dataC= []
for i in range(len(data)):
    if i<(len(data)/3):
          dataA.append(data[i])
    if i>(len(data)/3) and i<(len(data)*2/3):
          dataB.append(data[i])
    if i>(len(data)*2/3):
          dataC.append(data[i])



conflim0, conflim1 = plot_data(paramsf[0], paramsf[1], ToCis, 'ToCis')
if conflim1 > conflim0:
    conflim_cis = conflim1
else:
    conflim_cis = conflim0

conflim0, conflim1 = plot_data(paramsf[2], paramsf[3], ToTrans, 'ToTrans')
if conflim1 > conflim0:
    conflim_trans = conflim1
else:
    conflim_trans = conflim0
        
musf= (paramsf[0]+paramsf[2])
conflim0, conflim1 = plot_data(musf, paramsf[4], Total, 'Total')
if conflim1 > conflim0:
    conflim_total = conflim1
else:
    conflim_total = conflim0
        

norm_cis = paramsf[0]/musf
norm_cisErr = norm_cis*math.sqrt((conflim_cis/paramsf[0])**2+(conflim_total/musf)**2)

norm_trans = paramsf[2]/musf
norm_transErr = norm_trans*math.sqrt((conflim_trans/paramsf[2])**2+(conflim_total/musf)**2)

"""
vector calculation
"""




a = numpy.loadtxt('AllDistances.txt', skiprows=1)

red=a[:,0:3]
green= a[:,3:6]
blue = a[:,6:9]

c = len(red)

cis = green
protein = red
trans = blue

protein = protein - cis
trans = trans -cis
cis = cis-cis

normalized = []
total_dist = []
distance = []
scalar = []
scalar_norm = []
projection = []
average = 0
stdev = 0


for x in range(len(protein)):
    protein_dot_trans = numpy.vdot(protein[x], trans[x])
    Tdist = numpy.linalg.norm(trans[x])
    mag_sq = (Tdist) ** 2.0
    vector = trans[x] * protein_dot_trans / mag_sq
    proj = numpy.linalg.norm(vector)
    normalized_val = proj/(numpy.linalg.norm(trans[x]))
    projection.append(proj)
    normalized.append(normalized_val)
    total_dist.append(Tdist)
    distance.append(proj)
    scalar.append((protein_dot_trans/Tdist))
    scalar_norm.append(((protein_dot_trans/Tdist)/Tdist))
    print ((protein_dot_trans/Tdist)/Tdist)

average  = numpy.mean(scalar_norm)
stdev = numpy.std(scalar_norm)
print 'average', average
print 'stdev', stdev

# example data
x = scalar_norm

num_bins = 10
# the histogram of the data
# NOTE(review): `normed=1` and mlab.normpdf were removed in modern matplotlib
# (use density=True and scipy.stats.norm.pdf); this runs only on old versions.
# This assignment also rebinds the module-level `n` (the iteration count).
n, bins, patches = plt.hist(x, num_bins, normed=1, facecolor='green', alpha=0.5)
# add a 'best fit' line
y = mlab.normpdf(bins, average, stdev)
plt.plot(bins, y, 'r--')
plt.xlabel('Smarts')
plt.ylabel('Probability')
plt.title(r"Vector distances based on cis, Distance = %.3f STD = %.3f n = %.1f" % (average, stdev, c))


# Tweak spacing to prevent clipping of ylabel
plt.subplots_adjust(left=0.15)
# NOTE(review): savefig after a blocking show() can write an empty figure on
# some backends — confirm the PNG comes out as expected.
plt.show()
plt.savefig('vectorsOnCis.png')

average_cis = average
stdev_cis = stdev



'''
create summary file
'''
# Write the fitted normalised distances and the vector-projection summary.
# `with` guarantees the file is closed; the unused `summary` list is gone.
with open('summary.txt', 'w') as file_out:
    file_out.write("item\tmean\tstdev\n")
    file_out.write("ToCis\t")
    file_out.write("%0.3f\t" % norm_cis)
    file_out.write("%0.3f\t" % norm_cisErr)
    file_out.write("\nToTrans\t")
    file_out.write("%0.3f\t" % norm_trans)
    file_out.write("%0.3f\t" % norm_transErr)
    file_out.write("\nVectorOnCis\t")
    file_out.write("%0.3f\t" % average_cis)
    file_out.write("%0.3f\t" % stdev_cis)
    file_out.write("\nNumber\t")
    file_out.write("%0.1f\t" % c)

