import csv

import numpy as np

def build_data_list(inputCSV):
    """Read a CSV file with a header row into a 2-D integer numpy array.

    Every cell is parsed with int(float(...)) so values such as "3.0"
    are tolerated. Returns an array with one row per CSV record and one
    column per CSV field.

    Parameters
    ----------
    inputCSV : str
        Path of the CSV file to read.
    """
    sKey = []
    # `with` guarantees the file handle is closed (the original leaked it)
    with open(inputCSV) as f:
        ra = csv.DictReader(f, dialect="excel")
        for record in ra:
            for item in ra.fieldnames:
                # int(float(...)) tolerates decimal notation in the cells
                sKey.append(int(float(record[item])))
        ncols = len(ra.fieldnames)
    sKey = np.array(sKey)
    sKey.shape = (-1, ncols)
    return sKey

def contiguity_item(ID, sp=None):
    """Return the neighbour IDs of *ID* from a contiguity (origin, dest) table.

    Parameters
    ----------
    ID : int
        Origin ID whose neighbours are requested.
    sp : ndarray, optional
        (n, 2) array of (origin, destination) ID pairs; defaults to the
        module-level ``spContiguity`` table for backward compatibility.

    Returns
    -------
    list of destination IDs whose origin equals ``ID`` (self-pairs excluded).
    """
    if sp is None:
        sp = spContiguity  # module-level table loaded at script start
    contiguityItem = []
    for row in range(sp.shape[0]):
        # BUGFIX: the original tested column 0 against ID twice
        # (`== ID and <> ID`), which is always False, so nothing was ever
        # collected. The intent is: origin matches ID, destination is not
        # ID itself (mirrors the self-pair check in generateWeightMatrix).
        if sp[row, 0] == ID and sp[row, 1] != ID:
            contiguityItem.append(sp[row, 1])
    # BUGFIX: the original tested `j == 1`, which reported an ID with
    # exactly one neighbour as having none; "no neighbours" is count == 0.
    if not contiguityItem:
        print(ID, "does not have contiguity items.")
    return contiguityItem

def generateWeightMatrix(x, spContiguity):
    """Build a symmetric binary contiguity (adjacency) weight matrix.

    Parameters
    ----------
    x : array-like
        Observation values; only its length is used (matrix dimension),
        so IDs in the contiguity table are assumed to be 0-based indices
        below len(x).
    spContiguity : ndarray
        (n, 2) array of (origin, destination) ID pairs.

    Returns
    -------
    (len(x), len(x)) float array with 1.0 at [o, d] and [d, o] for every
    pair with o != d, 0.0 elsewhere.
    """
    weight = np.zeros((len(x), len(x)))
    for row in range(len(spContiguity)):
        o = int(spContiguity[row, 0])
        d = int(spContiguity[row, 1])
        if o != d:  # skip self-contiguity pairs
            weight[o, d] = 1
            weight[d, o] = 1
    return weight


def localmoransi_2(x, weight):
    """Local Moran's I for every observation, using a full weight matrix.

    Parameters
    ----------
    x : ndarray
        1-D attribute values.
    weight : ndarray
        (len(x), len(x)) spatial weight matrix.

    Returns
    -------
    ndarray of the local Moran's I value for each observation (the
    original only printed it; returning is backward compatible).
    """
    z = (x - x.mean()) / x.std()  # standardized values
    print('std =', x.std())
    den = np.sum(z * z)
    print('den =', den)
    zl = np.dot(z, weight)  # spatial lag of z
    result = (len(x) - 1) * z * zl / den
    print(result)
    return result

def localmoransi_3(x, nei):
    """Local Moran's I profile given raw neighbour values (binary weights).

    Parameters
    ----------
    x : ndarray
        1-D attribute values of all observations.
    nei : ndarray
        Raw attribute values of the neighbours of the observation of
        interest.

    Returns
    -------
    ndarray of (len(x) - 1) * z * zl / den for every z (the original
    only printed it; returning is backward compatible).
    """
    z = (x - x.mean()) / x.std()          # standardized values
    den = np.sum(z * z)
    c = (nei - x.mean()) / x.std()        # standardized neighbour values
    zl = np.sum(c)                        # spatial lag with binary weights
    result = (len(x) - 1) * z * zl / den
    print(result)
    return result


def localmoransi(curx, nei, mean, table=None):
    """Experimental local Moran's I for a single observation.

    Prints the squared deviation of the neighbour values and three
    variants of the statistic, each using a different variance (s2)
    estimator, and returns the three variants as a list.

    Parameters
    ----------
    curx : float
        Attribute value of the observation itself.
    nei : array-like
        Raw attribute values of the observation's neighbours.
    mean : float
        Global mean of the attribute.
    table : ndarray, optional
        2-D dataset whose LAST column holds the attribute values;
        defaults to the module-level ``data`` array (backward
        compatible with the original global access).
    """
    if table is None:
        table = data  # module-level dataset loaded at script start
    print(np.dot(nei - mean, nei - mean))
    num = (curx - mean) * np.sum(nei - mean)  # numerator shared by all variants
    results = []
    # Variant 1: sample variance computed while skipping the first row.
    s2 = np.dot(table[1:, -1] - mean, table[1:, -1] - mean) / (len(table) - 1)
    results.append(num / s2)
    print(results[-1])
    # Variant 2: population variance over every row.
    s2 = np.dot(table[:, -1] - mean, table[:, -1] - mean) / len(table)
    results.append(num / s2)
    print(results[-1])
    # Variant 3: NOTE(review) — len(nei)/(n-1) - mean^2 is not an obvious
    # variance estimator; kept from the original, which also floored the
    # division under Python 2. Looks like a leftover experiment — confirm.
    s2 = len(nei) / (len(table) - 1) - mean * mean
    results.append(num / s2)
    print(results[-1])
    return results

print('---------------------------------------------------------')
# Hard-coded input location of the attribute and contiguity CSV files.
filepath = 'C:/_WORK/redcap_Aug2012/redcap/example data/NC_SID/temp/'
# Renamed from `file`, which shadowed the Python builtin.
data_csv = filepath + '1.csv'
data = build_data_list(data_csv)
contiguityCSV = filepath + '2.csv'
spContiguity = build_data_list(contiguityCSV)

# Adjacency matrix over the attribute column (last column of `data`).
weight = generateWeightMatrix(data[:, -1], spContiguity)
#print(weight)
curx = 21.0
nei = np.array([61, 24, 6, 4, 28])
#mean = (sum(nei) + curx) * 1.0 / (len(nei) + 1)
mean = data[:, -1].mean()
#print(mean)
localmoransi(curx, nei, mean)
#localmoransi_2(data[:, -1], weight)
#localmoransi_3(data[:, -1], nei)

