import numpy as np
import csv
import random
from datetime import datetime
from time import strftime
from dbfpy import dbf

'''
Calculate the false-negative and false-positive values for SaTScan
cluster-detection results.
'''

def getCurTime():
    """
    Return the current local time as a string.

    Returns:
        str: the current time formatted as '%Y-%m-%d %H:%M:%S'.
    """
    # strftime with a fixed, valid format string cannot fail here; the
    # previous bare except / raise ValueError only hid real errors and
    # discarded the original traceback.
    return datetime.now().strftime('%Y-%m-%d %H:%M:%S')

def build_data_list(inputCSV):
    """
    Load every cell of a CSV file (excel dialect, with a header row)
    into a 2-D numpy array of floats.

    Parameters:
        inputCSV (str): path to the CSV file; all data cells must be numeric.

    Returns:
        numpy.ndarray: shape (n_rows, n_columns), row order preserved.
    """
    values = []
    # open() + context manager closes the handle; the old file() builtin
    # (Python 2 only) was never closed.
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        ncols = len(reader.fieldnames)
        for record in reader:
            # Iterate fieldnames explicitly to keep the original column order.
            for field in reader.fieldnames:
                values.append(float(record[field]))
    arr = np.array(values)
    arr.shape = (-1, ncols)
    return arr


def build_satscanresult_dbf(inputDBF):
    """
    Collect the IDs of significant clusters from a SaTScan result DBF.

    The third field (index 2) is read as the p-value; records with
    p < 0.05 contribute their first field (index 0) as an integer ID.

    Parameters:
        inputDBF (str): path to the SaTScan .dbf result file.

    Returns:
        numpy.ndarray: 1-D float array of significant-cluster IDs
        (float dtype matches the original np.append behavior).
    """
    ids = []
    db = dbf.Dbf(inputDBF)
    # Hoist field-name lookups out of the loop.
    id_field = db.fieldNames[0]
    p_field = db.fieldNames[2]
    for record in db:
        if float(record[p_field]) < 0.05:
            ids.append(int(float(record[id_field])))
    # Release the file handle; the original left the DBF open.
    db.close()
    # Build the array once: repeated np.append in a loop is O(n^2).
    return np.array(ids, dtype=float)

def build_satscanresult(inputCSV):
    """
    Collect the IDs of significant clusters from a SaTScan result CSV.

    The third column (index 2) is read as the p-value; rows with
    p < 0.05 contribute their first column (index 0) as an integer ID.

    Parameters:
        inputCSV (str): path to the SaTScan CSV result file (excel
        dialect, with a header row).

    Returns:
        numpy.ndarray: 1-D float array of significant-cluster IDs
        (float dtype matches the original np.append behavior).
    """
    ids = []
    # open() + context manager replaces the Python-2-only file() builtin,
    # which was never closed; leftover per-row debug prints removed.
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        id_field = reader.fieldnames[0]
        p_field = reader.fieldnames[2]
        for record in reader:
            if float(record[p_field]) < 0.05:
                ids.append(int(float(record[id_field])))
    # Build the array once: repeated np.append in a loop is O(n^2).
    return np.array(ids, dtype=float)

def cal_sum_within_id(list, id):
    """
    Sum all values, and separately the values whose POSITION (index)
    is a member of *id*.

    NOTE: parameter names shadow the builtins ``list`` and ``id``; they
    are kept unchanged for backward compatibility with existing callers.
    Membership is tested against the element's index, not its value, so
    this is only meaningful when row index == unit ID.

    Parameters:
        list: iterable of numeric values (e.g. populations per unit).
        id: collection of indices marking the "risk" subset.

    Returns:
        [total, risk]: sum over all values, and sum over indices in *id*.
    """
    # set() gives O(1) membership tests instead of O(len(id)) per element.
    id_set = set(id)
    total = 0
    risk = 0
    # enumerate replaces the original hand-maintained counter.
    for i, item in enumerate(list):
        total += item
        if i in id_set:
            risk += item
    return [total, risk]
   

#--------------------------------------------------------------------------
#MAIN
# Compares SaTScan-detected clusters (one DBF per replicate) against the
# known high/low risk areas and writes per-replicate TP/FP/FN counts.
# NOTE: Python 2 script (print statements); paths are hard-coded.
if __name__ == "__main__":
    print "begin at " + getCurTime()
    # Hand-picked unit IDs of the simulated high (H*) and low (L*) risk areas.
    H1 = [8,16,844,915,919,921,923,924]
    L2 = [5,103,106,513,517,518,520,531,534,535,536,541]
    H3 = [63,265,267,268,333,336,337,339,340,342,343,348]
    H4 = [13,174,178,198,886,887,888,889,890]
    L5 = [146,171,182,810,811,814,815,864,867]
    L6 = [20,133,692,694,695,696,698,702,705]
    H7 = [69,70,87,88,369,370,372,442,443]
    riskareaID = H1 + H3 + H4 + H7 + L2 + L5 + L6
    unitCSV = 'C:/_DATA/CancerData/test/Jan15/TP1000_1m.csv'
    unit_attri = np.zeros((1000,2)) # columns: [id, pop]
    dataMatrix = build_data_list(unitCSV)  # [id, pop, cancer1, cancer2, cancer3]
    unit_attri[:,0] = dataMatrix[:,0]
    unit_attri[:,1] = dataMatrix[:,1]
    #print unit_attri
    #riskarea_attri = np.zeros((1,3))    #[total_pop, total_cancer, risk_pop, risk_cancer]
    # riskarea_attri = [total_pop, pop_in_risk_areas]; assumes row index
    # equals unit ID (cal_sum_within_id matches by index) — TODO confirm.
    riskarea_attri = cal_sum_within_id(unit_attri[:,1], riskareaID)
    #print riskarea_attri
    output = np.array([])
    for repeat in range(0,1000):
        print repeat
        # Despite the variable name, this is a .dbf result file per replicate.
        satscanCSV = 'C:/_DATA/CancerData/test/Jan15/satscan/highlow/' + str(repeat) + '.dbf'
        satscan_id = build_satscanresult_dbf(satscanCSV)
        #unit_attri[:,2] = dataMatrix[:,repeat+1]
        #riskarea_attri[1], riskarea_attri[3] = cal_sum_within_id(unit_attri[:,2], riskareaID)
        result = np.zeros(4) #[TP, FP, FN, TP/(FP+FN)]
        #print satscan_id
        # Population of each detected unit counts toward TP if the unit is
        # a true risk area, otherwise toward FP. Detected IDs are used as
        # row indices into unit_attri — TODO confirm id == row index.
        for id in satscan_id:
            if int(id) in riskareaID:
                result[0] += unit_attri[int(id), 1]
            else:
                result[1] += unit_attri[int(id), 1]
        #print unit_attri.shape
        # FN = risk-area population not detected.
        result[2] = riskarea_attri[1] - result[0]
        # NOTE(review): divides by (FP + FN); raises/returns inf when both
        # are zero (perfect detection) — confirm intended.
        result[3] = result[0]/(result[1] + result[2])
        output = np.append(output, result)
    # One row of [TP, FP, FN, ratio] per replicate.
    output.shape = (-1, 4)
    print output
    filePath = 'C:/_DATA/CancerData/test/Jan15/satscan/highlow/result.csv'
    np.savetxt(filePath, output, delimiter=',')


    print "end at " + getCurTime()
    print "========================================================================"
    
            