import HelperFunctions as HF
import numpy as np
from datetime import datetime
from time import strftime
import csv
import os

def getCurTime():
    """
    Return the current local time as a string.

    Returns:
        str: timestamp formatted as '%Y-%m-%d %H:%M:%S'.
    """
    # The original wrapped strftime in a bare try/except that re-raised a
    # bare ValueError, discarding the real error; strftime on a valid
    # datetime cannot fail for this fixed format, so the guard is removed.
    # Local renamed so it no longer shadows the builtin 'format'.
    time_format = '%Y-%m-%d %H:%M:%S'
    return datetime.now().strftime(time_format)

def build_data_list(inputCSV):
    """
    Read a CSV file and return its numeric contents as a 2-D numpy array.

    Every field of every record is converted to float; the result has one
    row per CSV record and one column per CSV field (header order).

    Args:
        inputCSV (str): path to a CSV file with a header row.

    Returns:
        numpy.ndarray: array of floats with shape (num_records, num_fields).
    """
    values = []
    # open() replaces the Python 2-only file() builtin, and the context
    # manager guarantees the handle is closed even if float() raises.
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        for record in reader:
            for name in reader.fieldnames:
                values.append(float(record[name]))
        num_fields = len(reader.fieldnames)
    data = np.array(values)
    data.shape = (-1, num_fields)
    return data

def delete_shapefiles(temp_path):
    """
    Delete a shapefile together with its companion files.

    e.g. temp_path = "c:/temp/a.shp" removes a.dbf, a.prj, a.sbn, a.sbx,
    a.shp, a.shp.xml and a.shx (whichever of them exist).

    Args:
        temp_path (str): path to the .shp file.
    """
    # NOTE(review): this function is defined twice in this file; the
    # duplicate definition should be removed.
    temp_suffix = ["dbf", "prj", "sbn", "sbx", "shp", "shp.xml", "shx"]
    if not os.path.exists(temp_path):
        print(temp_path + " does not exist!")
        return
    # splitext is safer than the original temp_path[:-3] slice, which
    # silently assumed a three-character extension.
    root = os.path.splitext(temp_path)[0]
    for item in temp_suffix:
        companion = root + "." + item
        if os.path.exists(companion):
            os.remove(companion)

def updateColumnRecord(tempSHP, field, tempData):
    """
    Write the values in tempData into an existing field of a shapefile.

    Rows are updated in geoprocessor cursor order; tempData[i] goes into
    the i-th successfully updated row. Rows whose update raises are
    skipped and counted (best-effort, matching the original behavior).

    Args:
        tempSHP (str): path to the shapefile to update.
        field (str): name of the (existing) field to fill.
        tempData: sequence of values, one per shapefile row.
    """
    pRows = HF.pGP.UpdateCursor(tempSHP)
    pRow = pRows.Next()
    iError = 0
    iCnt = 0
    while pRow is not None:  # Python 2-only '<>' operator replaced
        try:
            pRow.SetValue(field, tempData[iCnt])
            pRows.UpdateRow(pRow)
            iCnt += 1
        except Exception:  # narrowed from bare except; keep best-effort skip
            iError += 1
        pRow = pRows.Next()
    if iError > 0:
        # BUG FIX: the original concatenated str + int, which raised a
        # TypeError exactly when there was an error to report.
        print("error = " + str(iError))
    pRows = None  # release the geoprocessor cursor

def delete_shapefiles(temp_path):
    """
    Delete a shapefile together with its companion files.

    e.g. temp_path = "c:/temp/a.shp" removes a.dbf, a.prj, a.sbn, a.sbx,
    a.shp, a.shp.xml and a.shx (whichever of them exist).

    Args:
        temp_path (str): path to the .shp file.
    """
    # NOTE(review): this function is defined twice in this file; the
    # duplicate definition should be removed.
    temp_suffix = ["dbf", "prj", "sbn", "sbx", "shp", "shp.xml", "shx"]
    if not os.path.exists(temp_path):
        print(temp_path + " does not exist!")
        return
    # splitext is safer than the original temp_path[:-3] slice, which
    # silently assumed a three-character extension.
    root = os.path.splitext(temp_path)[0]
    for item in temp_suffix:
        companion = root + "." + item
        if os.path.exists(companion):
            os.remove(companion)
    

def addColumnRecord(tempSHP, field, fieldtype, tempData):
    """
    Append a new attribute column to a shapefile and populate it.

    Args:
        tempSHP (str): path to the shapefile to modify.
        field (str): name of the column to create.
        fieldtype (str): geoprocessor field type (e.g. "LONG", "FLOAT").
        tempData: sequence of values, one per shapefile row.
    """
    # Create the empty column first, then fill it row by row.
    HF.pGP.AddField(tempSHP, field, fieldtype)
    updateColumnRecord(tempSHP, field, tempData)

#--------------------------------------------------------------------------
#MAIN

if __name__ == "__main__":
    # Dissolve_management(in_features, out_feature_class,
    #                     {dissolve_field}, {statistics_fields},
    #                     {multi_part}, {unsplit_lines})
    inputShp = 'C:/_DATA/CancerData/SatScan/mult6000/redcap/three16/LLR/random/NortheeaternUS.shp'
    inputCSV = 'C:/_DATA/CancerData/SatScan/mult6000/redcap/three16/LLR/random/NortheeaternUS.csv'
    field = 'disID'
    dissolve = 1
    addAttri = 0  # NOTE(review): never read below — dead flag, confirm intent
    folder = 'C:/_DATA/CancerData/SatScan/mult6000/redcap/three16/LLR/random/no_compactness/'
    if dissolve:
        # NOTE(review): range(0, 99) covers runs 0..98 (99 runs); confirm
        # whether run 99 was meant to be included (range(100)).
        for i in range(0, 99):
            # Per-run inputs: region assignment CSV and region attribute CSV.
            inputRegionCSV = folder + str(i) + '.csv'
            regionID = build_data_list(inputRegionCSV)
            inputRegionAttriCSV = folder + str(i) + '_output.csv'
            regionAttri = build_data_list(inputRegionAttriCSV)
            outputShp = folder + 'shp/' + str(i) + '.shp'
            # Stamp each source feature with its region id (last CSV column),
            # then dissolve features that share the same id.
            updateColumnRecord(inputShp, field, regionID[:, -1])
            if os.path.exists(outputShp):
                delete_shapefiles(outputShp)
            HF.pGP.Dissolve_management(inputShp, outputShp, field, "", "MULTI_PART", "DISSOLVE_LINES")
            # Attach the per-region attributes to the dissolved shapefile.
            addColumnRecord(outputShp, "cases", "LONG", regionAttri[:, 1])
            addColumnRecord(outputShp, "pop", "LONG", regionAttri[:, 2])
            addColumnRecord(outputShp, "rate", "FLOAT", regionAttri[:, 3])
            addColumnRecord(outputShp, "llr", "FLOAT", regionAttri[:, 4])
            print(i)

    print('DONE!')