import HelperFunctions as HF
import numpy as np
from datetime import datetime
from time import strftime
import csv
import os

def getCurTime():
    """Return the current local time formatted as '%Y-%m-%d %H:%M:%S'."""
    # strftime with a constant, valid format string cannot raise, so the old
    # bare-except wrapper (which re-raised a context-free ValueError and
    # shadowed the builtin `format`) has been removed.
    return datetime.now().strftime('%Y-%m-%d %H:%M:%S')

def get_inputs():
    """Collect the hard-coded run parameters into an HF.Gen container.

    Returns an object exposing sInputFC, sOutputFolder, contiguityFC,
    sOutputFC and sUniqueID as attributes.  Raises HF.ReportError on any
    failure while gathering or packing the parameters.
    """
    try:
        # Hard-coded test dataset paths; the GetParameterAsText() calls used
        # when running as a geoprocessing tool were disabled in this build.
        input_fc = "C:/_DATA/CancerData/SatScan/NortheeaternUS.shp"
        contiguity_fc = "C:/_DATA/CancerData/SatScan/NortheeaternUS.ctg"
        output_folder = "C:/temp/NortheeaternUS"
        output_fc = "C:/temp/NortheeaternUS_sharedborderlength.csv"
        unique_id = "FID"
    except: raise HF.ReportError (HF.msgParseErr)

    try:
        params = HF.Gen()
        for key, value in (('sInputFC', input_fc),
                           ('sOutputFolder', output_folder),
                           ('contiguityFC', contiguity_fc),
                           ('sOutputFC', output_fc),
                           ('sUniqueID', unique_id)):
            params.add(key, value)
    except: raise HF.ReportError (HF.msgInputsErr)

    return params

def build_contiguity_list(inputCSV):
    """Read a contiguity CSV into an (n, 3) float array.

    The first two columns of each record become integer polygon IDs
    [id_a, id_b]; the third column is initialised to 0.0 (filled in later
    by cal_shared_border()).

    Fixes: the Py2-only `file()` builtin left the handle unclosed (now a
    `with open(...)`), and the per-row `np.append` was O(n^2) (now a list
    converted once).
    """
    rows = []
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        col_a, col_b = reader.fieldnames[0], reader.fieldnames[1]
        for record in reader:
            rows.append([int(float(record[col_a])),
                         int(float(record[col_b])),
                         0.0])
    contiguity_arr = np.array(rows, dtype=float)
    contiguity_arr.shape = (-1, 3)
    return contiguity_arr

def cal_perimeter():
    """Collect a [unique_id, perimeter_length] table from inputs.sInputFC.

    Walks a geoprocessing SearchCursor over the input feature class, reading
    inputs.sUniqueID and the geometry's Length for every feature.  Rows that
    fail to yield either value are counted (the count is printed) and
    skipped.  Returns an (n, 2) float array.
    """
    HF.pGP.AddMessage ("Calculating...")
    cursor = HF.pGP.SearchCursor(inputs.sInputFC)
    row = cursor.Next()

    bad_records = 0
    records = np.array([])
    while row is not None:
        geometry = row.shape
        try:
            entry = [row.GetValue(inputs.sUniqueID), geometry.Length]
            records = np.append(records, entry)
        except:
            # Feature without a readable ID/length; tally and move on.
            bad_records += 1
        row = cursor.Next()

    records.shape = (-1, 2)
    cursor = None  # release the GP cursor
    print(bad_records)
    return records

def delete_shapefiles(temp_path):
    """Delete a shapefile and its sidecar files.

    Given e.g. "c:/temp/a.shp", removes a.dbf, a.prj, a.sbn, a.sbx, a.shp,
    a.shp.xml and a.shx when present.  If temp_path itself does not exist,
    prints a notice and returns without touching anything.

    Fix: the old `temp_path[:-3]` assumed a 3-character extension;
    os.path.splitext handles any extension length.
    """
    sidecar_exts = ["dbf", "prj", "sbn", "sbx", "shp", "shp.xml", "shx"]
    if not os.path.exists(temp_path):
        print(temp_path + " does not exist!")
        return
    base = os.path.splitext(temp_path)[0]
    for ext in sidecar_exts:
        candidate = base + "." + ext
        if os.path.exists(candidate):
            os.remove(candidate)

                      

def extract_each_polygon():
    for item in perimeter:
        i = int(item[0])
        search_clause = inputs.sUniqueID + ' = '+ str(i) 
        output_filename = inputs.sOutputFolder + "/temp_" + str(i) + ".shp"
        if os.path.exists(output_filename):
            delete_shapefiles(output_filename)
        print inputs.sInputFC, output_filename, search_clause
        HF.pGP.select_analysis(inputs.sInputFC, output_filename, search_clause)
        print i

    #pRows = HF.pGP.SearchCursor(inputs.sInputFC, search_clause)
    #pRow = pRows.Next()
    #print pRow.shape.Length
    #pRows = None  
    #for item in contiguity:
        #if int(item[0])<>int(item[1]):

def reduce_contiguity(temp_contiguity):
    """Drop self-pairs and mirrored duplicates from a contiguity table.

    Input rows are [id_a, id_b, border_length].  Rows with id_a == id_b are
    discarded; of each unordered {id_a, id_b} pair only the first occurrence
    is kept, in input order.  Returns an (n, 3) float array, or a 1-D empty
    array when nothing survives (matching the original's empty shape).

    Fix: the original re-scanned the growing output for every input row and
    grew it with np.append — O(n^2)+ overall; a seen-set makes this O(n).
    """
    seen = set()
    kept = []
    for row in temp_contiguity:
        id_a, id_b = int(row[0]), int(row[1])
        if id_a == id_b:
            continue
        pair = frozenset((id_a, id_b))
        if pair in seen:
            continue
        seen.add(pair)
        kept.append(row)
    if not kept:
        # Preserve the original's empty result: a 1-D (0,) array.
        return np.array([])
    reduced = np.array(kept, dtype=float)
    reduced.shape = (-1, 3)
    return reduced
    

def cal_shared_border():
    """Fill column 2 of the module-global ``contiguity`` array in place.

    For each pair [id_a, id_b, _], the per-polygon shapefiles
    temp_<id>.shp under inputs.sOutputFolder (written beforehand by
    extract_each_polygon()) are intersected with LINE output, and the
    length of the first intersection row is stored as the pair's shared
    border length.  Pairs touching an ID in ``excludeID`` are flagged
    with the sentinel -1 instead of being measured.  Returns nothing;
    the mutated ``contiguity`` array is the result.
    """
    #temp_border_length = np.array([])
    x = 0  # sequence number used to name each temporary intersect output
    
    '''
    i = 77
    j = 92
    intersect_input_shp_a = inputs.sOutputFolder + "/temp_" + str(i) + ".shp"
    intersect_input_shp_b = inputs.sOutputFolder + "/temp_" + str(j) + ".shp"
    intersect_input_shp = intersect_input_shp_a + ";"+ intersect_input_shp_b
    intersect_output_shp = inputs.sOutputFolder + "/temp_ins_out_"+ str(x)+".shp"
    print intersect_input_shp
    if os.path.exists(intersect_output_shp):
        delete_shapefiles(intersect_output_shp)
    HF.pGP.Intersect_analysis(intersect_input_shp, intersect_output_shp, "#", "", "LINE")
    pRows = HF.pGP.SearchCursor(intersect_output_shp)
    pRow = pRows.Next()
    if pRow <> None:
        temp_border_length = np.append(temp_border_length, pRow.shape.Length)
    else:
        temp_border_length = np.append(temp_border_length, 0)
    pRows = None
    delete_shapefiles(intersect_output_shp)
    print temp_border_length
    '''
    # IDs whose pairs are skipped and flagged -1.
    # TODO(review): presumably polygons that Intersect cannot handle —
    # confirm why these specific FIDs are excluded.
    excludeID = [100,103,127,134,137,141,149]
    for item in contiguity:
        i = int(item[0])
        j = int(item[1])
        if (i in excludeID) or (j in excludeID):
            item[2] = -1  # sentinel: pair deliberately not measured
        else:
            # "a.shp;b.shp" multi-input string expected by Intersect_analysis.
            intersect_input_shp_a = inputs.sOutputFolder + "/temp_" + str(i) + ".shp"
            intersect_input_shp_b = inputs.sOutputFolder + "/temp_" + str(j) + ".shp"
            intersect_input_shp = intersect_input_shp_a + ";"+ intersect_input_shp_b
            intersect_output_shp = inputs.sOutputFolder + "/temp_ins_out_"+ str(x)+".shp"
            print intersect_input_shp
            # Clear any stale output from a previous run before intersecting.
            if os.path.exists(intersect_output_shp):
                delete_shapefiles(intersect_output_shp)
            # LINE output type: keep only the shared boundary geometry.
            HF.pGP.Intersect_analysis(intersect_input_shp, intersect_output_shp, "#", "", "LINE")
            pRows = HF.pGP.SearchCursor(intersect_output_shp)
            pRow = pRows.Next()
            if pRow <> None:
                #temp_border_length = np.append(temp_border_length, pRow.shape.Length)
                item[2] = pRow.shape.Length  # length of the shared boundary
            else:
                #temp_border_length = np.append(temp_border_length, 0)
                item[2] = 0  # no LINE intersection row was produced
            pRows = None
            # The temporary intersect shapefile is no longer needed.
            delete_shapefiles(intersect_output_shp)
        print x
        x += 1

    #return temp_border_length
     
def build_list(inputCSV):
    """Read the first CSV column as integers into a 1-D float array.

    Fixes: the Py2-only `file()` builtin left the handle unclosed (now a
    `with open(...)`), the per-row `np.append` was O(n^2) (now one
    conversion), and the unused counter `i` is gone.  dtype=float matches
    the original np.append result.
    """
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        first_col = reader.fieldnames[0]
        values = [int(float(record[first_col])) for record in reader]
    return np.array(values, dtype=float)

def build_border_length_list(inputCSV):
    """Read the first three CSV columns as floats into an (n, 3) array.

    Intended for re-loading a [id_a, id_b, border_length] table previously
    written by this script.  Fixes: Py2-only `file()` with an unclosed
    handle (now `with open(...)`), O(n^2) np.append growth (now one
    conversion), unused counter `i` removed.
    """
    rows = []
    with open(inputCSV) as fh:
        reader = csv.DictReader(fh, dialect="excel")
        c0, c1, c2 = reader.fieldnames[0], reader.fieldnames[1], reader.fieldnames[2]
        for record in reader:
            rows.append([float(record[c0]), float(record[c1]), float(record[c2])])
    border_arr = np.array(rows, dtype=float)
    border_arr.shape = (-1, 3)
    return border_arr

def find_perimeter(temp_id, perimeter_table=None):
    """Return the perimeter length whose ID column equals temp_id.

    perimeter_table is a sequence of [id, length] records; it defaults to
    the module-global ``perimeter`` so existing callers are unaffected.
    Returns None (now explicitly) when the ID is not found.
    """
    table = perimeter if perimeter_table is None else perimeter_table
    wanted = int(temp_id)
    for record in table:
        if int(record[0]) == wanted:
            return record[1]
    return None

def build_contiguity(contiguity_table=None, perimeter_table=None):
    """Build a symmetric, lexicographically sorted [id_a, id_b] pair list.

    A pair from ``contiguity_table`` ([id_a, id_b, border_length]) is kept
    in both directions when its shared border is longer than one tenth of
    the smaller of the two polygon perimeters, or when it carries the -1
    "excluded" sentinel set by cal_shared_border().  Every polygon is also
    paired with itself.  Returns an (n, 2) float array sorted by
    (id_a, id_b).

    Both arguments default to the module globals ``contiguity`` and
    ``perimeter`` so existing no-argument callers are unaffected.

    Fixes: the original sorted with two consecutive argsort() calls, but
    argsort's default sort is not stable, so rows tied on column 0 could
    end up with column 1 unordered — np.lexsort gives a true lexicographic
    order.  Perimeter lookup is now O(1) via a dict instead of a linear
    scan per pair.
    """
    if contiguity_table is None:
        contiguity_table = contiguity
    if perimeter_table is None:
        perimeter_table = perimeter

    peri_by_id = {int(rec[0]): rec[1] for rec in perimeter_table}

    pairs = []
    for rec in contiguity_table:
        id_a, id_b, border_len = rec[0], rec[1], rec[2]
        min_peri = min(peri_by_id[int(id_a)], peri_by_id[int(id_b)])
        if border_len > min_peri / 10:
            pairs += [[id_a, id_b], [id_b, id_a]]
        if border_len == -1:
            # Excluded pairs (never measured) are kept unconditionally.
            pairs += [[id_a, id_b], [id_b, id_a]]
    for rec in perimeter_table:
        pairs.append([rec[0], rec[0]])

    out = np.array(pairs, dtype=float).reshape(-1, 2)
    order = np.lexsort((out[:, 1], out[:, 0]))  # primary col 0, secondary col 1
    return out[order, :]
    
#--------------------------------------------------------------------------
#MAIN

if __name__ == "__main__":
    inputs = get_inputs()
    perimeter = cal_perimeter() # [FID, perimeter]
    #print perimeter
    #extract_each_polygon()

    contiguity = build_contiguity_list(inputs.contiguityFC)
    contiguity = reduce_contiguity(contiguity)
    np.savetxt("C:/temp/NortheeaternUS/reduced_NortheeaternUS_contiguity.csv", contiguity, delimiter=',')
    cal_shared_border()
    np.savetxt("C:/temp/NortheeaternUS/NortheeaternUS_shared_border_length.csv", contiguity, delimiter=',')

    #border_csv = "C:/temp/reduced_TP1000_1m_contiguity.csv"
    #contiguity = build_border_length_list(border_csv)    # [id1, id2, lengths]
    re_contiguity = build_contiguity()
    np.savetxt("C:/temp/NortheeaternUS/NortheeaternUS_re_contiguity.csv", re_contiguity, delimiter=',')
    #delete_each_polygon()
    
    print "Done!"