import HelperFunctions as HF
import numpy as np
from datetime import datetime
from time import strftime
import csv
import os

def get_inputs():
    """Collect the script's input parameters and return them as an object.

    The geoprocessor-parameter block is commented out; hard-coded test
    paths are used instead.

    Returns:
        HF.Gen object with attributes sInputFC (input shapefile path)
        and sOutputFolder (folder for per-trajectory outputs).
    Raises:
        HF.ReportError -- with HF.msgParseErr if gathering the raw values
        fails, or HF.msgInputsErr if packing them into the object fails.
    """
    try:
        # Production path: read the parameters from the geoprocessor.
        '''
        sInputFC = HF.pGP.GetParameterAsText(0)                     # Input Feature Class
        sZField = HF.pGP.GetParameterAsText(1).upper()              # Attribute Field Name
        sOutputFC = HF.pGP.GetParameterAsText(2)                    # Output Feature Class
        lsField = sZField.split(";")
        '''

        # This block can be used to test the code using provided datasets.
        sInputFC = "F:/terrorism/gr_001_ro_bu.shp"
        sOutputFolder = "C:/peng"

    # A bare "except:" also swallows SystemExit/KeyboardInterrupt;
    # narrowed to Exception so only real errors are converted.
    except Exception:
        raise HF.ReportError(HF.msgParseErr)

    try:
        obj = HF.Gen()
        obj.add('sInputFC', sInputFC)
        obj.add('sOutputFolder', sOutputFolder)

    except Exception:
        raise HF.ReportError(HF.msgInputsErr)

    return obj

def delete_shapefiles(temp_path):
    """Delete a shapefile together with all of its sidecar files.

    e.g. temp_path = "c:/temp/a.shp" removes a.dbf, a.prj, a.sbn, a.sbx,
    a.shp, a.shp.xml and a.shx (whichever of them exist).

    Parameters:
        temp_path -- path to the .shp file of the shapefile to delete.
                     If it does not exist, a message is printed and
                     nothing is removed.
    """
    suffixes = ("dbf", "prj", "sbn", "sbx", "shp", "shp.xml", "shx")
    if not os.path.exists(temp_path):
        print(temp_path + " does not exist!")
        return
    # Derive the common stem ("c:/temp/a.") once via splitext instead of
    # slicing a fixed 3-character extension off inside the loop -- this
    # also works for extensions that are not exactly 3 characters long.
    base = os.path.splitext(temp_path)[0] + "."
    for suffix in suffixes:
        candidate = base + suffix
        if os.path.exists(candidate):
            os.remove(candidate)

                      

def extract_each_polygon():
    """Export the polygons listed in the module-level clusterID array.

    Builds an attribute-selection clause over the FIDs in clusterID and
    writes the matching features of inputs.sInputFC to
    <inputs.sOutputFolder>/temp_<output_id>.shp, replacing any shapefile
    left over from a previous run.
    """
    clauses = ['"FID" = ' + str(int(fid)) for fid in clusterID]
    search_clause = " or ".join(clauses)
    #print search_clause
    output_filename = inputs.sOutputFolder + "/temp_" + str(output_id) + ".shp"
    # Clear out a stale result shapefile before re-creating it.
    if os.path.exists(output_filename):
        delete_shapefiles(output_filename)
    HF.pGP.select_analysis(inputs.sInputFC, output_filename, search_clause)
    
    
    
def build_traj_list(inputCSV):
    """Read a trajectory CSV and return its ClusterID column.

    Parameters:
        inputCSV -- path to a CSV file containing a "ClusterID" column
                    (values may be written as floats, e.g. "3.0").
    Returns:
        1-D numpy float array of the integer ClusterID values, in file
        order (empty array for a header-only file).
    Raises:
        IOError if the file is missing; KeyError if there is no
        ClusterID column.
    """
    # Accumulate into a plain list: calling np.append per row copies the
    # whole array every time, which is quadratic in the number of rows.
    ids = []
    # 'with open(...)' closes the handle; the original used the
    # Python-2-only file() builtin and leaked it.
    with open(inputCSV) as fh:
        for record in csv.DictReader(fh, dialect="excel"):
            # Values arrive as strings such as "3.0"; go via float first.
            ids.append(int(float(record["ClusterID"])))
    return np.array(ids, dtype=float)
#--------------------------------------------------------------------------
#MAIN

if __name__ == "__main__":
    # Driver: for each of the 23 trajectory CSVs, read its ClusterID
    # column and export the matching polygons to temp_<i>.shp in the
    # output folder.  NOTE(review): input paths and the count of 23 are
    # hard-coded -- confirm they match the available traj*.csv files.
    inputs = get_inputs()
    output_id = 0
    for i in range(0,23):
        # inputs, output_id and clusterID are module-level globals that
        # extract_each_polygon() reads implicitly; they must be assigned
        # before each call.
        output_id = i
        input_traj_csv = "F:/terrorism/traj" + str(i) + ".csv"
        clusterID = build_traj_list(input_traj_csv)
        extract_each_polygon()

    # Earlier analysis steps, kept for reference:
    #perimeter = cal_perimeter() # [GIST_ID, perimeter]
    #contiguity = build_contiguity_list(inputs.contiguityFC)
    #re_contiguity = reduce_contiguity(contiguity)
    #np.savetxt("C:/temp/reduced_NCTrtCancer_ROOK.csv", re_contiguity, delimiter=',')
    #print re_contiguity
    #extract_each_polygon()

    #border_length = cal_shared_border()
    #border_csv = "C:/_DATA/CancerData/LATrtCancerS_shared_border_length.csv"
    #border_length = build_border_length_list(border_csv)    # [id1, id2, lengths]
    #re_contiguity = build_contiguity()
    #np.savetxt("C:/temp/re_contiguity.csv", re_contiguity, delimiter=',')

    print "Done!"